/*****************************************************************************
 * videotoolbox.m: Video Toolbox decoder
 *****************************************************************************
 * Copyright © 2014-2015 VideoLabs SAS
 *
 * Authors: Felix Paul Kühne <fkuehne # videolan.org>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation; either version 2.1 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this program; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301, USA.
 *****************************************************************************/

#pragma mark preamble

#ifdef HAVE_CONFIG_H
# import "config.h"
#endif

#import <vlc_common.h>
#import <vlc_plugin.h>
#import <vlc_codec.h>
#import "../packetizer/h264_nal.h"
#import "../packetizer/hxxx_nal.h"
#import "../video_chroma/copy.h"
#import <vlc_bits.h>
#import <vlc_boxes.h>

#import <VideoToolbox/VideoToolbox.h>

#import <Foundation/Foundation.h>
#import <TargetConditionals.h>

#import <sys/types.h>
#import <sys/sysctl.h>
#import <mach/machine.h>

#if TARGET_OS_IPHONE
#import <UIKit/UIKit.h>

/* support iOS SDKs < v9.1 */
#ifndef CPUFAMILY_ARM_TWISTER
#define CPUFAMILY_ARM_TWISTER 0x92fb37c8
#endif

#endif

#pragma mark - module descriptor

static int OpenDecoder(vlc_object_t *);
static void CloseDecoder(vlc_object_t *);

#if MAC_OS_X_VERSION_MAX_ALLOWED < 1090
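/* these session keys are only declared in the OS X 10.9 and later SDKs,
 * so provide definitions when building against older SDK headers */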
const CFStringRef kVTVideoDecoderSpecification_EnableHardwareAcceleratedVideoDecoder = CFSTR("EnableHardwareAcceleratedVideoDecoder");
const CFStringRef kVTVideoDecoderSpecification_RequireHardwareAcceleratedVideoDecoder = CFSTR("RequireHardwareAcceleratedVideoDecoder");
#endif

#define VT_ZERO_COPY N_("Use zero-copy rendering")
#if !TARGET_OS_IPHONE
#define VT_REQUIRE_HW_DEC N_("Use Hardware decoders only")
#endif
#define VT_TEMPO_DEINTERLACE N_("Deinterlacing")
#define VT_TEMPO_DEINTERLACE_LONG N_("If interlaced content is detected, temporal deinterlacing is enabled at the expense of a pipeline delay.")

vlc_module_begin()
set_category(CAT_INPUT)
set_subcategory(SUBCAT_INPUT_VCODEC)
set_description(N_("VideoToolbox video decoder"))
set_capability("decoder", 800)
set_callbacks(OpenDecoder, CloseDecoder)

add_bool("videotoolbox-temporal-deinterlacing", true, VT_TEMPO_DEINTERLACE, VT_TEMPO_DEINTERLACE_LONG, false)
#if !TARGET_OS_IPHONE
add_bool("videotoolbox-zero-copy", false, VT_ZERO_COPY, VT_ZERO_COPY, false)
add_bool("videotoolbox-hw-decoder-only", false, VT_REQUIRE_HW_DEC, VT_REQUIRE_HW_DEC, false)
#else
add_bool("videotoolbox-zero-copy", true, VT_ZERO_COPY, VT_ZERO_COPY, false)
#endif
vlc_module_end()

#pragma mark - local prototypes

static CFDataRef ESDSCreate(decoder_t *, uint8_t *, uint32_t);
static picture_t *DecodeBlock(decoder_t *, block_t **);
static void Flush(decoder_t *);
static void DecoderCallback(void *, void *, OSStatus, VTDecodeInfoFlags,
                            CVPixelBufferRef, CMTime, CMTime);
void VTDictionarySetInt32(CFMutableDictionaryRef, CFStringRef, int);
static void copy420YpCbCr8Planar(picture_t *, CVPixelBufferRef buffer,
                                 unsigned i_width, unsigned i_height);
static BOOL deviceSupportsAdvancedProfiles();
static BOOL deviceSupportsAdvancedLevels();

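/* per-picture context used in zero-copy mode: holds a retained
 * CVPixelBufferRef that is handed over to the video output */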
struct picture_sys_t {
    CFTypeRef pixelBuffer;
};

#pragma mark - decoder structure

struct decoder_sys_t
{
    CMVideoCodecType            codec;
    uint8_t                     i_nal_length_size;

    bool                        b_started;
    bool                        b_is_avcc;
    VTDecompressionSessionRef   session;
    CMVideoFormatDescriptionRef videoFormatDescription;

    NSMutableArray              *outputTimeStamps;
    NSMutableDictionary         *outputFrames;
    bool                        b_zero_copy;
    bool                        b_enable_temporal_processing;

    bool                        b_format_propagated;
};

#pragma mark - start & stop

static CMVideoCodecType CodecPrecheck(decoder_t *p_dec)
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    size_t i_profile = 0xFFFF, i_level = 0xFFFF;
    bool b_ret = false;
    CMVideoCodecType codec;

    /* check for the codec we can and want to decode */
    switch (p_dec->fmt_in.i_codec) {
        case VLC_CODEC_H264:
            codec = kCMVideoCodecType_H264;

            b_ret = h264_get_profile_level(&p_dec->fmt_in, &i_profile, &i_level, NULL);
            if (!b_ret) {
                msg_Warn(p_dec, "H264 profile and level parsing failed, extradata not received yet");
                return kCMVideoCodecType_H264;
            }

            msg_Dbg(p_dec, "trying to decode MPEG-4 Part 10: profile %zu, level %zu", i_profile, i_level);

            switch (i_profile) {
                case PROFILE_H264_BASELINE:
                case PROFILE_H264_MAIN:
                case PROFILE_H264_HIGH:
                    break;

                case PROFILE_H264_HIGH_10:
                {
                    if (deviceSupportsAdvancedProfiles())
                        break;
                }

                default:
                {
                    msg_Dbg(p_dec, "unsupported H264 profile %zu", i_profile);
                    return -1;
                }
            }

#if !TARGET_OS_IPHONE
            /* levels higher than 5.2 are untested, so don't try
             * to decode them */
            if (i_level > 52) {
                msg_Dbg(p_dec, "unsupported H264 level %zu", i_level);
                return -1;
            }
#else
            /* on the A8 SoC, 4.2 is the highest supported level */
            if (i_level > 42) {
                /* on Twister, we can go up to 5.2 */
                if (!deviceSupportsAdvancedLevels() || i_level > 52) {
                    msg_Dbg(p_dec, "unsupported H264 level %zu", i_level);
                    return -1;
                }
            }
#endif

            break;
        case VLC_CODEC_MP4V:
            codec = kCMVideoCodecType_MPEG4Video;
            break;
        case VLC_CODEC_H263:
            codec = kCMVideoCodecType_H263;
            break;

#if !TARGET_OS_IPHONE
        /* there are no DV or ProRes decoders on iOS, so bail out early */
        case VLC_CODEC_PRORES:
            /* the VT decoder can't differentiate between the ProRes flavors, so we do it */
            switch (p_dec->fmt_in.i_original_fourcc) {
                case VLC_FOURCC( 'a','p','4','c' ):
                case VLC_FOURCC( 'a','p','4','h' ):
                    codec = kCMVideoCodecType_AppleProRes4444;
                    break;

                case VLC_FOURCC( 'a','p','c','h' ):
                    codec = kCMVideoCodecType_AppleProRes422HQ;
                    break;

                case VLC_FOURCC( 'a','p','c','s' ):
                    codec = kCMVideoCodecType_AppleProRes422LT;
                    break;

                case VLC_FOURCC( 'a','p','c','o' ):
                    codec = kCMVideoCodecType_AppleProRes422Proxy;
                    break;

                default:
                    codec = kCMVideoCodecType_AppleProRes422;
                    break;
            }
            if (codec != 0)
                break;

        case VLC_CODEC_DV:
            /* the VT decoder can't differentiate between PAL and NTSC, so we need to do it */
            switch (p_dec->fmt_in.i_original_fourcc) {
                case VLC_FOURCC( 'd', 'v', 'c', ' '):
                case VLC_FOURCC( 'd', 'v', ' ', ' '):
                    msg_Dbg(p_dec, "Decoding DV NTSC");
                    codec = kCMVideoCodecType_DVCNTSC;
                    break;

                case VLC_FOURCC( 'd', 'v', 's', 'd'):
                case VLC_FOURCC( 'd', 'v', 'c', 'p'):
                case VLC_FOURCC( 'D', 'V', 'S', 'D'):
                    msg_Dbg(p_dec, "Decoding DV PAL");
                    codec = kCMVideoCodecType_DVCPAL;
                    break;

                default:
                    break;
            }
            if (codec != 0)
                break;
#endif
            /* mpgv / mp2v needs fixing, so disable it for now */
#if 0
        case VLC_CODEC_MPGV:
            codec = kCMVideoCodecType_MPEG1Video;
            break;
        case VLC_CODEC_MP2V:
            codec = kCMVideoCodecType_MPEG2Video;
            break;
#endif

        default:
#ifndef NDEBUG
            msg_Err(p_dec, "'%4.4s' is not supported", (char *)&p_dec->fmt_in.i_codec);
#endif
            return -1;
    }

    return codec;
}

static int StartVideoToolbox(decoder_t *p_dec, block_t *p_block)
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    OSStatus status;

    /* setup the decoder */
    CFMutableDictionaryRef decoderConfiguration = CFDictionaryCreateMutable(kCFAllocatorDefault,
                                                                            2,
                                                                            &kCFTypeDictionaryKeyCallBacks,
                                                                            &kCFTypeDictionaryValueCallBacks);
    CFDictionarySetValue(decoderConfiguration,
                         kCVImageBufferChromaLocationBottomFieldKey,
                         kCVImageBufferChromaLocation_Left);
    CFDictionarySetValue(decoderConfiguration,
                         kCVImageBufferChromaLocationTopFieldKey,
                         kCVImageBufferChromaLocation_Left);
    p_sys->b_zero_copy = var_InheritBool(p_dec, "videotoolbox-zero-copy");

    /* fetch extradata */
    CFMutableDictionaryRef extradata_info = NULL;
    CFDataRef extradata = NULL;

    extradata_info = CFDictionaryCreateMutable(kCFAllocatorDefault,
                                               1,
                                               &kCFTypeDictionaryKeyCallBacks,
                                               &kCFTypeDictionaryValueCallBacks);

    unsigned i_video_width = 0, i_video_visible_width = 0;
    unsigned i_video_height = 0, i_video_visible_height = 0;
    int i_sar_den = 0;
    int i_sar_num = 0;

    if (p_sys->codec == kCMVideoCodecType_H264) {
        /* Do a late opening if there is no extra data and no valid video size */
        if ((p_dec->fmt_in.video.i_width == 0 || p_dec->fmt_in.video.i_height == 0
          || p_dec->fmt_in.i_extra == 0) && p_block == NULL) {
            msg_Dbg(p_dec, "waiting for H264 SPS/PPS, will start late");

            return VLC_SUCCESS;
        }

        size_t i_buf;
        uint8_t *p_buf = NULL;
        uint8_t *p_alloc_buf = NULL;
        int i_ret = 0;

        if (p_block == NULL) {
            /* we need to convert the SPS and PPS units we received from the
             * demuxer's avcC atom so we can process them further */
            if (h264_isavcC(p_dec->fmt_in.p_extra, p_dec->fmt_in.i_extra))
            {
                p_alloc_buf = h264_avcC_to_AnnexB_NAL(p_dec->fmt_in.p_extra,
                                                      p_dec->fmt_in.i_extra,
                                                      &i_buf,
                                                      &p_sys->i_nal_length_size);
                p_buf = p_alloc_buf;
                p_sys->b_is_avcc = !!p_buf;
            }
        } else {
            /* we are mid-stream, let's have the h264_get helper see if it
             * can find a NAL unit */
            i_buf = p_block->i_buffer;
            p_buf = p_block->p_buffer;
            p_sys->i_nal_length_size = 4; /* default to 4 bytes */
            i_ret = VLC_SUCCESS;
        }

        uint8_t *p_sps_buf = NULL, *p_pps_buf = NULL;
        size_t i_sps_size = 0, i_pps_size = 0;
        if (!p_buf) {
            msg_Warn(p_dec, "no valid extradata or conversion failed");
            return VLC_EGENERIC;
        }

        /* get the SPS and PPS units from the NAL unit which is either
         * part of the demuxer's avcC atom or the mid-stream data block */
        i_ret = h264_get_spspps(p_buf,
                                i_buf,
                                &p_sps_buf,
                                &i_sps_size,
                                &p_pps_buf,
                                &i_pps_size);
        if (p_alloc_buf)
            free(p_alloc_buf);
        if (i_ret != VLC_SUCCESS) {
            msg_Warn(p_dec, "SPS/PPS detection failed");
            return VLC_EGENERIC;
        }

        /* Decode Sequence Parameter Set */
        const uint8_t *p_stp_sps_buf = p_sps_buf;
        size_t i_stp_sps_nal = i_sps_size;
        h264_sequence_parameter_set_t *p_sps_data;
        if (!(p_sps_data = h264_decode_sps(p_stp_sps_buf, i_stp_sps_nal, true)))
        {
            msg_Warn(p_dec, "SPS/PPS parsing failed");
            return VLC_EGENERIC;
        }

        /* this data is more trustworthy than what we receive
         * from the demuxer, so we will use it to overwrite
         * the current values */
        (void) h264_get_picture_size( p_sps_data, &i_video_width,
                                      &i_video_height,
                                      &i_video_visible_width,
                                      &i_video_visible_height );
        i_sar_den = p_sps_data->vui.i_sar_den;
        i_sar_num = p_sps_data->vui.i_sar_num;

        h264_release_sps( p_sps_data );
        /* !Decode Sequence Parameter Set */

        if (!p_sys->b_is_avcc)
        {
            block_t *p_avcC = h264_AnnexB_NAL_to_avcC(
                                    p_sys->i_nal_length_size,
                                    p_sps_buf, i_sps_size,
                                    p_pps_buf, i_pps_size);
            if (!p_avcC) {
                msg_Warn(p_dec, "buffer creation failed");
                return VLC_EGENERIC;
            }

            extradata = CFDataCreate(kCFAllocatorDefault,
                                     p_avcC->p_buffer,
                                     p_avcC->i_buffer);
            block_Release(p_avcC);
        }
        else /* already avcC extradata */
        {
            extradata = CFDataCreate(kCFAllocatorDefault,
                                     p_dec->fmt_in.p_extra,
                                     p_dec->fmt_in.i_extra);
        }

        if (extradata)
            CFDictionarySetValue(extradata_info, CFSTR("avcC"), extradata);

        CFDictionarySetValue(decoderConfiguration,
                             kCMFormatDescriptionExtension_SampleDescriptionExtensionAtoms,
                             extradata_info);

    } else if (p_sys->codec == kCMVideoCodecType_MPEG4Video) {
        extradata = ESDSCreate(p_dec,
                               (uint8_t*)p_dec->fmt_in.p_extra,
                               p_dec->fmt_in.i_extra);

        if (extradata)
            CFDictionarySetValue(extradata_info, CFSTR("esds"), extradata);

        CFDictionarySetValue(decoderConfiguration,
                             kCMFormatDescriptionExtension_SampleDescriptionExtensionAtoms,
                             extradata_info);
    } else {
        CFDictionarySetValue(decoderConfiguration,
                             kCMFormatDescriptionExtension_SampleDescriptionExtensionAtoms,
                             extradata_info);
    }

    if (extradata)
        CFRelease(extradata);
    CFRelease(extradata_info);

    /* pixel aspect ratio */
    CFMutableDictionaryRef pixelaspectratio = CFDictionaryCreateMutable(kCFAllocatorDefault,
                                                                        2,
                                                                        &kCFTypeDictionaryKeyCallBacks,
                                                                        &kCFTypeDictionaryValueCallBacks);
    /* fall back on the demuxer's values if we don't have better info */
    /* FIXME: couldn't we skip the temporary storage and write to fmt_out directly? */
    if (i_video_width == 0)
        i_video_width = p_dec->fmt_in.video.i_width;
    if (i_video_height == 0)
        i_video_height = p_dec->fmt_in.video.i_height;
    if (!i_video_visible_width)
        i_video_visible_width = p_dec->fmt_in.video.i_visible_width;
    if (!i_video_visible_height)
        i_video_visible_height = p_dec->fmt_in.video.i_visible_height;
    if (i_sar_num == 0)
        i_sar_num = p_dec->fmt_in.video.i_sar_num ? p_dec->fmt_in.video.i_sar_num : 1;
    if (i_sar_den == 0)
        i_sar_den = p_dec->fmt_in.video.i_sar_den ? p_dec->fmt_in.video.i_sar_den : 1;

    VTDictionarySetInt32(pixelaspectratio,
                         kCVImageBufferPixelAspectRatioHorizontalSpacingKey,
                         i_sar_num);
    VTDictionarySetInt32(pixelaspectratio,
                         kCVImageBufferPixelAspectRatioVerticalSpacingKey,
                         i_sar_den);
    CFDictionarySetValue(decoderConfiguration,
                         kCVImageBufferPixelAspectRatioKey,
                         pixelaspectratio);
    CFRelease(pixelaspectratio);

#if !TARGET_OS_IPHONE
    /* enable HW accelerated playback, since this is optional on OS X
     * note that the backend may still fall back to software mode if no
     * suitable hardware is available */
    CFDictionarySetValue(decoderConfiguration,
                         kVTVideoDecoderSpecification_EnableHardwareAcceleratedVideoDecoder,
                         kCFBooleanTrue);

    /* on OS X, we can force VT to fail if no suitable HW decoder is available,
     * preventing the aforementioned SW fallback */
    if (var_InheritBool(p_dec, "videotoolbox-hw-decoder-only"))
        CFDictionarySetValue(decoderConfiguration,
                             kVTVideoDecoderSpecification_RequireHardwareAcceleratedVideoDecoder,
                             kCFBooleanTrue);
#endif

    p_sys->b_enable_temporal_processing = false;
    if (var_InheritBool(p_dec, "videotoolbox-temporal-deinterlacing")) {
        if (p_block != NULL) {
            if (p_block->i_flags & BLOCK_FLAG_TOP_FIELD_FIRST ||
                p_block->i_flags & BLOCK_FLAG_BOTTOM_FIELD_FIRST) {
                msg_Dbg(p_dec, "Interlaced content detected, inserting temporal deinterlacer");
                CFDictionarySetValue(decoderConfiguration, kVTDecompressionPropertyKey_FieldMode, kVTDecompressionProperty_FieldMode_DeinterlaceFields);
                CFDictionarySetValue(decoderConfiguration, kVTDecompressionPropertyKey_DeinterlaceMode, kVTDecompressionProperty_DeinterlaceMode_Temporal);
                p_sys->b_enable_temporal_processing = true;
            }
        }
    }

    /* create video format description */
    status = CMVideoFormatDescriptionCreate(kCFAllocatorDefault,
                                            p_sys->codec,
                                            i_video_width,
                                            i_video_height,
                                            decoderConfiguration,
                                            &p_sys->videoFormatDescription);
    if (status) {
        CFRelease(decoderConfiguration);
        msg_Err(p_dec, "video format description creation failed (%i)", status);
        return VLC_EGENERIC;
    }

    /* destination pixel buffer attributes */
    CFMutableDictionaryRef dpba = CFDictionaryCreateMutable(kCFAllocatorDefault,
                                                            2,
                                                            &kCFTypeDictionaryKeyCallBacks,
                                                            &kCFTypeDictionaryValueCallBacks);

#if !TARGET_OS_IPHONE
    CFDictionarySetValue(dpba,
                         kCVPixelBufferOpenGLCompatibilityKey,
                         kCFBooleanTrue);
#else
    CFDictionarySetValue(dpba,
                         kCVPixelBufferOpenGLESCompatibilityKey,
                         kCFBooleanTrue);
#endif

    /* full range allows a broader range of colors but is H264 only */
    if (p_sys->codec == kCMVideoCodecType_H264) {
        VTDictionarySetInt32(dpba,
                             kCVPixelBufferPixelFormatTypeKey,
                             kCVPixelFormatType_420YpCbCr8BiPlanarFullRange);
    } else {
        VTDictionarySetInt32(dpba,
                             kCVPixelBufferPixelFormatTypeKey,
                             kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
    }
    VTDictionarySetInt32(dpba,
                         kCVPixelBufferWidthKey,
                         i_video_width);
    VTDictionarySetInt32(dpba,
                         kCVPixelBufferHeightKey,
                         i_video_height);
    VTDictionarySetInt32(dpba,
                         kCVPixelBufferBytesPerRowAlignmentKey,
                         i_video_width * 2);

    /* setup decoder callback record */
    VTDecompressionOutputCallbackRecord decoderCallbackRecord;
    decoderCallbackRecord.decompressionOutputCallback = DecoderCallback;
    decoderCallbackRecord.decompressionOutputRefCon = p_dec;

    /* create decompression session */
    status = VTDecompressionSessionCreate(kCFAllocatorDefault,
                                          p_sys->videoFormatDescription,
                                          decoderConfiguration,
                                          dpba,
                                          &decoderCallbackRecord,
                                          &p_sys->session);

    /* release no longer needed storage items */
    CFRelease(dpba);
    CFRelease(decoderConfiguration);

    /* check if the session is valid */
    if (status) {
        switch (status) {
            case -12470:
                msg_Err(p_dec, "VT is not supported on this hardware");
                break;
            case -12471:
                msg_Err(p_dec, "Video format is not supported by VT");
                break;
            case -12903:
                msg_Err(p_dec, "created session is invalid, could not select and open decoder instance");
                break;
            case -12906:
                msg_Err(p_dec, "could not find decoder");
                break;
            case -12910:
                msg_Err(p_dec, "unsupported data");
                break;
            case -12913:
                msg_Err(p_dec, "VT is not available to sandboxed apps on this OS release or maximum number of decoders reached");
                break;
            case -12917:
                msg_Err(p_dec, "Insufficient source color data");
                break;
            case -12918:
                msg_Err(p_dec, "Could not create color correction data");
                break;
            case -12210:
                msg_Err(p_dec, "Insufficient authorization to create decoder");
                break;
            case -8973:
                msg_Err(p_dec, "Could not select and open decoder instance");
                break;

            default:
                msg_Err(p_dec, "Decompression session creation failed (%i)", status);
                break;
        }
        return VLC_EGENERIC;
    }

    p_dec->fmt_out.video.i_width = i_video_width;
    p_dec->fmt_out.video.i_height = i_video_height;
    p_dec->fmt_out.video.i_visible_width = i_video_visible_width;
    p_dec->fmt_out.video.i_visible_height = i_video_visible_height;
    p_dec->fmt_out.video.i_sar_den = i_sar_den;
    p_dec->fmt_out.video.i_sar_num = i_sar_num;

    if (p_block) {
        /* this is a mid-stream change, so we need to tell the core about it */
        decoder_UpdateVideoFormat(p_dec);
    }

    /* setup storage */
    p_sys->outputTimeStamps = [[NSMutableArray alloc] init];
    p_sys->outputFrames = [[NSMutableDictionary alloc] init];
    if (!p_sys->outputTimeStamps || !p_sys->outputFrames) {
        msg_Warn(p_dec, "buffer management structure allocation failed");
        return VLC_ENOMEM;
    }

    p_sys->b_started = YES;

    return VLC_SUCCESS;
}

static void StopVideoToolbox(decoder_t *p_dec)
{
    decoder_sys_t *p_sys = p_dec->p_sys;

    if (p_sys->b_started) {
        if (p_sys->outputTimeStamps != nil)
            CFRelease(p_sys->outputTimeStamps);
        p_sys->outputTimeStamps = nil;
        if (p_sys->outputFrames != nil)
            CFRelease(p_sys->outputFrames);
        p_sys->outputFrames = nil;

        p_sys->b_started = false;
        if (p_sys->session != nil) {
            VTDecompressionSessionInvalidate(p_sys->session);
            CFRelease(p_sys->session);
            p_sys->session = nil;
        }

        p_sys->b_format_propagated = false;
    }

    if (p_sys->videoFormatDescription != nil) {
        CFRelease(p_sys->videoFormatDescription);
        p_sys->videoFormatDescription = nil;
    }
}

#pragma mark - module open and close

static int OpenDecoder(vlc_object_t *p_this)
{
    decoder_t *p_dec = (decoder_t *)p_this;

#if TARGET_OS_IPHONE
    if (unlikely([[UIDevice currentDevice].systemVersion floatValue] < 8.0)) {
        msg_Warn(p_dec, "decoder skipped as OS is too old");
        return VLC_EGENERIC;
    }
#endif

    if (p_dec->fmt_in.i_cat != VIDEO_ES)
        return VLC_EGENERIC;

    /* check quickly if we can digest the offered data */
    CMVideoCodecType codec;
    codec = CodecPrecheck(p_dec);
    if (codec == -1) {
        return VLC_EGENERIC;
    }

    /* now that we see a chance to decode anything, allocate the
     * internals and start the decoding session */
    decoder_sys_t *p_sys;
    p_sys = calloc(1, sizeof(*p_sys)); /* zeroed, so session and buffers start out nil */
    if (!p_sys)
        return VLC_ENOMEM;
    p_dec->p_sys = p_sys;
    p_sys->b_started = false;
    p_sys->b_is_avcc = false;
    p_sys->codec = codec;
    p_sys->videoFormatDescription = nil;

    int i_ret = StartVideoToolbox(p_dec, NULL);
    if (i_ret != VLC_SUCCESS) {
        CloseDecoder(p_this);
        return i_ret;
    }

    /* return our proper VLC internal state */
    p_dec->fmt_out.i_cat = p_dec->fmt_in.i_cat;
    p_dec->fmt_out.video = p_dec->fmt_in.video;
    p_dec->fmt_out.audio = p_dec->fmt_in.audio;
    if (p_sys->b_zero_copy) {
        msg_Dbg(p_dec, "zero-copy rendering pipeline enabled");
        p_dec->fmt_out.i_codec = VLC_CODEC_CVPX_OPAQUE;
    } else {
        msg_Dbg(p_dec, "copy rendering pipeline enabled");
        p_dec->fmt_out.i_codec = VLC_CODEC_I420;
    }

    p_dec->pf_decode_video = DecodeBlock;
    p_dec->pf_flush        = Flush;

    msg_Info(p_dec, "Using Video Toolbox to decode '%4.4s'", (char *)&p_dec->fmt_in.i_codec);

    return VLC_SUCCESS;
}

static void CloseDecoder(vlc_object_t *p_this)
{
    decoder_t *p_dec = (decoder_t *)p_this;
    decoder_sys_t *p_sys = p_dec->p_sys;

    if (p_sys->session && p_sys->b_started) {
        VTDecompressionSessionWaitForAsynchronousFrames(p_sys->session);
    }
    StopVideoToolbox(p_dec);

    free(p_sys);
}

#pragma mark - helpers

static BOOL deviceSupportsAdvancedProfiles()
{
#if TARGET_IPHONE_SIMULATOR
    return NO;
#endif
#if TARGET_OS_IPHONE
    size_t size;
    cpu_type_t type;

    size = sizeof(type);
    sysctlbyname("hw.cputype", &type, &size, NULL, 0);

    /* Support for H264 profile HIGH 10 was introduced with the first 64bit Apple ARM SoC, the A7 */
    if (type == CPU_TYPE_ARM64)
        return YES;

    return NO;
#else
    return NO;
#endif
}

static BOOL deviceSupportsAdvancedLevels()
{
#if TARGET_IPHONE_SIMULATOR
    return YES;
#endif
#if TARGET_OS_IPHONE
    size_t size;
    int32_t cpufamily;

    size = sizeof(cpufamily);
    sysctlbyname("hw.cpufamily", &cpufamily, &size, NULL, 0);

    /* Proper 4K decoding requires a Twister SoC
     * Everything below will kill the decoder daemon */
    if (cpufamily == CPUFAMILY_ARM_TWISTER) {
        return YES;
    }

    return NO;
#else
    return YES;
#endif
}

static inline void bo_add_mp4_tag_descr(bo_t *p_bo, uint8_t tag, uint32_t size)
{
    bo_add_8(p_bo, tag);
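    /* descriptor lengths use the MP4 expandable-size encoding: 7 bits per
     * byte, high bit set on every byte except the last; always emit 4 bytes */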
    for (int i = 3; i > 0; i--)
        bo_add_8(p_bo, (size>>(7*i)) | 0x80);
    bo_add_8(p_bo, size & 0x7F);
}

static CFDataRef ESDSCreate(decoder_t *p_dec, uint8_t *p_buf, uint32_t i_buf_size)
{
    int full_size = 3 + 5 + 13 + 5 + i_buf_size + 3;
    int config_size = 13 + 5 + i_buf_size;
    int padding = 12;

    bo_t bo;
    bool status = bo_init(&bo, 1024);
    if (status != true)
        return NULL;

    bo_add_8(&bo, 0);       // Version
    bo_add_24be(&bo, 0);    // Flags

    // elementary stream description tag
    bo_add_mp4_tag_descr(&bo, 0x03, full_size);
    bo_add_16be(&bo, 0);    // esid
    bo_add_8(&bo, 0);       // stream priority (0-3)

    // decoder configuration description tag
    bo_add_mp4_tag_descr(&bo, 0x04, config_size);
    bo_add_8(&bo, 32);      // object type identification (32 == MPEG4)
    bo_add_8(&bo, 0x11);    // stream type
    bo_add_24be(&bo, 0);    // buffer size
    bo_add_32be(&bo, 0);    // max bitrate
    bo_add_32be(&bo, 0);    // avg bitrate

    // decoder specific description tag
    bo_add_mp4_tag_descr(&bo, 0x05, i_buf_size);
    bo_add_mem(&bo, i_buf_size, p_buf);

    // sync layer configuration description tag
    bo_add_8(&bo, 0x06);    // tag
    bo_add_8(&bo, 0x01);    // length
    bo_add_8(&bo, 0x02);    // no SL

    CFDataRef data = CFDataCreate(kCFAllocatorDefault,
                                  bo.b->p_buffer,
                                  bo.b->i_buffer);
    bo_deinit(&bo);
    return data;
}

static block_t *H264ProcessBlock(decoder_t *p_dec, block_t *p_block)
{
    decoder_sys_t *p_sys = p_dec->p_sys;

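    /* VideoToolbox expects length-prefixed (xVC) NAL units, so convert
     * Annex B start codes unless the stream is already in AVCC form */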
    if (p_sys->b_is_avcc) /* FIXME: no change checks done for AVC ? */
        return p_block;

    return (p_block) ? hxxx_AnnexB_to_xVC(p_block, p_sys->i_nal_length_size) : NULL;
}

static CMSampleBufferRef VTSampleBufferCreate(decoder_t *p_dec,
                                              CMFormatDescriptionRef fmt_desc,
                                              void *buffer,
                                              size_t size,
                                              mtime_t i_pts,
                                              mtime_t i_dts,
                                              mtime_t i_length)
{
    OSStatus status;
    CMBlockBufferRef  block_buf = NULL;
    CMSampleBufferRef sample_buf = NULL;

    CMSampleTimingInfo timeInfo;
    CMSampleTimingInfo timeInfoArray[1];

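    /* VLC timestamps are in microseconds (CLOCK_FREQ ticks per second), so
     * they map directly to CMTime values with a CLOCK_FREQ timescale;
     * the DTS doubles as presentation time when no PTS is available */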
    timeInfo.duration = CMTimeMake(i_length, 1);
    timeInfo.presentationTimeStamp = CMTimeMake(i_pts > 0 ? i_pts : i_dts, CLOCK_FREQ);
    timeInfo.decodeTimeStamp = CMTimeMake(i_dts, CLOCK_FREQ);
    timeInfoArray[0] = timeInfo;

    status = CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, // structureAllocator
                                                buffer,             // memoryBlock
                                                size,               // blockLength
                                                kCFAllocatorNull,   // blockAllocator
                                                NULL,               // customBlockSource
                                                0,                  // offsetToData
                                                size,               // dataLength
                                                false,              // flags
                                                &block_buf);

    if (!status) {
        status = CMSampleBufferCreate(kCFAllocatorDefault,  // allocator
                                      block_buf,            // dataBuffer
                                      TRUE,                 // dataReady
                                      0,                    // makeDataReadyCallback
                                      0,                    // makeDataReadyRefcon
                                      fmt_desc,             // formatDescription
                                      1,                    // numSamples
                                      1,                    // numSampleTimingEntries
                                      timeInfoArray,        // sampleTimingArray
                                      0,                    // numSampleSizeEntries
                                      NULL,                 // sampleSizeArray
                                      &sample_buf);
        if (status != noErr)
            msg_Warn(p_dec, "sample buffer creation failure %i", status);
    } else
        msg_Warn(p_dec, "cm block buffer creation failure %i", status);

    if (block_buf != nil)
        CFRelease(block_buf);
    block_buf = nil;

    return sample_buf;
}

void VTDictionarySetInt32(CFMutableDictionaryRef dict, CFStringRef key, int value)
{
    CFNumberRef number;
    number = CFNumberCreate(NULL, kCFNumberSInt32Type, &value);
    CFDictionarySetValue(dict, key, number);
    CFRelease(number);
}

static void copy420YpCbCr8Planar(picture_t *p_pic,
                                 CVPixelBufferRef buffer,
                                 unsigned i_width,
                                 unsigned i_height)
{
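    /* copy both planes of the NV12 (4:2:0 bi-planar) pixel buffer into
     * VLC's I420 picture */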
    uint8_t *pp_plane[2];
    size_t pi_pitch[2];

    if (!buffer)
        return;

    CVPixelBufferLockBaseAddress(buffer, 0);

    for (int i = 0; i < 2; i++) {
        pp_plane[i] = CVPixelBufferGetBaseAddressOfPlane(buffer, i);
        pi_pitch[i] = CVPixelBufferGetBytesPerRowOfPlane(buffer, i);
    }

    CopyFromNv12ToI420(p_pic, pp_plane, pi_pitch, i_width, i_height);

    CVPixelBufferUnlockBaseAddress(buffer, 0);
}

#pragma mark - actual decoding

static void Flush(decoder_t *p_dec)
{
    decoder_sys_t *p_sys = p_dec->p_sys;

    if (likely(p_sys->b_started)) {
        @synchronized(p_sys->outputTimeStamps) {
            [p_sys->outputTimeStamps removeAllObjects];
        }
        @synchronized(p_sys->outputFrames) {
            [p_sys->outputFrames removeAllObjects];
        }
    }
}

static picture_t *DecodeBlock(decoder_t *p_dec, block_t **pp_block)
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    block_t *p_block;
    VTDecodeFrameFlags decoderFlags = 0;
    VTDecodeInfoFlags flagOut;
    OSStatus status;
    int i_ret = 0;

    if (!pp_block)
        return NULL;

    p_block = *pp_block;

    if (likely(p_block != NULL)) {
        if (unlikely(p_block->i_flags & BLOCK_FLAG_CORRUPTED)) {
            Flush(p_dec);
            block_Release(p_block);
            goto skip;
        }

        /* feed to vt */
        if (likely(p_block->i_buffer)) {
            if (!p_sys->b_started) {
                /* decoding didn't start yet, which is ok for H264, let's see
                 * if we can use this block to get going */
                p_sys->codec = kCMVideoCodecType_H264;
                i_ret = StartVideoToolbox(p_dec, p_block);
            }
            if (i_ret != VLC_SUCCESS || !p_sys->b_started) {
                *pp_block = NULL;
                return NULL;
            }

            if (p_sys->codec == kCMVideoCodecType_H264) {
                p_block = H264ProcessBlock(p_dec, p_block);
                if (!p_block)
                {
                    *pp_block = NULL;
                    return NULL;
                }
            }

            CMSampleBufferRef sampleBuffer;
            sampleBuffer = VTSampleBufferCreate(p_dec,
                                                p_sys->videoFormatDescription,
                                                p_block->p_buffer,
                                                p_block->i_buffer,
                                                p_block->i_pts,
                                                p_block->i_dts,
                                                p_block->i_length);
            if (likely(sampleBuffer)) {
                if (likely(!p_sys->b_enable_temporal_processing))
                    decoderFlags = kVTDecodeFrame_EnableAsynchronousDecompression;
                else
                    decoderFlags = kVTDecodeFrame_EnableAsynchronousDecompression | kVTDecodeFrame_EnableTemporalProcessing;

                status = VTDecompressionSessionDecodeFrame(p_sys->session,
                                                           sampleBuffer,
                                                           decoderFlags,
                                                           NULL, // sourceFrameRefCon
                                                           &flagOut); // infoFlagsOut
                if (status != noErr) {
                    if (status == kCVReturnInvalidSize)
                        msg_Err(p_dec, "decoder failure: invalid block size");
                    else if (status == -666)
                        msg_Err(p_dec, "decoder failure: invalid SPS/PPS");
                    else if (status == -6661) {
                        msg_Err(p_dec, "decoder failure: invalid argument");
                        p_dec->b_error = true;
                    } else if (status == -8969 || status == -12909) {
                        msg_Err(p_dec, "decoder failure: bad data (%i)", status);
                        StopVideoToolbox(p_dec);
                    } else if (status == -8960 || status == -12911) {
                        msg_Err(p_dec, "decoder failure: internal malfunction (%i)", status);
                        StopVideoToolbox(p_dec);
                    } else
                        msg_Dbg(p_dec, "decoding frame failed (%i)", status);
                }

                if (likely(sampleBuffer != nil))
                    CFRelease(sampleBuffer);
                sampleBuffer = nil;
            }
        }

        block_Release(p_block);
    }

skip:

    *pp_block = NULL;

    if (unlikely(!p_sys->b_started))
        return NULL;

    NSUInteger outputFramesCount = [p_sys->outputFrames count];

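    /* the session outputs frames asynchronously and possibly out of
     * presentation order, so keep a small backlog and only return the
     * frame with the lowest PTS once more than five frames are queued */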
    if (outputFramesCount > 5) {
        CVPixelBufferRef imageBuffer = NULL;
        id imageBufferObject = nil;
        picture_t *p_pic = NULL;

        NSString *timeStamp;
        @synchronized(p_sys->outputTimeStamps) {
            [p_sys->outputTimeStamps sortUsingComparator:^(id obj1, id obj2) {
                if ([obj1 longLongValue] > [obj2 longLongValue]) {
                    return (NSComparisonResult)NSOrderedDescending;
                }
                if ([obj1 longLongValue] < [obj2 longLongValue]) {
                    return (NSComparisonResult)NSOrderedAscending;
                }
                return (NSComparisonResult)NSOrderedSame;
            }];
            NSMutableArray *timeStamps = p_sys->outputTimeStamps;
            timeStamp = [timeStamps firstObject];
            if (timeStamps.count > 0) {
                [timeStamps removeObjectAtIndex:0];
            }
        }

        @synchronized(p_sys->outputFrames) {
            imageBufferObject = [p_sys->outputFrames objectForKey:timeStamp];
        }
        imageBuffer = (__bridge CVPixelBufferRef)imageBufferObject;

        if (imageBuffer != NULL) {
            if (CVPixelBufferGetDataSize(imageBuffer) > 0) {
                p_pic = decoder_NewPicture(p_dec);

                if (!p_pic)
                    return NULL;

                if (!p_sys->b_zero_copy) {
                    /* ehm, *cough*, memcpy.. */
                    copy420YpCbCr8Planar(p_pic,
                                         imageBuffer,
                                         CVPixelBufferGetWidthOfPlane(imageBuffer, 0),
                                         CVPixelBufferGetHeightOfPlane(imageBuffer, 0));
                } else {
                    /* the structure is allocated by the vout's pool */
                    if (p_pic->p_sys) {
                        /* if we received a recycled picture from the pool,
                         * we need to release the previous reference first,
                         * otherwise we would leak it */
                        if (p_pic->p_sys->pixelBuffer != nil) {
                            CFRelease(p_pic->p_sys->pixelBuffer);
                            p_pic->p_sys->pixelBuffer = nil;
                        }

                        p_pic->p_sys->pixelBuffer = CFBridgingRetain(imageBufferObject);
                    }
                    /* will be freed by the vout */
                }

                p_pic->date = timeStamp.longLongValue;

                if (imageBufferObject) {
                    @synchronized(p_sys->outputFrames) {
                        [p_sys->outputFrames removeObjectForKey:timeStamp];
                    }
                }
            }
        }
        return p_pic;
    }

    return NULL;
}

static void DecoderCallback(void *decompressionOutputRefCon,
                            void *sourceFrameRefCon,
                            OSStatus status,
                            VTDecodeInfoFlags infoFlags,
                            CVPixelBufferRef imageBuffer,
                            CMTime pts,
                            CMTime duration)
{
    VLC_UNUSED(sourceFrameRefCon);
    VLC_UNUSED(duration);
    decoder_t *p_dec = (decoder_t *)decompressionOutputRefCon;
    decoder_sys_t *p_sys = p_dec->p_sys;

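    /* on the first decoded frame, propagate the color space, primaries,
     * transfer function and chroma location advertised by the pixel buffer
     * attachments to the output format, so the video output can display
     * the picture correctly */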
    if (unlikely(!p_sys->b_format_propagated)) {
        CFDictionaryRef attachments = CVBufferGetAttachments(imageBuffer, kCVAttachmentMode_ShouldPropagate);
        NSDictionary *attachmentDict = (NSDictionary *)attachments;
#ifndef NDEBUG
        NSLog(@"%@", attachments);
#endif
        if (attachmentDict != nil) {
            if (attachmentDict.count > 0) {
                p_sys->b_format_propagated = true;

                NSString *colorSpace = attachmentDict[(NSString *)kCVImageBufferYCbCrMatrixKey];
                if (colorSpace != nil) {
                    if ([colorSpace isEqualToString:(NSString *)kCVImageBufferYCbCrMatrix_ITU_R_601_4])
                        p_dec->fmt_out.video.space = COLOR_SPACE_BT601;
                    else if ([colorSpace isEqualToString:(NSString *)kCVImageBufferYCbCrMatrix_ITU_R_709_2])
                        p_dec->fmt_out.video.space = COLOR_SPACE_BT709;
                    else
                        p_dec->fmt_out.video.space = COLOR_SPACE_UNDEF;
                }

                NSString *colorprimary = attachmentDict[(NSString *)kCVImageBufferColorPrimariesKey];
                if (colorprimary != nil) {
                    if ([colorprimary isEqualToString:(NSString *)kCVImageBufferColorPrimaries_SMPTE_C] ||
                        [colorprimary isEqualToString:(NSString *)kCVImageBufferColorPrimaries_EBU_3213])
                        p_dec->fmt_out.video.primaries = COLOR_PRIMARIES_BT601_625;
                    else if ([colorprimary isEqualToString:(NSString *)kCVImageBufferColorPrimaries_ITU_R_709_2])
                        p_dec->fmt_out.video.primaries = COLOR_PRIMARIES_BT709;
                    else if ([colorprimary isEqualToString:(NSString *)kCVImageBufferColorPrimaries_P22])
                        p_dec->fmt_out.video.primaries = COLOR_PRIMARIES_DCI_P3;
                    else
                        p_dec->fmt_out.video.primaries = COLOR_PRIMARIES_UNDEF;
                }

                NSString *transfer = attachmentDict[(NSString *)kCVImageBufferTransferFunctionKey];
                if (transfer != nil) {
                    if ([transfer isEqualToString:(NSString *)kCVImageBufferTransferFunction_ITU_R_709_2] ||
                        [transfer isEqualToString:(NSString *)kCVImageBufferTransferFunction_SMPTE_240M_1995])
                        p_dec->fmt_out.video.transfer = TRANSFER_FUNC_BT709;
                    else
                        p_dec->fmt_out.video.transfer = TRANSFER_FUNC_UNDEF;
                }

                NSString *chromaLocation = attachmentDict[(NSString *)kCVImageBufferChromaLocationTopFieldKey];
                if (chromaLocation != nil) {
                    if ([chromaLocation isEqualToString:(NSString *)kCVImageBufferChromaLocation_Left] ||
                        [chromaLocation isEqualToString:(NSString *)kCVImageBufferChromaLocation_DV420])
                        p_dec->fmt_out.video.chroma_location = CHROMA_LOCATION_LEFT;
                    else if ([chromaLocation isEqualToString:(NSString *)kCVImageBufferChromaLocation_Center])
                        p_dec->fmt_out.video.chroma_location = CHROMA_LOCATION_CENTER;
                    else if ([chromaLocation isEqualToString:(NSString *)kCVImageBufferChromaLocation_TopLeft])
                        p_dec->fmt_out.video.chroma_location = CHROMA_LOCATION_TOP_LEFT;
                    else if ([chromaLocation isEqualToString:(NSString *)kCVImageBufferChromaLocation_Top])
                        p_dec->fmt_out.video.chroma_location = CHROMA_LOCATION_TOP_CENTER;
                    else
                        p_dec->fmt_out.video.chroma_location = CHROMA_LOCATION_UNDEF;
                }
                if (p_dec->fmt_out.video.chroma_location == CHROMA_LOCATION_UNDEF) {
                    chromaLocation = attachmentDict[(NSString *)kCVImageBufferChromaLocationBottomFieldKey];
                    if (chromaLocation != nil) {
                        if ([chromaLocation isEqualToString:(NSString *)kCVImageBufferChromaLocation_BottomLeft])
                            p_dec->fmt_out.video.chroma_location = CHROMA_LOCATION_BOTTOM_LEFT;
                        else if ([chromaLocation isEqualToString:(NSString *)kCVImageBufferChromaLocation_Bottom])
                            p_dec->fmt_out.video.chroma_location = CHROMA_LOCATION_BOTTOM_CENTER;
                    }
                }
                decoder_UpdateVideoFormat(p_dec);
            }
        }
    }

    if (status != noErr) {
        msg_Warn(p_dec, "decoding of a frame failed (%i, %u)", status, (unsigned int) infoFlags);
        return;
    }

    if (imageBuffer == nil)
        return;

    if (infoFlags & kVTDecodeInfo_FrameDropped) {
        msg_Dbg(p_dec, "decoder dropped frame");
        if (imageBuffer != nil)
            CFRelease(imageBuffer);
        imageBuffer = nil;
        return;
    }

    NSString *timeStamp = nil;

    if (CMTIME_IS_VALID(pts))
        timeStamp = [[NSNumber numberWithLongLong:pts.value] stringValue];
    else {
        msg_Dbg(p_dec, "invalid timestamp, dropping frame");
        CFRelease(imageBuffer);
        return;
    }

    if (timeStamp) {
        id imageBufferObject = (__bridge id)imageBuffer;
        @synchronized(p_sys->outputTimeStamps) {
            [p_sys->outputTimeStamps addObject:timeStamp];
        }
        @synchronized(p_sys->outputFrames) {
            [p_sys->outputFrames setObject:imageBufferObject forKey:timeStamp];
        }
    }
}