lavd/avfoundation: Fix skewed video output
diff --git a/libavdevice/avfoundation.m b/libavdevice/avfoundation.m
index b6420fb..98552ac 100644
@@ -33,7 +33,9 @@
 #include "libavutil/avstring.h"
 #include "libavformat/internal.h"
 #include "libavutil/internal.h"
+#include "libavutil/parseutils.h"
 #include "libavutil/time.h"
+#include "libavutil/imgutils.h"
 #include "avdevice.h"
 
 static const int avf_time_base = 1000000;
@@ -71,7 +73,7 @@ static const struct AVFPixelFormatSpec avf_pixel_formats[] = {
     { AV_PIX_FMT_YUV420P,      kCVPixelFormatType_420YpCbCr8Planar },
     { AV_PIX_FMT_NV12,         kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange },
     { AV_PIX_FMT_YUYV422,      kCVPixelFormatType_422YpCbCr8_yuvs },
-#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
+#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
     { AV_PIX_FMT_GRAY8,        kCVPixelFormatType_OneComponent8 },
 #endif
     { AV_PIX_FMT_NONE, 0 }
@@ -90,6 +92,12 @@ typedef struct
     id              avf_delegate;
     id              avf_audio_delegate;
 
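+    /* Capture parameters requested by the user (exposed as AVOptions below). */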
+    AVRational      framerate;
+    int             width, height;
+
+    int             capture_cursor;
+    int             capture_mouse_clicks;
+
     int             list_devices;
     int             video_device_index;
     int             video_stream_index;
@@ -252,7 +260,7 @@ static void destroy_context(AVFContext* ctx)
 static void parse_device_name(AVFormatContext *s)
 {
     AVFContext *ctx = (AVFContext*)s->priv_data;
-    char *tmp = av_strdup(s->filename);
+    char *tmp = av_strdup(s->url);
     char *save;
 
     if (tmp[0] != ':') {
@@ -263,9 +271,103 @@ static void parse_device_name(AVFormatContext *s)
     }
 }
 
+/**
+ * Configure the video device.
+ *
+ * Configure the video device using a run-time approach to access properties
+ * since formats and activeFormat are only available since iOS >= 7.0 or OSX >= 10.7
+ * and activeVideoMaxFrameDuration is only available since iOS >= 7.0 and OSX >= 10.9.
+ *
+ * The NSUndefinedKeyException must be handled by the caller of this function.
+ *
+ */
+static int configure_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
+{
+    AVFContext *ctx = (AVFContext*)s->priv_data;
+
+    double framerate = av_q2d(ctx->framerate);
+    NSObject *range = nil;
+    NSObject *format = nil;
+    NSObject *selected_range = nil;
+    NSObject *selected_format = nil;
+
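+    // Walk the device formats via KVC and pick the one whose dimensions match
+    // the requested video size (any format if no size was requested), then look
+    // for a frame-rate range whose maximum matches the requested frame rate.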
+    for (format in [video_device valueForKey:@"formats"]) {
+        CMFormatDescriptionRef formatDescription;
+        CMVideoDimensions dimensions;
+
+        formatDescription = (CMFormatDescriptionRef) [format performSelector:@selector(formatDescription)];
+        dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
+
+        if ((ctx->width == 0 && ctx->height == 0) ||
+            (dimensions.width == ctx->width && dimensions.height == ctx->height)) {
+
+            selected_format = format;
+
+            for (range in [format valueForKey:@"videoSupportedFrameRateRanges"]) {
+                double max_framerate;
+
+                [[range valueForKey:@"maxFrameRate"] getValue:&max_framerate];
+                if (fabs (framerate - max_framerate) < 0.01) {
+                    selected_range = range;
+                    break;
+                }
+            }
+        }
+    }
+
+    if (!selected_format) {
+        av_log(s, AV_LOG_ERROR, "Selected video size (%dx%d) is not supported by the device\n",
+            ctx->width, ctx->height);
+        goto unsupported_format;
+    }
+
+    if (!selected_range) {
+        av_log(s, AV_LOG_ERROR, "Selected framerate (%f) is not supported by the device\n",
+            framerate);
+        goto unsupported_format;
+    }
+
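+    // Apply the selection. The device stays locked here and is only unlocked in
+    // avf_read_header() after the capture session has started, so that starting
+    // the session does not reset the chosen format.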
+    if ([video_device lockForConfiguration:NULL] == YES) {
+        NSValue *min_frame_duration = [selected_range valueForKey:@"minFrameDuration"];
+
+        [video_device setValue:selected_format forKey:@"activeFormat"];
+        [video_device setValue:min_frame_duration forKey:@"activeVideoMinFrameDuration"];
+        [video_device setValue:min_frame_duration forKey:@"activeVideoMaxFrameDuration"];
+    } else {
+        av_log(s, AV_LOG_ERROR, "Could not lock device for configuration\n");
+        return AVERROR(EINVAL);
+    }
+
+    return 0;
+
+unsupported_format:
+
+    av_log(s, AV_LOG_ERROR, "Supported modes:\n");
+    for (format in [video_device valueForKey:@"formats"]) {
+        CMFormatDescriptionRef formatDescription;
+        CMVideoDimensions dimensions;
+
+        formatDescription = (CMFormatDescriptionRef) [format performSelector:@selector(formatDescription)];
+        dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
+
+        for (range in [format valueForKey:@"videoSupportedFrameRateRanges"]) {
+            double min_framerate;
+            double max_framerate;
+
+            [[range valueForKey:@"minFrameRate"] getValue:&min_framerate];
+            [[range valueForKey:@"maxFrameRate"] getValue:&max_framerate];
+            av_log(s, AV_LOG_ERROR, "  %dx%d@[%f %f]fps\n",
+                dimensions.width, dimensions.height,
+                min_framerate, max_framerate);
+        }
+    }
+    return AVERROR(EINVAL);
+}
+
 static int add_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
 {
     AVFContext *ctx = (AVFContext*)s->priv_data;
+    int ret;
     NSError *error  = nil;
     AVCaptureInput* capture_input = nil;
     struct AVFPixelFormatSpec pxl_fmt_spec;
@@ -300,6 +402,18 @@ static int add_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
         return 1;
     }
 
+    // Configure device framerate and video size
+    @try {
+        if ((ret = configure_video_device(s, video_device)) < 0) {
+            return ret;
+        }
+    } @catch (NSException *exception) {
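+        // An NSUndefinedKeyException means the KVC properties used above are not
+        // available on this OS version; keep the device defaults in that case.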
+        if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
+            av_log(s, AV_LOG_ERROR, "An error occurred: %s\n", [exception.reason UTF8String]);
+            return AVERROR_EXTERNAL;
+        }
+    }
+
     // select pixel format
     pxl_fmt_spec.ff_id = AV_PIX_FMT_NONE;
 
@@ -447,11 +561,11 @@ static int get_video_config(AVFormatContext *s)
     image_buffer      = CMSampleBufferGetImageBuffer(ctx->current_frame);
     image_buffer_size = CVImageBufferGetEncodedSize(image_buffer);
 
-    stream->codec->codec_id   = AV_CODEC_ID_RAWVIDEO;
-    stream->codec->codec_type = AVMEDIA_TYPE_VIDEO;
-    stream->codec->width      = (int)image_buffer_size.width;
-    stream->codec->height     = (int)image_buffer_size.height;
-    stream->codec->pix_fmt    = ctx->pixel_format;
+    stream->codecpar->codec_id   = AV_CODEC_ID_RAWVIDEO;
+    stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
+    stream->codecpar->width      = (int)image_buffer_size.width;
+    stream->codecpar->height     = (int)image_buffer_size.height;
+    stream->codecpar->format     = ctx->pixel_format;
 
     CFRelease(ctx->current_frame);
     ctx->current_frame = nil;
@@ -490,10 +604,10 @@ static int get_audio_config(AVFormatContext *s)
         return 1;
     }
 
-    stream->codec->codec_type     = AVMEDIA_TYPE_AUDIO;
-    stream->codec->sample_rate    = basic_desc->mSampleRate;
-    stream->codec->channels       = basic_desc->mChannelsPerFrame;
-    stream->codec->channel_layout = av_get_default_channel_layout(stream->codec->channels);
+    stream->codecpar->codec_type     = AVMEDIA_TYPE_AUDIO;
+    stream->codecpar->sample_rate    = basic_desc->mSampleRate;
+    stream->codecpar->channels       = basic_desc->mChannelsPerFrame;
+    stream->codecpar->channel_layout = av_get_default_channel_layout(stream->codecpar->channels);
 
     ctx->audio_channels        = basic_desc->mChannelsPerFrame;
     ctx->audio_bits_per_sample = basic_desc->mBitsPerChannel;
@@ -507,22 +621,22 @@ static int get_audio_config(AVFormatContext *s)
         ctx->audio_float &&
         ctx->audio_bits_per_sample == 32 &&
         ctx->audio_packed) {
-        stream->codec->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_F32BE : AV_CODEC_ID_PCM_F32LE;
+        stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_F32BE : AV_CODEC_ID_PCM_F32LE;
     } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
         ctx->audio_signed_integer &&
         ctx->audio_bits_per_sample == 16 &&
         ctx->audio_packed) {
-        stream->codec->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S16BE : AV_CODEC_ID_PCM_S16LE;
+        stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S16BE : AV_CODEC_ID_PCM_S16LE;
     } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
         ctx->audio_signed_integer &&
         ctx->audio_bits_per_sample == 24 &&
         ctx->audio_packed) {
-        stream->codec->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S24BE : AV_CODEC_ID_PCM_S24LE;
+        stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S24BE : AV_CODEC_ID_PCM_S24LE;
     } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
         ctx->audio_signed_integer &&
         ctx->audio_bits_per_sample == 32 &&
         ctx->audio_packed) {
-        stream->codec->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S32BE : AV_CODEC_ID_PCM_S32LE;
+        stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S32BE : AV_CODEC_ID_PCM_S32LE;
     } else {
         av_log(s, AV_LOG_ERROR, "audio format is not supported\n");
         return 1;
@@ -549,6 +663,7 @@ static int get_audio_config(AVFormatContext *s)
 static int avf_read_header(AVFormatContext *s)
 {
     NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
+    int capture_screen      = 0;
     uint32_t num_screens    = 0;
     AVFContext *ctx         = (AVFContext*)s->priv_data;
     AVCaptureDevice *video_device = nil;
@@ -563,7 +678,7 @@ static int avf_read_header(AVFormatContext *s)
     pthread_mutex_init(&ctx->frame_lock, NULL);
     pthread_cond_init(&ctx->frame_wait_cond, NULL);
 
-#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
+#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
     CGGetActiveDisplayList(0, NULL, &num_screens);
 #endif
 
@@ -577,7 +692,7 @@ static int avf_read_header(AVFormatContext *s)
             av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
             index++;
         }
-#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
+#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
         if (num_screens > 0) {
             CGDirectDisplayID screens[num_screens];
             CGGetActiveDisplayList(num_screens, screens, &num_screens);
@@ -612,11 +727,31 @@ static int avf_read_header(AVFormatContext *s)
         if (ctx->video_device_index < ctx->num_video_devices) {
             video_device = [devices objectAtIndex:ctx->video_device_index];
         } else if (ctx->video_device_index < ctx->num_video_devices + num_screens) {
-#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
+#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
             CGDirectDisplayID screens[num_screens];
             CGGetActiveDisplayList(num_screens, screens, &num_screens);
             AVCaptureScreenInput* capture_screen_input = [[[AVCaptureScreenInput alloc] initWithDisplayID:screens[ctx->video_device_index - ctx->num_video_devices]] autorelease];
+
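+            // Screen inputs are configured directly here (frame rate, cursor and
+            // mouse-click capture); camera devices go through configure_video_device().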
+            if (ctx->framerate.num > 0) {
+                capture_screen_input.minFrameDuration = CMTimeMake(ctx->framerate.den, ctx->framerate.num);
+            }
+
+#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
+            if (ctx->capture_cursor) {
+                capture_screen_input.capturesCursor = YES;
+            } else {
+                capture_screen_input.capturesCursor = NO;
+            }
+#endif
+
+            if (ctx->capture_mouse_clicks) {
+                capture_screen_input.capturesMouseClicks = YES;
+            } else {
+                capture_screen_input.capturesMouseClicks = NO;
+            }
+
             video_device = (AVCaptureDevice*) capture_screen_input;
+            capture_screen = 1;
 #endif
          } else {
             av_log(ctx, AV_LOG_ERROR, "Invalid device index\n");
@@ -635,7 +770,7 @@ static int avf_read_header(AVFormatContext *s)
             }
         }
 
-#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
+#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
         // looking for screen inputs
         if (!video_device) {
             int idx;
@@ -645,6 +780,25 @@ static int avf_read_header(AVFormatContext *s)
                 AVCaptureScreenInput* capture_screen_input = [[[AVCaptureScreenInput alloc] initWithDisplayID:screens[idx]] autorelease];
                 video_device = (AVCaptureDevice*) capture_screen_input;
                 ctx->video_device_index = ctx->num_video_devices + idx;
+                capture_screen = 1;
+
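+                // Apply the same screen-capture options as for index-selected screens.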
+                if (ctx->framerate.num > 0) {
+                    capture_screen_input.minFrameDuration = CMTimeMake(ctx->framerate.den, ctx->framerate.num);
+                }
+
+#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
+                if (ctx->capture_cursor) {
+                    capture_screen_input.capturesCursor = YES;
+                } else {
+                    capture_screen_input.capturesCursor = NO;
+                }
+#endif
+
+                if (ctx->capture_mouse_clicks) {
+                    capture_screen_input.capturesMouseClicks = YES;
+                } else {
+                    capture_screen_input.capturesMouseClicks = NO;
+                }
             }
         }
 #endif
@@ -715,6 +869,12 @@ static int avf_read_header(AVFormatContext *s)
 
     [ctx->capture_session startRunning];
 
+    /* Unlock device configuration only after the session is started so it
+     * does not reset the capture formats */
+    if (!capture_screen) {
+        [video_device unlockForConfiguration];
+    }
+
     if (video_device && get_video_config(s)) {
         goto fail;
     }
@@ -733,6 +893,49 @@ fail:
     return AVERROR(EIO);
 }
 
+static int copy_cvpixelbuffer(AVFormatContext *s,
+                               CVPixelBufferRef image_buffer,
+                               AVPacket *pkt)
+{
+    AVFContext *ctx = s->priv_data;
+    int src_linesize[4];
+    const uint8_t *src_data[4];
+    int width  = CVPixelBufferGetWidth(image_buffer);
+    int height = CVPixelBufferGetHeight(image_buffer);
+    int status;
+
+    memset(src_linesize, 0, sizeof(src_linesize));
+    memset(src_data, 0, sizeof(src_data));
+
+    status = CVPixelBufferLockBaseAddress(image_buffer, 0);
+    if (status != kCVReturnSuccess) {
+        av_log(s, AV_LOG_ERROR, "Could not lock base address: %d\n", status);
+        return AVERROR_EXTERNAL;
+    }
+
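+    // Collect per-plane base addresses and row strides; packed formats expose a
+    // single plane at the buffer base address.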
+    if (CVPixelBufferIsPlanar(image_buffer)) {
+        size_t plane_count = CVPixelBufferGetPlaneCount(image_buffer);
+        int i;
+        for (i = 0; i < plane_count; i++) {
+            src_linesize[i] = CVPixelBufferGetBytesPerRowOfPlane(image_buffer, i);
+            src_data[i] = CVPixelBufferGetBaseAddressOfPlane(image_buffer, i);
+        }
+    } else {
+        src_linesize[0] = CVPixelBufferGetBytesPerRow(image_buffer);
+        src_data[0] = CVPixelBufferGetBaseAddress(image_buffer);
+    }
+
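+    // Repack the (possibly row-padded) CoreVideo planes into the tightly packed
+    // layout of the packet buffer.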
+    status = av_image_copy_to_buffer(pkt->data, pkt->size,
+                                     src_data, src_linesize,
+                                     ctx->pixel_format, width, height, 1);
+
+    CVPixelBufferUnlockBaseAddress(image_buffer, 0);
+
+    return status;
+}
+
 static int avf_read_packet(AVFormatContext *s, AVPacket *pkt)
 {
     AVFContext* ctx = (AVFContext*)s->priv_data;
@@ -744,25 +947,28 @@ static int avf_read_packet(AVFormatContext *s, AVPacket *pkt)
         image_buffer = CMSampleBufferGetImageBuffer(ctx->current_frame);
 
         if (ctx->current_frame != nil) {
-            void *data;
+            int status;
             if (av_new_packet(pkt, (int)CVPixelBufferGetDataSize(image_buffer)) < 0) {
                 return AVERROR(EIO);
             }
 
-            pkt->pts = pkt->dts = av_rescale_q(av_gettime() - ctx->first_pts,
-                                               AV_TIME_BASE_Q,
-                                               avf_time_base_q);
-            pkt->stream_index  = ctx->video_stream_index;
-            pkt->flags        |= AV_PKT_FLAG_KEY;
+            CMItemCount count;
+            CMSampleTimingInfo timing_info;
 
-            CVPixelBufferLockBaseAddress(image_buffer, 0);
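+            // Take pts/dts from the sample buffer's own presentation timestamp,
+            // rescaled to the 1 MHz avf_time_base, rather than from wall-clock time.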
+            if (CMSampleBufferGetOutputSampleTimingInfoArray(ctx->current_frame, 1, &timing_info, &count) == noErr) {
+                AVRational timebase_q = av_make_q(1, timing_info.presentationTimeStamp.timescale);
+                pkt->pts = pkt->dts = av_rescale_q(timing_info.presentationTimeStamp.value, timebase_q, avf_time_base_q);
+            }
 
-            data = CVPixelBufferGetBaseAddress(image_buffer);
-            memcpy(pkt->data, data, pkt->size);
+            pkt->stream_index  = ctx->video_stream_index;
+            pkt->flags        |= AV_PKT_FLAG_KEY;
 
-            CVPixelBufferUnlockBaseAddress(image_buffer, 0);
+            status = copy_cvpixelbuffer(s, image_buffer, pkt);
             CFRelease(ctx->current_frame);
             ctx->current_frame = nil;
+
+            if (status < 0)
+                return status;
         } else if (ctx->current_audio_frame != nil) {
             CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(ctx->current_audio_frame);
             int block_buffer_size         = CMBlockBufferGetDataLength(block_buffer);
@@ -779,9 +985,13 @@ static int avf_read_packet(AVFormatContext *s, AVPacket *pkt)
                 return AVERROR(EIO);
             }
 
-            pkt->pts = pkt->dts = av_rescale_q(av_gettime() - ctx->first_audio_pts,
-                                               AV_TIME_BASE_Q,
-                                               avf_time_base_q);
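+            // As for video, derive the audio timestamp from the sample buffer itself.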
+            CMItemCount count;
+            CMSampleTimingInfo timing_info;
+
+            if (CMSampleBufferGetOutputSampleTimingInfoArray(ctx->current_audio_frame, 1, &timing_info, &count) == noErr) {
+                AVRational timebase_q = av_make_q(1, timing_info.presentationTimeStamp.timescale);
+                pkt->pts = pkt->dts = av_rescale_q(timing_info.presentationTimeStamp.value, timebase_q, avf_time_base_q);
+            }
 
             pkt->stream_index  = ctx->audio_stream_index;
             pkt->flags        |= AV_PKT_FLAG_KEY;
@@ -853,6 +1063,11 @@ static const AVOption options[] = {
     { "video_device_index", "select video device by index for devices with same name (starts at 0)", offsetof(AVFContext, video_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
     { "audio_device_index", "select audio device by index for devices with same name (starts at 0)", offsetof(AVFContext, audio_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
     { "pixel_format", "set pixel format", offsetof(AVFContext, pixel_format), AV_OPT_TYPE_PIXEL_FMT, {.i64 = AV_PIX_FMT_YUV420P}, 0, INT_MAX, AV_OPT_FLAG_DECODING_PARAM},
+    { "framerate", "set frame rate", offsetof(AVFContext, framerate), AV_OPT_TYPE_VIDEO_RATE, {.str = "ntsc"}, 0, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
+    { "video_size", "set video size", offsetof(AVFContext, width), AV_OPT_TYPE_IMAGE_SIZE, {.str = NULL}, 0, 0, AV_OPT_FLAG_DECODING_PARAM },
+    { "capture_cursor", "capture the screen cursor", offsetof(AVFContext, capture_cursor), AV_OPT_TYPE_INT, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
+    { "capture_mouse_clicks", "capture the screen mouse clicks", offsetof(AVFContext, capture_mouse_clicks), AV_OPT_TYPE_INT, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
     { NULL },
 };