ffplay: fix greenish line on the right edge with some xv sizes
[ffmpeg.git] / ffplay.c
1 /*
2  * Copyright (c) 2003 Fabrice Bellard
3  *
4  * This file is part of FFmpeg.
5  *
6  * FFmpeg is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * FFmpeg is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with FFmpeg; if not, write to the Free Software
18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19  */
20
21 /**
22  * @file
23  * simple media player based on the FFmpeg libraries
24  */
25
26 #include "config.h"
27 #include <inttypes.h>
28 #include <math.h>
29 #include <limits.h>
30 #include <signal.h>
31 #include "libavutil/avstring.h"
32 #include "libavutil/colorspace.h"
33 #include "libavutil/mathematics.h"
34 #include "libavutil/pixdesc.h"
35 #include "libavutil/imgutils.h"
36 #include "libavutil/dict.h"
37 #include "libavutil/parseutils.h"
38 #include "libavutil/samplefmt.h"
39 #include "libavutil/avassert.h"
40 #include "libavutil/time.h"
41 #include "libavformat/avformat.h"
42 #include "libavdevice/avdevice.h"
43 #include "libswscale/swscale.h"
44 #include "libavutil/opt.h"
45 #include "libavcodec/avfft.h"
46 #include "libswresample/swresample.h"
47
48 #if CONFIG_AVFILTER
49 # include "libavfilter/avcodec.h"
50 # include "libavfilter/avfilter.h"
51 # include "libavfilter/avfiltergraph.h"
52 # include "libavfilter/buffersink.h"
53 # include "libavfilter/buffersrc.h"
54 #endif
55
56 #include <SDL.h>
57 #include <SDL_thread.h>
58
59 #include "cmdutils.h"
60
61 #include <assert.h>
62
63 const char program_name[] = "ffplay";
64 const int program_birth_year = 2003;
65
66 #define MAX_QUEUE_SIZE (15 * 1024 * 1024)
67 #define MIN_FRAMES 5
68
69 /* SDL audio buffer size, in samples. Should be small to have precise
70    A/V sync as SDL does not have hardware buffer fullness info. */
71 #define SDL_AUDIO_BUFFER_SIZE 1024
72
73 /* no AV sync correction is done if below the AV sync threshold */
74 #define AV_SYNC_THRESHOLD 0.01
75 /* no AV correction is done if too big error */
76 #define AV_NOSYNC_THRESHOLD 10.0
77
78 /* maximum audio speed change to get correct sync */
79 #define SAMPLE_CORRECTION_PERCENT_MAX 10
80
81 /* external clock speed adjustment constants for realtime sources based on buffer fullness */
82 #define EXTERNAL_CLOCK_SPEED_MIN  0.900
83 #define EXTERNAL_CLOCK_SPEED_MAX  1.010
84 #define EXTERNAL_CLOCK_SPEED_STEP 0.001
85
86 /* we use about AUDIO_DIFF_AVG_NB A-V differences to make the average */
87 #define AUDIO_DIFF_AVG_NB   20
88
89 /* NOTE: the size must be big enough to compensate for the hardware audio buffer size */
90 /* TODO: We assume that a decoded and resampled frame fits into this buffer */
91 #define SAMPLE_ARRAY_SIZE (8 * 65536)
92
93 static int64_t sws_flags = SWS_BICUBIC;
94
95 typedef struct MyAVPacketList {
96     AVPacket pkt;
97     struct MyAVPacketList *next;
98     int serial;
99 } MyAVPacketList;
100
101 typedef struct PacketQueue {
102     MyAVPacketList *first_pkt, *last_pkt;
103     int nb_packets;
104     int size;
105     int abort_request;
106     int serial;
107     SDL_mutex *mutex;
108     SDL_cond *cond;
109 } PacketQueue;
110
111 #define VIDEO_PICTURE_QUEUE_SIZE 4
112 #define SUBPICTURE_QUEUE_SIZE 4
113
114 typedef struct VideoPicture {
115     double pts;             // presentation timestamp for this picture
116     int64_t pos;            // byte position in file
117     int skip;
118     SDL_Overlay *bmp;
119     int width, height; /* source height & width */
120     AVRational sample_aspect_ratio;
121     int allocated;
122     int reallocate;
123     int serial;
124
125 #if CONFIG_AVFILTER
126     AVFilterBufferRef *picref;
127 #endif
128 } VideoPicture;
129
130 typedef struct SubPicture {
131     double pts; /* presentation time stamp for this picture */
132     AVSubtitle sub;
133 } SubPicture;
134
135 typedef struct AudioParams {
136     int freq;
137     int channels;
138     int64_t channel_layout;
139     enum AVSampleFormat fmt;
140 } AudioParams;
141
142 enum {
143     AV_SYNC_AUDIO_MASTER, /* default choice */
144     AV_SYNC_VIDEO_MASTER,
145     AV_SYNC_EXTERNAL_CLOCK, /* synchronize to an external clock */
146 };
147
148 typedef struct VideoState {
149     SDL_Thread *read_tid;
150     SDL_Thread *video_tid;
151     SDL_Thread *refresh_tid;
152     AVInputFormat *iformat;
153     int no_background;
154     int abort_request;
155     int force_refresh;
156     int paused;
157     int last_paused;
158     int que_attachments_req;
159     int seek_req;
160     int seek_flags;
161     int64_t seek_pos;
162     int64_t seek_rel;
163     int read_pause_return;
164     AVFormatContext *ic;
165     int realtime;
166
167     int audio_stream;
168
169     int av_sync_type;
170     double external_clock;                   ///< external clock base
171     double external_clock_drift;             ///< external clock base - time (av_gettime) at which we updated external_clock
172     int64_t external_clock_time;             ///< last reference time
173     double external_clock_speed;             ///< speed of the external clock
174
175     double audio_clock;
176     double audio_diff_cum; /* used for AV difference average computation */
177     double audio_diff_avg_coef;
178     double audio_diff_threshold;
179     int audio_diff_avg_count;
180     AVStream *audio_st;
181     PacketQueue audioq;
182     int audio_hw_buf_size;
183     DECLARE_ALIGNED(16,uint8_t,audio_buf2)[AVCODEC_MAX_AUDIO_FRAME_SIZE * 4];
184     uint8_t silence_buf[SDL_AUDIO_BUFFER_SIZE];
185     uint8_t *audio_buf;
186     uint8_t *audio_buf1;
187     unsigned int audio_buf_size; /* in bytes */
188     int audio_buf_index; /* in bytes */
189     int audio_write_buf_size;
190     AVPacket audio_pkt_temp;
191     AVPacket audio_pkt;
192     int audio_pkt_temp_serial;
193     struct AudioParams audio_src;
194     struct AudioParams audio_tgt;
195     struct SwrContext *swr_ctx;
196     double audio_current_pts;
197     double audio_current_pts_drift;
198     int frame_drops_early;
199     int frame_drops_late;
200     AVFrame *frame;
201
202     enum ShowMode {
203         SHOW_MODE_NONE = -1, SHOW_MODE_VIDEO = 0, SHOW_MODE_WAVES, SHOW_MODE_RDFT, SHOW_MODE_NB
204     } show_mode;
205     int16_t sample_array[SAMPLE_ARRAY_SIZE];
206     int sample_array_index;
207     int last_i_start;
208     RDFTContext *rdft;
209     int rdft_bits;
210     FFTSample *rdft_data;
211     int xpos;
212
213     SDL_Thread *subtitle_tid;
214     int subtitle_stream;
215     int subtitle_stream_changed;
216     AVStream *subtitle_st;
217     PacketQueue subtitleq;
218     SubPicture subpq[SUBPICTURE_QUEUE_SIZE];
219     int subpq_size, subpq_rindex, subpq_windex;
220     SDL_mutex *subpq_mutex;
221     SDL_cond *subpq_cond;
222
223     double frame_timer;
224     double frame_last_pts;
225     double frame_last_duration;
226     double frame_last_dropped_pts;
227     double frame_last_returned_time;
228     double frame_last_filter_delay;
229     int64_t frame_last_dropped_pos;
230     double video_clock;             // pts of last decoded frame / predicted pts of next decoded frame
231     int video_stream;
232     AVStream *video_st;
233     PacketQueue videoq;
234     double video_current_pts;       // current displayed pts (different from video_clock if frame fifos are used)
235     double video_current_pts_drift; // video_current_pts - time (av_gettime) at which we updated video_current_pts - used to have running video pts
236     int64_t video_current_pos;      // current displayed file pos
237     double max_frame_duration;      // maximum duration of a frame - above this, we consider the jump a timestamp discontinuity
238     VideoPicture pictq[VIDEO_PICTURE_QUEUE_SIZE];
239     int pictq_size, pictq_rindex, pictq_windex;
240     SDL_mutex *pictq_mutex;
241     SDL_cond *pictq_cond;
242 #if !CONFIG_AVFILTER
243     struct SwsContext *img_convert_ctx;
244 #endif
245     SDL_Rect last_display_rect;
246
247     char filename[1024];
248     int width, height, xleft, ytop;
249     int step;
250
251 #if CONFIG_AVFILTER
252     AVFilterContext *in_video_filter;   // the first filter in the video chain
253     AVFilterContext *out_video_filter;  // the last filter in the video chain
254     int use_dr1;
255     FrameBuffer *buffer_pool;
256 #endif
257
258     int refresh;
259     int last_video_stream, last_audio_stream, last_subtitle_stream;
260
261     SDL_cond *continue_read_thread;
262 } VideoState;
263
264 /* options specified by the user */
265 static AVInputFormat *file_iformat;
266 static const char *input_filename;
267 static const char *window_title;
268 static int fs_screen_width;
269 static int fs_screen_height;
270 static int screen_width  = 0;
271 static int screen_height = 0;
272 static int audio_disable;
273 static int video_disable;
274 static int wanted_stream[AVMEDIA_TYPE_NB] = {
275     [AVMEDIA_TYPE_AUDIO]    = -1,
276     [AVMEDIA_TYPE_VIDEO]    = -1,
277     [AVMEDIA_TYPE_SUBTITLE] = -1,
278 };
279 static int seek_by_bytes = -1;
280 static int display_disable;
281 static int show_status = 1;
282 static int av_sync_type = AV_SYNC_AUDIO_MASTER;
283 static int64_t start_time = AV_NOPTS_VALUE;
284 static int64_t duration = AV_NOPTS_VALUE;
285 static int workaround_bugs = 1;
286 static int fast = 0;
287 static int genpts = 0;
288 static int lowres = 0;
289 static int idct = FF_IDCT_AUTO;
290 static enum AVDiscard skip_frame       = AVDISCARD_DEFAULT;
291 static enum AVDiscard skip_idct        = AVDISCARD_DEFAULT;
292 static enum AVDiscard skip_loop_filter = AVDISCARD_DEFAULT;
293 static int error_concealment = 3;
294 static int decoder_reorder_pts = -1;
295 static int autoexit;
296 static int exit_on_keydown;
297 static int exit_on_mousedown;
298 static int loop = 1;
299 static int framedrop = -1;
300 static int infinite_buffer = -1;
301 static enum ShowMode show_mode = SHOW_MODE_NONE;
302 static const char *audio_codec_name;
303 static const char *subtitle_codec_name;
304 static const char *video_codec_name;
305 static int rdftspeed = 20;
306 #if CONFIG_AVFILTER
307 static char *vfilters = NULL;
308 #endif
309
310 /* current context */
311 static int is_full_screen;
312 static int64_t audio_callback_time;
313
314 static AVPacket flush_pkt;
315
316 #define FF_ALLOC_EVENT   (SDL_USEREVENT)
317 #define FF_REFRESH_EVENT (SDL_USEREVENT + 1)
318 #define FF_QUIT_EVENT    (SDL_USEREVENT + 2)
319
320 static SDL_Surface *screen;
321
322 static int packet_queue_put(PacketQueue *q, AVPacket *pkt);
323
324 static int packet_queue_put_private(PacketQueue *q, AVPacket *pkt)
325 {
326     MyAVPacketList *pkt1;
327
328     if (q->abort_request)
329        return -1;
330
331     pkt1 = av_malloc(sizeof(MyAVPacketList));
332     if (!pkt1)
333         return -1;
334     pkt1->pkt = *pkt;
335     pkt1->next = NULL;
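        /* a flush packet marks a discontinuity (e.g. a seek); bumping the queue
           serial lets consumers detect and drop data that was queued before it */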
336     if (pkt == &flush_pkt)
337         q->serial++;
338     pkt1->serial = q->serial;
339
340     if (!q->last_pkt)
341         q->first_pkt = pkt1;
342     else
343         q->last_pkt->next = pkt1;
344     q->last_pkt = pkt1;
345     q->nb_packets++;
346     q->size += pkt1->pkt.size + sizeof(*pkt1);
347     /* XXX: should duplicate packet data in DV case */
348     SDL_CondSignal(q->cond);
349     return 0;
350 }
351
352 static int packet_queue_put(PacketQueue *q, AVPacket *pkt)
353 {
354     int ret;
355
356     /* duplicate the packet */
357     if (pkt != &flush_pkt && av_dup_packet(pkt) < 0)
358         return -1;
359
360     SDL_LockMutex(q->mutex);
361     ret = packet_queue_put_private(q, pkt);
362     SDL_UnlockMutex(q->mutex);
363
364     if (pkt != &flush_pkt && ret < 0)
365         av_free_packet(pkt);
366
367     return ret;
368 }
369
370 /* packet queue handling */
371 static void packet_queue_init(PacketQueue *q)
372 {
373     memset(q, 0, sizeof(PacketQueue));
374     q->mutex = SDL_CreateMutex();
375     q->cond = SDL_CreateCond();
376     q->abort_request = 1;
377 }
378
379 static void packet_queue_flush(PacketQueue *q)
380 {
381     MyAVPacketList *pkt, *pkt1;
382
383     SDL_LockMutex(q->mutex);
384     for (pkt = q->first_pkt; pkt != NULL; pkt = pkt1) {
385         pkt1 = pkt->next;
386         av_free_packet(&pkt->pkt);
387         av_freep(&pkt);
388     }
389     q->last_pkt = NULL;
390     q->first_pkt = NULL;
391     q->nb_packets = 0;
392     q->size = 0;
393     SDL_UnlockMutex(q->mutex);
394 }
395
396 static void packet_queue_destroy(PacketQueue *q)
397 {
398     packet_queue_flush(q);
399     SDL_DestroyMutex(q->mutex);
400     SDL_DestroyCond(q->cond);
401 }
402
403 static void packet_queue_abort(PacketQueue *q)
404 {
405     SDL_LockMutex(q->mutex);
406
407     q->abort_request = 1;
408
409     SDL_CondSignal(q->cond);
410
411     SDL_UnlockMutex(q->mutex);
412 }
413
414 static void packet_queue_start(PacketQueue *q)
415 {
416     SDL_LockMutex(q->mutex);
417     q->abort_request = 0;
418     packet_queue_put_private(q, &flush_pkt);
419     SDL_UnlockMutex(q->mutex);
420 }
421
422 /* return < 0 if aborted, 0 if no packet and > 0 if packet.  */
423 static int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block, int *serial)
424 {
425     MyAVPacketList *pkt1;
426     int ret;
427
428     SDL_LockMutex(q->mutex);
429
430     for (;;) {
431         if (q->abort_request) {
432             ret = -1;
433             break;
434         }
435
436         pkt1 = q->first_pkt;
437         if (pkt1) {
438             q->first_pkt = pkt1->next;
439             if (!q->first_pkt)
440                 q->last_pkt = NULL;
441             q->nb_packets--;
442             q->size -= pkt1->pkt.size + sizeof(*pkt1);
443             *pkt = pkt1->pkt;
444             if (serial)
445                 *serial = pkt1->serial;
446             av_free(pkt1);
447             ret = 1;
448             break;
449         } else if (!block) {
450             ret = 0;
451             break;
452         } else {
453             SDL_CondWait(q->cond, q->mutex);
454         }
455     }
456     SDL_UnlockMutex(q->mutex);
457     return ret;
458 }
459
460 static inline void fill_rectangle(SDL_Surface *screen,
461                                   int x, int y, int w, int h, int color, int update)
462 {
463     SDL_Rect rect;
464     rect.x = x;
465     rect.y = y;
466     rect.w = w;
467     rect.h = h;
468     SDL_FillRect(screen, &rect, color);
469     if (update && w > 0 && h > 0)
470         SDL_UpdateRect(screen, x, y, w, h);
471 }
472
473 /* draw only the border of a rectangle */
474 static void fill_border(int xleft, int ytop, int width, int height, int x, int y, int w, int h, int color, int update)
475 {
476     int w1, w2, h1, h2;
477
478     /* fill the background */
479     w1 = x;
480     if (w1 < 0)
481         w1 = 0;
482     w2 = width - (x + w);
483     if (w2 < 0)
484         w2 = 0;
485     h1 = y;
486     if (h1 < 0)
487         h1 = 0;
488     h2 = height - (y + h);
489     if (h2 < 0)
490         h2 = 0;
491     fill_rectangle(screen,
492                    xleft, ytop,
493                    w1, height,
494                    color, update);
495     fill_rectangle(screen,
496                    xleft + width - w2, ytop,
497                    w2, height,
498                    color, update);
499     fill_rectangle(screen,
500                    xleft + w1, ytop,
501                    width - w1 - w2, h1,
502                    color, update);
503     fill_rectangle(screen,
504                    xleft + w1, ytop + height - h2,
505                    width - w1 - w2, h2,
506                    color, update);
507 }
508
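    /* blend newp over oldp with opacity a (0..255); the shift s compensates for
       newp being an accumulated sum of 2^s samples (chroma subsampling) */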
509 #define ALPHA_BLEND(a, oldp, newp, s)\
510 ((((oldp << s) * (255 - (a))) + (newp * (a))) / (255 << s))
511
512 #define RGBA_IN(r, g, b, a, s)\
513 {\
514     unsigned int v = ((const uint32_t *)(s))[0];\
515     a = (v >> 24) & 0xff;\
516     r = (v >> 16) & 0xff;\
517     g = (v >> 8) & 0xff;\
518     b = v & 0xff;\
519 }
520
521 #define YUVA_IN(y, u, v, a, s, pal)\
522 {\
523     unsigned int val = ((const uint32_t *)(pal))[*(const uint8_t*)(s)];\
524     a = (val >> 24) & 0xff;\
525     y = (val >> 16) & 0xff;\
526     u = (val >> 8) & 0xff;\
527     v = val & 0xff;\
528 }
529
530 #define YUVA_OUT(d, y, u, v, a)\
531 {\
532     ((uint32_t *)(d))[0] = (a << 24) | (y << 16) | (u << 8) | v;\
533 }
534
535
536 #define BPP 1
537
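    /* Blend a palettized subtitle rectangle onto a YUV420P picture. The palette
       entries are packed AYUV (see YUVA_IN); since the chroma planes are
       subsampled 2x2, chroma and alpha of neighbouring luma samples are
       accumulated and blended with a correspondingly scaled weight. */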
538 static void blend_subrect(AVPicture *dst, const AVSubtitleRect *rect, int imgw, int imgh)
539 {
540     int wrap, wrap3, width2, skip2;
541     int y, u, v, a, u1, v1, a1, w, h;
542     uint8_t *lum, *cb, *cr;
543     const uint8_t *p;
544     const uint32_t *pal;
545     int dstx, dsty, dstw, dsth;
546
547     dstw = av_clip(rect->w, 0, imgw);
548     dsth = av_clip(rect->h, 0, imgh);
549     dstx = av_clip(rect->x, 0, imgw - dstw);
550     dsty = av_clip(rect->y, 0, imgh - dsth);
551     lum = dst->data[0] + dsty * dst->linesize[0];
552     cb  = dst->data[1] + (dsty >> 1) * dst->linesize[1];
553     cr  = dst->data[2] + (dsty >> 1) * dst->linesize[2];
554
555     width2 = ((dstw + 1) >> 1) + (dstx & ~dstw & 1);
556     skip2 = dstx >> 1;
557     wrap = dst->linesize[0];
558     wrap3 = rect->pict.linesize[0];
559     p = rect->pict.data[0];
560     pal = (const uint32_t *)rect->pict.data[1];  /* Now in YCrCb! */
561
562     if (dsty & 1) {
563         lum += dstx;
564         cb += skip2;
565         cr += skip2;
566
567         if (dstx & 1) {
568             YUVA_IN(y, u, v, a, p, pal);
569             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
570             cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
571             cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
572             cb++;
573             cr++;
574             lum++;
575             p += BPP;
576         }
577         for (w = dstw - (dstx & 1); w >= 2; w -= 2) {
578             YUVA_IN(y, u, v, a, p, pal);
579             u1 = u;
580             v1 = v;
581             a1 = a;
582             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
583
584             YUVA_IN(y, u, v, a, p + BPP, pal);
585             u1 += u;
586             v1 += v;
587             a1 += a;
588             lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
589             cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
590             cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
591             cb++;
592             cr++;
593             p += 2 * BPP;
594             lum += 2;
595         }
596         if (w) {
597             YUVA_IN(y, u, v, a, p, pal);
598             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
599             cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
600             cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
601             p++;
602             lum++;
603         }
604         p += wrap3 - dstw * BPP;
605         lum += wrap - dstw - dstx;
606         cb += dst->linesize[1] - width2 - skip2;
607         cr += dst->linesize[2] - width2 - skip2;
608     }
609     for (h = dsth - (dsty & 1); h >= 2; h -= 2) {
610         lum += dstx;
611         cb += skip2;
612         cr += skip2;
613
614         if (dstx & 1) {
615             YUVA_IN(y, u, v, a, p, pal);
616             u1 = u;
617             v1 = v;
618             a1 = a;
619             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
620             p += wrap3;
621             lum += wrap;
622             YUVA_IN(y, u, v, a, p, pal);
623             u1 += u;
624             v1 += v;
625             a1 += a;
626             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
627             cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
628             cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
629             cb++;
630             cr++;
631             p += -wrap3 + BPP;
632             lum += -wrap + 1;
633         }
634         for (w = dstw - (dstx & 1); w >= 2; w -= 2) {
635             YUVA_IN(y, u, v, a, p, pal);
636             u1 = u;
637             v1 = v;
638             a1 = a;
639             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
640
641             YUVA_IN(y, u, v, a, p + BPP, pal);
642             u1 += u;
643             v1 += v;
644             a1 += a;
645             lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
646             p += wrap3;
647             lum += wrap;
648
649             YUVA_IN(y, u, v, a, p, pal);
650             u1 += u;
651             v1 += v;
652             a1 += a;
653             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
654
655             YUVA_IN(y, u, v, a, p + BPP, pal);
656             u1 += u;
657             v1 += v;
658             a1 += a;
659             lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
660
661             cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 2);
662             cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 2);
663
664             cb++;
665             cr++;
666             p += -wrap3 + 2 * BPP;
667             lum += -wrap + 2;
668         }
669         if (w) {
670             YUVA_IN(y, u, v, a, p, pal);
671             u1 = u;
672             v1 = v;
673             a1 = a;
674             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
675             p += wrap3;
676             lum += wrap;
677             YUVA_IN(y, u, v, a, p, pal);
678             u1 += u;
679             v1 += v;
680             a1 += a;
681             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
682             cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
683             cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
684             cb++;
685             cr++;
686             p += -wrap3 + BPP;
687             lum += -wrap + 1;
688         }
689         p += wrap3 + (wrap3 - dstw * BPP);
690         lum += wrap + (wrap - dstw - dstx);
691         cb += dst->linesize[1] - width2 - skip2;
692         cr += dst->linesize[2] - width2 - skip2;
693     }
694     /* handle odd height */
695     if (h) {
696         lum += dstx;
697         cb += skip2;
698         cr += skip2;
699
700         if (dstx & 1) {
701             YUVA_IN(y, u, v, a, p, pal);
702             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
703             cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
704             cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
705             cb++;
706             cr++;
707             lum++;
708             p += BPP;
709         }
710         for (w = dstw - (dstx & 1); w >= 2; w -= 2) {
711             YUVA_IN(y, u, v, a, p, pal);
712             u1 = u;
713             v1 = v;
714             a1 = a;
715             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
716
717             YUVA_IN(y, u, v, a, p + BPP, pal);
718             u1 += u;
719             v1 += v;
720             a1 += a;
721             lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
722             cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
723             cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
724             cb++;
725             cr++;
726             p += 2 * BPP;
727             lum += 2;
728         }
729         if (w) {
730             YUVA_IN(y, u, v, a, p, pal);
731             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
732             cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
733             cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
734         }
735     }
736 }
737
738 static void free_subpicture(SubPicture *sp)
739 {
740     avsubtitle_free(&sp->sub);
741 }
742
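    /* Compute the largest even-sized rectangle that fits into the given screen
       area while preserving the picture's display aspect ratio (sample aspect
       ratio applied to its width/height), and center it. */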
743 static void calculate_display_rect(SDL_Rect *rect, int scr_xleft, int scr_ytop, int scr_width, int scr_height, VideoPicture *vp)
744 {
745     float aspect_ratio;
746     int width, height, x, y;
747
748     if (vp->sample_aspect_ratio.num == 0)
749         aspect_ratio = 0;
750     else
751         aspect_ratio = av_q2d(vp->sample_aspect_ratio);
752
753     if (aspect_ratio <= 0.0)
754         aspect_ratio = 1.0;
755     aspect_ratio *= (float)vp->width / (float)vp->height;
756
757     /* XXX: we suppose the screen has a 1.0 pixel ratio */
758     height = scr_height;
759     width = ((int)rint(height * aspect_ratio)) & ~1;
760     if (width > scr_width) {
761         width = scr_width;
762         height = ((int)rint(width / aspect_ratio)) & ~1;
763     }
764     x = (scr_width - width) / 2;
765     y = (scr_height - height) / 2;
766     rect->x = scr_xleft + x;
767     rect->y = scr_ytop  + y;
768     rect->w = FFMAX(width,  1);
769     rect->h = FFMAX(height, 1);
770 }
771
772 static void video_image_display(VideoState *is)
773 {
774     VideoPicture *vp;
775     SubPicture *sp;
776     AVPicture pict;
777     SDL_Rect rect;
778     int i;
779
780     vp = &is->pictq[is->pictq_rindex];
781     if (vp->bmp) {
782         if (is->subtitle_st) {
783             if (is->subpq_size > 0) {
784                 sp = &is->subpq[is->subpq_rindex];
785
786                 if (vp->pts >= sp->pts + ((float) sp->sub.start_display_time / 1000)) {
787                     SDL_LockYUVOverlay (vp->bmp);
788
789                     pict.data[0] = vp->bmp->pixels[0];
790                     pict.data[1] = vp->bmp->pixels[2];
791                     pict.data[2] = vp->bmp->pixels[1];
792
793                     pict.linesize[0] = vp->bmp->pitches[0];
794                     pict.linesize[1] = vp->bmp->pitches[2];
795                     pict.linesize[2] = vp->bmp->pitches[1];
796
797                     for (i = 0; i < sp->sub.num_rects; i++)
798                         blend_subrect(&pict, sp->sub.rects[i],
799                                       vp->bmp->w, vp->bmp->h);
800
801                     SDL_UnlockYUVOverlay (vp->bmp);
802                 }
803             }
804         }
805
806         calculate_display_rect(&rect, is->xleft, is->ytop, is->width, is->height, vp);
807
808         SDL_DisplayYUVOverlay(vp->bmp, &rect);
809
810         if (rect.x != is->last_display_rect.x || rect.y != is->last_display_rect.y || rect.w != is->last_display_rect.w || rect.h != is->last_display_rect.h || is->force_refresh) {
811             int bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00);
812             fill_border(is->xleft, is->ytop, is->width, is->height, rect.x, rect.y, rect.w, rect.h, bgcolor, 1);
813             is->last_display_rect = rect;
814         }
815     }
816 }
817
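    /* like a % b, but the result is always in [0, b) even for negative a */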
818 static inline int compute_mod(int a, int b)
819 {
820     return a < 0 ? a%b + b : a%b;
821 }
822
823 static void video_audio_display(VideoState *s)
824 {
825     int i, i_start, x, y1, y, ys, delay, n, nb_display_channels;
826     int ch, channels, h, h2, bgcolor, fgcolor;
827     int64_t time_diff;
828     int rdft_bits, nb_freq;
829
830     for (rdft_bits = 1; (1 << rdft_bits) < 2 * s->height; rdft_bits++)
831         ;
832     nb_freq = 1 << (rdft_bits - 1);
833
834     /* compute display index : center on currently output samples */
835     channels = s->audio_tgt.channels;
836     nb_display_channels = channels;
837     if (!s->paused) {
838         int data_used= s->show_mode == SHOW_MODE_WAVES ? s->width : (2*nb_freq);
839         n = 2 * channels;
840         delay = s->audio_write_buf_size;
841         delay /= n;
842
843         /* to be more precise, we take into account the time spent since
844            the last buffer computation */
845         if (audio_callback_time) {
846             time_diff = av_gettime() - audio_callback_time;
847             delay -= (time_diff * s->audio_tgt.freq) / 1000000;
848         }
849
850         delay += 2 * data_used;
851         if (delay < data_used)
852             delay = data_used;
853
854         i_start= x = compute_mod(s->sample_array_index - delay * channels, SAMPLE_ARRAY_SIZE);
855         if (s->show_mode == SHOW_MODE_WAVES) {
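                /* oscilloscope-like trigger: pick the start index with the strongest
                   falling edge around a sign change, so the waveform is drawn from a
                   similar phase on every refresh */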
856             h = INT_MIN;
857             for (i = 0; i < 1000; i += channels) {
858                 int idx = (SAMPLE_ARRAY_SIZE + x - i) % SAMPLE_ARRAY_SIZE;
859                 int a = s->sample_array[idx];
860                 int b = s->sample_array[(idx + 4 * channels) % SAMPLE_ARRAY_SIZE];
861                 int c = s->sample_array[(idx + 5 * channels) % SAMPLE_ARRAY_SIZE];
862                 int d = s->sample_array[(idx + 9 * channels) % SAMPLE_ARRAY_SIZE];
863                 int score = a - d;
864                 if (h < score && (b ^ c) < 0) {
865                     h = score;
866                     i_start = idx;
867                 }
868             }
869         }
870
871         s->last_i_start = i_start;
872     } else {
873         i_start = s->last_i_start;
874     }
875
876     bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00);
877     if (s->show_mode == SHOW_MODE_WAVES) {
878         fill_rectangle(screen,
879                        s->xleft, s->ytop, s->width, s->height,
880                        bgcolor, 0);
881
882         fgcolor = SDL_MapRGB(screen->format, 0xff, 0xff, 0xff);
883
884         /* total height for one channel */
885         h = s->height / nb_display_channels;
886         /* graph height / 2 */
887         h2 = (h * 9) / 20;
888         for (ch = 0; ch < nb_display_channels; ch++) {
889             i = i_start + ch;
890             y1 = s->ytop + ch * h + (h / 2); /* position of center line */
891             for (x = 0; x < s->width; x++) {
892                 y = (s->sample_array[i] * h2) >> 15;
893                 if (y < 0) {
894                     y = -y;
895                     ys = y1 - y;
896                 } else {
897                     ys = y1;
898                 }
899                 fill_rectangle(screen,
900                                s->xleft + x, ys, 1, y,
901                                fgcolor, 0);
902                 i += channels;
903                 if (i >= SAMPLE_ARRAY_SIZE)
904                     i -= SAMPLE_ARRAY_SIZE;
905             }
906         }
907
908         fgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0xff);
909
910         for (ch = 1; ch < nb_display_channels; ch++) {
911             y = s->ytop + ch * h;
912             fill_rectangle(screen,
913                            s->xleft, y, s->width, 1,
914                            fgcolor, 0);
915         }
916         SDL_UpdateRect(screen, s->xleft, s->ytop, s->width, s->height);
917     } else {
918         nb_display_channels= FFMIN(nb_display_channels, 2);
919         if (rdft_bits != s->rdft_bits) {
920             av_rdft_end(s->rdft);
921             av_free(s->rdft_data);
922             s->rdft = av_rdft_init(rdft_bits, DFT_R2C);
923             s->rdft_bits = rdft_bits;
924             s->rdft_data = av_malloc(4 * nb_freq * sizeof(*s->rdft_data));
925         }
926         {
927             FFTSample *data[2];
928             for (ch = 0; ch < nb_display_channels; ch++) {
929                 data[ch] = s->rdft_data + 2 * nb_freq * ch;
930                 i = i_start + ch;
931                 for (x = 0; x < 2 * nb_freq; x++) {
932                     double w = (x-nb_freq) * (1.0 / nb_freq);
933                     data[ch][x] = s->sample_array[i] * (1.0 - w * w);
934                     i += channels;
935                     if (i >= SAMPLE_ARRAY_SIZE)
936                         i -= SAMPLE_ARRAY_SIZE;
937                 }
938                 av_rdft_calc(s->rdft, data[ch]);
939             }
940             // least efficient way to do this; we should of course access it directly, but it's more than fast enough
941             for (y = 0; y < s->height; y++) {
942                 double w = 1 / sqrt(nb_freq);
943                 int a = sqrt(w * sqrt(data[0][2 * y + 0] * data[0][2 * y + 0] + data[0][2 * y + 1] * data[0][2 * y + 1]));
944                 int b = (nb_display_channels == 2 ) ? sqrt(w * sqrt(data[1][2 * y + 0] * data[1][2 * y + 0]
945                        + data[1][2 * y + 1] * data[1][2 * y + 1])) : a;
946                 a = FFMIN(a, 255);
947                 b = FFMIN(b, 255);
948                 fgcolor = SDL_MapRGB(screen->format, a, b, (a + b) / 2);
949
950                 fill_rectangle(screen,
951                             s->xpos, s->height-y, 1, 1,
952                             fgcolor, 0);
953             }
954         }
955         SDL_UpdateRect(screen, s->xpos, s->ytop, 1, s->height);
956         if (!s->paused)
957             s->xpos++;
958         if (s->xpos >= s->width)
959             s->xpos= s->xleft;
960     }
961 }
962
963 static void stream_close(VideoState *is)
964 {
965     VideoPicture *vp;
966     int i;
967     /* XXX: use a special url_shutdown call to abort parse cleanly */
968     is->abort_request = 1;
969     SDL_WaitThread(is->read_tid, NULL);
970     SDL_WaitThread(is->refresh_tid, NULL);
971     packet_queue_destroy(&is->videoq);
972     packet_queue_destroy(&is->audioq);
973     packet_queue_destroy(&is->subtitleq);
974
975     /* free all pictures */
976     for (i = 0; i < VIDEO_PICTURE_QUEUE_SIZE; i++) {
977         vp = &is->pictq[i];
978 #if CONFIG_AVFILTER
979         avfilter_unref_bufferp(&vp->picref);
980 #endif
981         if (vp->bmp) {
982             SDL_FreeYUVOverlay(vp->bmp);
983             vp->bmp = NULL;
984         }
985     }
986     SDL_DestroyMutex(is->pictq_mutex);
987     SDL_DestroyCond(is->pictq_cond);
988     SDL_DestroyMutex(is->subpq_mutex);
989     SDL_DestroyCond(is->subpq_cond);
990     SDL_DestroyCond(is->continue_read_thread);
991 #if !CONFIG_AVFILTER
992     if (is->img_convert_ctx)
993         sws_freeContext(is->img_convert_ctx);
994 #endif
995     av_free(is);
996 }
997
998 static void do_exit(VideoState *is)
999 {
1000     if (is) {
1001         stream_close(is);
1002     }
1003     av_lockmgr_register(NULL);
1004     uninit_opts();
1005 #if CONFIG_AVFILTER
1006     avfilter_uninit();
1007     av_freep(&vfilters);
1008 #endif
1009     avformat_network_deinit();
1010     if (show_status)
1011         printf("\n");
1012     SDL_Quit();
1013     av_log(NULL, AV_LOG_QUIET, "%s", "");
1014     exit(0);
1015 }
1016
1017 static void sigterm_handler(int sig)
1018 {
1019     exit(123);
1020 }
1021
1022 static int video_open(VideoState *is, int force_set_video_mode)
1023 {
1024     int flags = SDL_HWSURFACE | SDL_ASYNCBLIT | SDL_HWACCEL;
1025     int w,h;
1026     VideoPicture *vp = &is->pictq[is->pictq_rindex];
1027     SDL_Rect rect;
1028
1029     if (is_full_screen) flags |= SDL_FULLSCREEN;
1030     else                flags |= SDL_RESIZABLE;
1031
1032     if (is_full_screen && fs_screen_width) {
1033         w = fs_screen_width;
1034         h = fs_screen_height;
1035     } else if (!is_full_screen && screen_width) {
1036         w = screen_width;
1037         h = screen_height;
1038     } else if (vp->width) {
1039         calculate_display_rect(&rect, 0, 0, INT_MAX, vp->height, vp);
1040         w = rect.w;
1041         h = rect.h;
1042     } else {
1043         w = 640;
1044         h = 480;
1045     }
1046     if (screen && is->width == screen->w && screen->w == w
1047        && is->height== screen->h && screen->h == h && !force_set_video_mode)
1048         return 0;
1049     screen = SDL_SetVideoMode(w, h, 0, flags);
1050     if (!screen) {
1051         fprintf(stderr, "SDL: could not set video mode - exiting\n");
1052         do_exit(is);
1053     }
1054     if (!window_title)
1055         window_title = input_filename;
1056     SDL_WM_SetCaption(window_title, window_title);
1057
1058     is->width  = screen->w;
1059     is->height = screen->h;
1060
1061     return 0;
1062 }
1063
1064 /* display the current picture, if any */
1065 static void video_display(VideoState *is)
1066 {
1067     if (!screen)
1068         video_open(is, 0);
1069     if (is->audio_st && is->show_mode != SHOW_MODE_VIDEO)
1070         video_audio_display(is);
1071     else if (is->video_st)
1072         video_image_display(is);
1073 }
1074
1075 static int refresh_thread(void *opaque)
1076 {
1077     VideoState *is= opaque;
1078     while (!is->abort_request) {
1079         SDL_Event event;
1080         event.type = FF_REFRESH_EVENT;
1081         event.user.data1 = opaque;
1082         if (!is->refresh && (!is->paused || is->force_refresh)) {
1083             is->refresh = 1;
1084             SDL_PushEvent(&event);
1085         }
1086         //FIXME ideally we should wait the correct time, but SDL's event passing is so slow it would be silly
1087         av_usleep(is->audio_st && is->show_mode != SHOW_MODE_VIDEO ? rdftspeed*1000 : 5000);
1088     }
1089     return 0;
1090 }
1091
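    /* Each clock is stored as a drift: its pts minus the system time at the last
       update. Adding the current system time back yields a clock that keeps
       advancing in real time between updates. */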
1092 /* get the current audio clock value */
1093 static double get_audio_clock(VideoState *is)
1094 {
1095     if (is->paused) {
1096         return is->audio_current_pts;
1097     } else {
1098         return is->audio_current_pts_drift + av_gettime() / 1000000.0;
1099     }
1100 }
1101
1102 /* get the current video clock value */
1103 static double get_video_clock(VideoState *is)
1104 {
1105     if (is->paused) {
1106         return is->video_current_pts;
1107     } else {
1108         return is->video_current_pts_drift + av_gettime() / 1000000.0;
1109     }
1110 }
1111
1112 /* get the current external clock value */
1113 static double get_external_clock(VideoState *is)
1114 {
1115     if (is->paused) {
1116         return is->external_clock;
1117     } else {
1118         double time = av_gettime() / 1000000.0;
1119         return is->external_clock_drift + time - (time - is->external_clock_time / 1000000.0) * (1.0 - is->external_clock_speed);
1120     }
1121 }
1122
1123 static int get_master_sync_type(VideoState *is) {
1124     if (is->av_sync_type == AV_SYNC_VIDEO_MASTER) {
1125         if (is->video_st)
1126             return AV_SYNC_VIDEO_MASTER;
1127         else
1128             return AV_SYNC_AUDIO_MASTER;
1129     } else if (is->av_sync_type == AV_SYNC_AUDIO_MASTER) {
1130         if (is->audio_st)
1131             return AV_SYNC_AUDIO_MASTER;
1132         else
1133             return AV_SYNC_EXTERNAL_CLOCK;
1134     } else {
1135         return AV_SYNC_EXTERNAL_CLOCK;
1136     }
1137 }
1138
1139 /* get the current master clock value */
1140 static double get_master_clock(VideoState *is)
1141 {
1142     double val;
1143
1144     switch (get_master_sync_type(is)) {
1145         case AV_SYNC_VIDEO_MASTER:
1146             val = get_video_clock(is);
1147             break;
1148         case AV_SYNC_AUDIO_MASTER:
1149             val = get_audio_clock(is);
1150             break;
1151         default:
1152             val = get_external_clock(is);
1153             break;
1154     }
1155     return val;
1156 }
1157
1158 static void update_external_clock_pts(VideoState *is, double pts)
1159 {
1160    is->external_clock_time = av_gettime();
1161    is->external_clock = pts;
1162    is->external_clock_drift = pts - is->external_clock_time / 1000000.0;
1163 }
1164
1165 static void check_external_clock_sync(VideoState *is, double pts) {
1166     if (fabs(get_external_clock(is) - pts) > AV_NOSYNC_THRESHOLD) {
1167         update_external_clock_pts(is, pts);
1168     }
1169 }
1170
1171 static void update_external_clock_speed(VideoState *is, double speed) {
1172     update_external_clock_pts(is, get_external_clock(is));
1173     is->external_clock_speed = speed;
1174 }
1175
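    /* Nudge the external clock speed for realtime sources: slow it down when the
       packet queues run nearly empty, speed it up when they are well filled, and
       otherwise step it back towards its nominal value of 1.0. */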
1176 static void check_external_clock_speed(VideoState *is) {
1177    if (is->video_stream >= 0 && is->videoq.nb_packets <= MIN_FRAMES / 2 ||
1178        is->audio_stream >= 0 && is->audioq.nb_packets <= MIN_FRAMES / 2) {
1179        update_external_clock_speed(is, FFMAX(EXTERNAL_CLOCK_SPEED_MIN, is->external_clock_speed - EXTERNAL_CLOCK_SPEED_STEP));
1180    } else if ((is->video_stream < 0 || is->videoq.nb_packets > MIN_FRAMES * 2) &&
1181               (is->audio_stream < 0 || is->audioq.nb_packets > MIN_FRAMES * 2)) {
1182        update_external_clock_speed(is, FFMIN(EXTERNAL_CLOCK_SPEED_MAX, is->external_clock_speed + EXTERNAL_CLOCK_SPEED_STEP));
1183    } else {
1184        double speed = is->external_clock_speed;
1185        if (speed != 1.0)
1186            update_external_clock_speed(is, speed + EXTERNAL_CLOCK_SPEED_STEP * (1.0 - speed) / fabs(1.0 - speed));
1187    }
1188 }
1189
1190 /* seek in the stream */
1191 static void stream_seek(VideoState *is, int64_t pos, int64_t rel, int seek_by_bytes)
1192 {
1193     if (!is->seek_req) {
1194         is->seek_pos = pos;
1195         is->seek_rel = rel;
1196         is->seek_flags &= ~AVSEEK_FLAG_BYTE;
1197         if (seek_by_bytes)
1198             is->seek_flags |= AVSEEK_FLAG_BYTE;
1199         is->seek_req = 1;
1200     }
1201 }
1202
1203 /* pause or resume the video */
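    /* on resume, frame_timer and the video clock drift are moved forward by the
       time spent paused, so playback continues without a jump */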
1204 static void stream_toggle_pause(VideoState *is)
1205 {
1206     if (is->paused) {
1207         is->frame_timer += av_gettime() / 1000000.0 + is->video_current_pts_drift - is->video_current_pts;
1208         if (is->read_pause_return != AVERROR(ENOSYS)) {
1209             is->video_current_pts = is->video_current_pts_drift + av_gettime() / 1000000.0;
1210         }
1211         is->video_current_pts_drift = is->video_current_pts - av_gettime() / 1000000.0;
1212     }
1213     update_external_clock_pts(is, get_external_clock(is));
1214     is->paused = !is->paused;
1215 }
1216
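    /* When video is not the master clock, shrink the frame delay (down to 0) if
       video is behind the master by more than the sync threshold, or double it
       if video is ahead, so we catch up or wait as needed. */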
1217 static double compute_target_delay(double delay, VideoState *is)
1218 {
1219     double sync_threshold, diff;
1220
1221     /* update delay to follow master synchronisation source */
1222     if (get_master_sync_type(is) != AV_SYNC_VIDEO_MASTER) {
1223         /* if video is slave, we try to correct big delays by
1224            duplicating or deleting a frame */
1225         diff = get_video_clock(is) - get_master_clock(is);
1226
1227         /* skip or repeat frame. We take into account the
1228            delay to compute the threshold. I still don't know
1229            if it is the best guess */
1230         sync_threshold = FFMAX(AV_SYNC_THRESHOLD, delay);
1231         if (fabs(diff) < AV_NOSYNC_THRESHOLD) {
1232             if (diff <= -sync_threshold)
1233                 delay = 0;
1234             else if (diff >= sync_threshold)
1235                 delay = 2 * delay;
1236         }
1237     }
1238
1239     av_dlog(NULL, "video: delay=%0.3f A-V=%f\n",
1240             delay, -diff);
1241
1242     return delay;
1243 }
1244
1245 static void pictq_next_picture(VideoState *is) {
1246     /* update queue size and signal for next picture */
1247     if (++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE)
1248         is->pictq_rindex = 0;
1249
1250     SDL_LockMutex(is->pictq_mutex);
1251     is->pictq_size--;
1252     SDL_CondSignal(is->pictq_cond);
1253     SDL_UnlockMutex(is->pictq_mutex);
1254 }
1255
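    /* Step the read index back to the previously displayed picture (used on a
       forced refresh, e.g. after a resize) if it is still allocated and valid. */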
1256 static void pictq_prev_picture(VideoState *is) {
1257     VideoPicture *prevvp;
1258     /* update queue size and signal for the previous picture */
1259     prevvp = &is->pictq[(is->pictq_rindex + VIDEO_PICTURE_QUEUE_SIZE - 1) % VIDEO_PICTURE_QUEUE_SIZE];
1260     if (prevvp->allocated && !prevvp->skip) {
1261         SDL_LockMutex(is->pictq_mutex);
1262         if (is->pictq_size < VIDEO_PICTURE_QUEUE_SIZE - 1) {
1263             if (--is->pictq_rindex == -1)
1264                 is->pictq_rindex = VIDEO_PICTURE_QUEUE_SIZE - 1;
1265             is->pictq_size++;
1266         }
1267         SDL_CondSignal(is->pictq_cond);
1268         SDL_UnlockMutex(is->pictq_mutex);
1269     }
1270 }
1271
1272 static void update_video_pts(VideoState *is, double pts, int64_t pos, int serial) {
1273     double time = av_gettime() / 1000000.0;
1274     /* update current video pts */
1275     is->video_current_pts = pts;
1276     is->video_current_pts_drift = is->video_current_pts - time;
1277     is->video_current_pos = pos;
1278     is->frame_last_pts = pts;
1279     if (is->videoq.serial == serial)
1280         check_external_clock_sync(is, is->video_current_pts);
1281 }
1282
1283 /* called to display each frame */
1284 static void video_refresh(void *opaque)
1285 {
1286     VideoState *is = opaque;
1287     VideoPicture *vp;
1288     double time;
1289
1290     SubPicture *sp, *sp2;
1291
1292     if (!is->paused && get_master_sync_type(is) == AV_SYNC_EXTERNAL_CLOCK && is->realtime)
1293         check_external_clock_speed(is);
1294
1295     if (is->video_st) {
1296         if (is->force_refresh)
1297             pictq_prev_picture(is);
1298 retry:
1299         if (is->pictq_size == 0) {
1300             SDL_LockMutex(is->pictq_mutex);
1301             if (is->frame_last_dropped_pts != AV_NOPTS_VALUE && is->frame_last_dropped_pts > is->frame_last_pts) {
1302                 update_video_pts(is, is->frame_last_dropped_pts, is->frame_last_dropped_pos, 0);
1303                 is->frame_last_dropped_pts = AV_NOPTS_VALUE;
1304             }
1305             SDL_UnlockMutex(is->pictq_mutex);
1306             // nothing to do, no picture to display in the queue
1307         } else {
1308             double last_duration, duration, delay;
1309             /* dequeue the picture */
1310             vp = &is->pictq[is->pictq_rindex];
1311
1312             if (vp->skip) {
1313                 pictq_next_picture(is);
1314                 goto retry;
1315             }
1316
1317             if (is->paused)
1318                 goto display;
1319
1320             /* compute nominal last_duration */
1321             last_duration = vp->pts - is->frame_last_pts;
1322             if (last_duration > 0 && last_duration < is->max_frame_duration) {
1323                 /* if duration of the last frame was sane, update last_duration in video state */
1324                 is->frame_last_duration = last_duration;
1325             }
1326             delay = compute_target_delay(is->frame_last_duration, is);
1327
1328             time= av_gettime()/1000000.0;
1329             if (time < is->frame_timer + delay)
1330                 return;
1331
1332             if (delay > 0)
1333                 is->frame_timer += delay * FFMAX(1, floor((time-is->frame_timer) / delay));
1334
1335             SDL_LockMutex(is->pictq_mutex);
1336             update_video_pts(is, vp->pts, vp->pos, vp->serial);
1337             SDL_UnlockMutex(is->pictq_mutex);
1338
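                /* late frame drop: if the next queued picture is already due,
                   skip the current one to catch up with the master clock */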
1339             if (is->pictq_size > 1) {
1340                 VideoPicture *nextvp = &is->pictq[(is->pictq_rindex + 1) % VIDEO_PICTURE_QUEUE_SIZE];
1341                 duration = nextvp->pts - vp->pts;
1342                 if((framedrop>0 || (framedrop && get_master_sync_type(is) != AV_SYNC_VIDEO_MASTER)) && time > is->frame_timer + duration){
1343                     is->frame_drops_late++;
1344                     pictq_next_picture(is);
1345                     goto retry;
1346                 }
1347             }
1348
1349             if (is->subtitle_st) {
1350                 if (is->subtitle_stream_changed) {
1351                     SDL_LockMutex(is->subpq_mutex);
1352
1353                     while (is->subpq_size) {
1354                         free_subpicture(&is->subpq[is->subpq_rindex]);
1355
1356                         /* update queue size and signal for next picture */
1357                         if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE)
1358                             is->subpq_rindex = 0;
1359
1360                         is->subpq_size--;
1361                     }
1362                     is->subtitle_stream_changed = 0;
1363
1364                     SDL_CondSignal(is->subpq_cond);
1365                     SDL_UnlockMutex(is->subpq_mutex);
1366                 } else {
1367                     if (is->subpq_size > 0) {
1368                         sp = &is->subpq[is->subpq_rindex];
1369
1370                         if (is->subpq_size > 1)
1371                             sp2 = &is->subpq[(is->subpq_rindex + 1) % SUBPICTURE_QUEUE_SIZE];
1372                         else
1373                             sp2 = NULL;
1374
1375                         if ((is->video_current_pts > (sp->pts + ((float) sp->sub.end_display_time / 1000)))
1376                                 || (sp2 && is->video_current_pts > (sp2->pts + ((float) sp2->sub.start_display_time / 1000))))
1377                         {
1378                             free_subpicture(sp);
1379
1380                             /* update queue size and signal for next picture */
1381                             if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE)
1382                                 is->subpq_rindex = 0;
1383
1384                             SDL_LockMutex(is->subpq_mutex);
1385                             is->subpq_size--;
1386                             SDL_CondSignal(is->subpq_cond);
1387                             SDL_UnlockMutex(is->subpq_mutex);
1388                         }
1389                     }
1390                 }
1391             }
1392
1393 display:
1394             /* display picture */
1395             if (!display_disable)
1396                 video_display(is);
1397
1398             pictq_next_picture(is);
1399         }
1400     } else if (is->audio_st) {
1401         /* draw the next audio frame */
1402
1403         /* if only audio stream, then display the audio bars (better
1404            than nothing, just to test the implementation */
1405
1406         /* display picture */
1407         if (!display_disable)
1408             video_display(is);
1409     }
1410     is->force_refresh = 0;
1411     if (show_status) {
1412         static int64_t last_time;
1413         int64_t cur_time;
1414         int aqsize, vqsize, sqsize;
1415         double av_diff;
1416
1417         cur_time = av_gettime();
1418         if (!last_time || (cur_time - last_time) >= 30000) {
1419             aqsize = 0;
1420             vqsize = 0;
1421             sqsize = 0;
1422             if (is->audio_st)
1423                 aqsize = is->audioq.size;
1424             if (is->video_st)
1425                 vqsize = is->videoq.size;
1426             if (is->subtitle_st)
1427                 sqsize = is->subtitleq.size;
1428             av_diff = 0;
1429             if (is->audio_st && is->video_st)
1430                 av_diff = get_audio_clock(is) - get_video_clock(is);
1431             printf("%7.2f A-V:%7.3f fd=%4d aq=%5dKB vq=%5dKB sq=%5dB f=%"PRId64"/%"PRId64"   \r",
1432                    get_master_clock(is),
1433                    av_diff,
1434                    is->frame_drops_early + is->frame_drops_late,
1435                    aqsize / 1024,
1436                    vqsize / 1024,
1437                    sqsize,
1438                    is->video_st ? is->video_st->codec->pts_correction_num_faulty_dts : 0,
1439                    is->video_st ? is->video_st->codec->pts_correction_num_faulty_pts : 0);
1440             fflush(stdout);
1441             last_time = cur_time;
1442         }
1443     }
1444 }
1445
1446 /* allocate a picture (this needs to be done in the main thread to avoid
1447    potential locking problems) */
1448 static void alloc_picture(VideoState *is)
1449 {
1450     VideoPicture *vp;
1451
1452     vp = &is->pictq[is->pictq_windex];
1453
1454     if (vp->bmp)
1455         SDL_FreeYUVOverlay(vp->bmp);
1456
1457 #if CONFIG_AVFILTER
1458     avfilter_unref_bufferp(&vp->picref);
1459 #endif
1460
1461     video_open(is, 0);
1462
1463     vp->bmp = SDL_CreateYUVOverlay(vp->width, vp->height,
1464                                    SDL_YV12_OVERLAY,
1465                                    screen);
1466     if (!vp->bmp || vp->bmp->pitches[0] < vp->width) {
1467         /* SDL allocates a buffer smaller than requested if the video
1468          * overlay hardware is unable to support the requested size. */
1469         fprintf(stderr, "Error: the video system does not support an image\n"
1470                         "size of %dx%d pixels. Try using -lowres or -vf \"scale=w:h\"\n"
1471                         "to reduce the image size.\n", vp->width, vp->height );
1472         do_exit(is);
1473     }
1474
1475     SDL_LockMutex(is->pictq_mutex);
1476     vp->allocated = 1;
1477     SDL_CondSignal(is->pictq_cond);
1478     SDL_UnlockMutex(is->pictq_mutex);
1479 }
1480
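    /* With some XVideo overlay sizes SDL allocates planes whose pitch is larger
       than the image width; the uninitialized padding column used to show up as
       a greenish line at the right edge. Copy the last real pixel of every row
       of each plane into that padding (the chroma planes are half size). */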
1481 static void duplicate_right_border_pixels(SDL_Overlay *bmp) {
1482     int i, width, height;
1483     Uint8 *p, *maxp;
1484     for (i = 0; i < 3; i++) {
1485         width  = bmp->w;
1486         height = bmp->h;
1487         if (i > 0) {
1488             width  >>= 1;
1489             height >>= 1;
1490         }
1491         if (bmp->pitches[i] > width) {
1492             maxp = bmp->pixels[i] + bmp->pitches[i] * height - 1;
1493             for (p = bmp->pixels[i] + width - 1; p < maxp; p += bmp->pitches[i])
1494                 *(p+1) = *p;
1495         }
1496     }
1497 }
1498
1499 static int queue_picture(VideoState *is, AVFrame *src_frame, double pts1, int64_t pos, int serial)
1500 {
1501     VideoPicture *vp;
1502     double frame_delay, pts = pts1;
1503
1504     /* compute the exact PTS for the picture if it is omitted in the stream
1505      * pts1 is the dts of the pkt / pts of the frame */
1506     if (pts != 0) {
1507         /* update video clock with pts, if present */
1508         is->video_clock = pts;
1509     } else {
1510         pts = is->video_clock;
1511     }
1512     /* update video clock for next frame */
1513     frame_delay = av_q2d(is->video_st->codec->time_base);
1514     /* for MPEG2, the frame can be repeated, so we update the
1515        clock accordingly */
1516     frame_delay += src_frame->repeat_pict * (frame_delay * 0.5);
1517     is->video_clock += frame_delay;
1518
1519 #if defined(DEBUG_SYNC) && 0
1520     printf("frame_type=%c clock=%0.3f pts=%0.3f\n",
1521            av_get_picture_type_char(src_frame->pict_type), pts, pts1);
1522 #endif
1523
1524     /* wait until we have space to put a new picture */
1525     SDL_LockMutex(is->pictq_mutex);
1526
1527     /* keep the last already displayed picture in the queue */
1528     while (is->pictq_size >= VIDEO_PICTURE_QUEUE_SIZE - 2 &&
1529            !is->videoq.abort_request) {
1530         SDL_CondWait(is->pictq_cond, is->pictq_mutex);
1531     }
1532     SDL_UnlockMutex(is->pictq_mutex);
1533
1534     if (is->videoq.abort_request)
1535         return -1;
1536
1537     vp = &is->pictq[is->pictq_windex];
1538
1539 #if CONFIG_AVFILTER
1540     vp->sample_aspect_ratio = ((AVFilterBufferRef *)src_frame->opaque)->video->sample_aspect_ratio;
1541 #else
1542     vp->sample_aspect_ratio = av_guess_sample_aspect_ratio(is->ic, is->video_st, src_frame);
1543 #endif
1544
1545     /* alloc or resize hardware picture buffer */
1546     if (!vp->bmp || vp->reallocate || !vp->allocated ||
1547         vp->width  != src_frame->width ||
1548         vp->height != src_frame->height) {
1549         SDL_Event event;
1550
1551         vp->allocated  = 0;
1552         vp->reallocate = 0;
1553         vp->width = src_frame->width;
1554         vp->height = src_frame->height;
1555
1556         /* the allocation must be done in the main thread to avoid
1557            locking problems. */
1558         event.type = FF_ALLOC_EVENT;
1559         event.user.data1 = is;
1560         SDL_PushEvent(&event);
1561
1562         /* wait until the picture is allocated */
1563         SDL_LockMutex(is->pictq_mutex);
1564         while (!vp->allocated && !is->videoq.abort_request) {
1565             SDL_CondWait(is->pictq_cond, is->pictq_mutex);
1566         }
1567         /* if the queue is aborted, we have to pop the pending ALLOC event or wait for the allocation to complete */
1568         if (is->videoq.abort_request && SDL_PeepEvents(&event, 1, SDL_GETEVENT, SDL_EVENTMASK(FF_ALLOC_EVENT)) != 1) {
1569             while (!vp->allocated) {
1570                 SDL_CondWait(is->pictq_cond, is->pictq_mutex);
1571             }
1572         }
1573         SDL_UnlockMutex(is->pictq_mutex);
1574
1575         if (is->videoq.abort_request)
1576             return -1;
1577     }
1578
1579     /* if the frame is not skipped, then display it */
1580     if (vp->bmp) {
1581         AVPicture pict = { { 0 } };
1582 #if CONFIG_AVFILTER
1583         avfilter_unref_bufferp(&vp->picref);
1584         vp->picref = src_frame->opaque;
1585 #endif
1586
1587         /* get a pointer on the bitmap */
1588         SDL_LockYUVOverlay (vp->bmp);
1589
1590         pict.data[0] = vp->bmp->pixels[0];
1591         pict.data[1] = vp->bmp->pixels[2];
1592         pict.data[2] = vp->bmp->pixels[1];
1593
1594         pict.linesize[0] = vp->bmp->pitches[0];
1595         pict.linesize[1] = vp->bmp->pitches[2];
1596         pict.linesize[2] = vp->bmp->pitches[1];
1597
1598 #if CONFIG_AVFILTER
1599         // FIXME use direct rendering
1600         av_picture_copy(&pict, (AVPicture *)src_frame,
1601                         src_frame->format, vp->width, vp->height);
1602 #else
1603         av_opt_get_int(sws_opts, "sws_flags", 0, &sws_flags);
1604         is->img_convert_ctx = sws_getCachedContext(is->img_convert_ctx,
1605             vp->width, vp->height, src_frame->format, vp->width, vp->height,
1606             AV_PIX_FMT_YUV420P, sws_flags, NULL, NULL, NULL);
1607         if (is->img_convert_ctx == NULL) {
1608             fprintf(stderr, "Cannot initialize the conversion context\n");
1609             exit(1);
1610         }
1611         sws_scale(is->img_convert_ctx, src_frame->data, src_frame->linesize,
1612                   0, vp->height, pict.data, pict.linesize);
1613 #endif
1614         /* work around SDL's PITCH_WORKAROUND by duplicating the right border pixels */
1615         duplicate_right_border_pixels(vp->bmp);
1616         /* update the bitmap content */
1617         SDL_UnlockYUVOverlay(vp->bmp);
1618
1619         vp->pts = pts;
1620         vp->pos = pos;
1621         vp->skip = 0;
1622         vp->serial = serial;
1623
1624         /* now we can update the picture count */
1625         if (++is->pictq_windex == VIDEO_PICTURE_QUEUE_SIZE)
1626             is->pictq_windex = 0;
1627         SDL_LockMutex(is->pictq_mutex);
1628         is->pictq_size++;
1629         SDL_UnlockMutex(is->pictq_mutex);
1630     }
1631     return 0;
1632 }
1633
1634 static int get_video_frame(VideoState *is, AVFrame *frame, int64_t *pts, AVPacket *pkt, int *serial)
1635 {
1636     int got_picture, i;
1637
1638     if (packet_queue_get(&is->videoq, pkt, 1, serial) < 0)
1639         return -1;
1640
1641     if (pkt->data == flush_pkt.data) {
1642         avcodec_flush_buffers(is->video_st->codec);
1643
1644         SDL_LockMutex(is->pictq_mutex);
1645         // Make sure there are no long delay timers (ideally we should just flush the queue, but that is harder)
1646         for (i = 0; i < VIDEO_PICTURE_QUEUE_SIZE; i++) {
1647             is->pictq[i].skip = 1;
1648         }
1649         while (is->pictq_size && !is->videoq.abort_request) {
1650             SDL_CondWait(is->pictq_cond, is->pictq_mutex);
1651         }
1652         is->video_current_pos = -1;
1653         is->frame_last_pts = AV_NOPTS_VALUE;
1654         is->frame_last_duration = 0;
1655         is->frame_timer = (double)av_gettime() / 1000000.0;
1656         is->frame_last_dropped_pts = AV_NOPTS_VALUE;
1657         SDL_UnlockMutex(is->pictq_mutex);
1658
1659         return 0;
1660     }
1661
1662     if (avcodec_decode_video2(is->video_st->codec, frame, &got_picture, pkt) < 0)
1663         return 0;
1664
1665     if (got_picture) {
1666         int ret = 1;
1667
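             /* decoder_reorder_pts selects which timestamp to trust: -1 lets
                av_frame_get_best_effort_timestamp() pick heuristically, 1 uses
                the decoder's reordered pkt_pts, 0 uses pkt_dts (descriptive note) */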
1668         if (decoder_reorder_pts == -1) {
1669             *pts = av_frame_get_best_effort_timestamp(frame);
1670         } else if (decoder_reorder_pts) {
1671             *pts = frame->pkt_pts;
1672         } else {
1673             *pts = frame->pkt_dts;
1674         }
1675
1676         if (*pts == AV_NOPTS_VALUE) {
1677             *pts = 0;
1678         }
1679
1680         if (framedrop>0 || (framedrop && get_master_sync_type(is) != AV_SYNC_VIDEO_MASTER)) {
1681             SDL_LockMutex(is->pictq_mutex);
1682             if (is->frame_last_pts != AV_NOPTS_VALUE && *pts) {
1683                 double clockdiff = get_video_clock(is) - get_master_clock(is);
1684                 double dpts = av_q2d(is->video_st->time_base) * *pts;
1685                 double ptsdiff = dpts - is->frame_last_pts;
1686                 if (fabs(clockdiff) < AV_NOSYNC_THRESHOLD &&
1687                      ptsdiff > 0 && ptsdiff < AV_NOSYNC_THRESHOLD &&
1688                      clockdiff + ptsdiff - is->frame_last_filter_delay < 0) {
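                         /* roughly: the video clock already trails the master clock and,
                            even after advancing by this frame's pts step minus the filter
                            delay, would still trail it, so drop the frame before queueing
                            (illustrative reading) */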
1689                     is->frame_last_dropped_pos = pkt->pos;
1690                     is->frame_last_dropped_pts = dpts;
1691                     is->frame_drops_early++;
1692                     ret = 0;
1693                 }
1694             }
1695             SDL_UnlockMutex(is->pictq_mutex);
1696         }
1697
1698         return ret;
1699     }
1700     return 0;
1701 }
1702
1703 #if CONFIG_AVFILTER
1704 static int configure_filtergraph(AVFilterGraph *graph, const char *filtergraph,
1705                                  AVFilterContext *source_ctx, AVFilterContext *sink_ctx)
1706 {
1707     int ret;
1708     AVFilterInOut *outputs = NULL, *inputs = NULL;
1709
1710     if (filtergraph) {
1711         outputs = avfilter_inout_alloc();
1712         inputs  = avfilter_inout_alloc();
1713         if (!outputs || !inputs) {
1714             ret = AVERROR(ENOMEM);
1715             goto fail;
1716         }
1717
1718         outputs->name       = av_strdup("in");
1719         outputs->filter_ctx = source_ctx;
1720         outputs->pad_idx    = 0;
1721         outputs->next       = NULL;
1722
1723         inputs->name        = av_strdup("out");
1724         inputs->filter_ctx  = sink_ctx;
1725         inputs->pad_idx     = 0;
1726         inputs->next        = NULL;
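             /* "in" is the open output of our buffer source and "out" the open
                input of our sink; avfilter_graph_parse() splices the user's
                filtergraph between the two (descriptive note) */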
1727
1728         if ((ret = avfilter_graph_parse(graph, filtergraph, &inputs, &outputs, NULL)) < 0)
1729             goto fail;
1730     } else {
1731         if ((ret = avfilter_link(source_ctx, 0, sink_ctx, 0)) < 0)
1732             goto fail;
1733     }
1734
1735     ret = avfilter_graph_config(graph, NULL);
1736 fail:
1737     avfilter_inout_free(&outputs);
1738     avfilter_inout_free(&inputs);
1739     return ret;
1740 }
1741
1742 static int configure_video_filters(AVFilterGraph *graph, VideoState *is, const char *vfilters, AVFrame *frame)
1743 {
1744     static const enum AVPixelFormat pix_fmts[] = { AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE };
1745     char sws_flags_str[128];
1746     char buffersrc_args[256];
1747     int ret;
1748     AVBufferSinkParams *buffersink_params = av_buffersink_params_alloc();
1749     AVFilterContext *filt_src = NULL, *filt_out = NULL, *filt_crop;
1750     AVCodecContext *codec = is->video_st->codec;
1751
1752     if (!buffersink_params)
1753         return AVERROR(ENOMEM);
1754
1755     snprintf(sws_flags_str, sizeof(sws_flags_str), "flags=%"PRId64, sws_flags);
1756     graph->scale_sws_opts = av_strdup(sws_flags_str);
1757
1758     snprintf(buffersrc_args, sizeof(buffersrc_args),
1759              "video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:pixel_aspect=%d/%d",
1760              frame->width, frame->height, frame->format,
1761              is->video_st->time_base.num, is->video_st->time_base.den,
1762              codec->sample_aspect_ratio.num, FFMAX(codec->sample_aspect_ratio.den, 1));
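         /* e.g. for a 1280x720 yuv420p stream with a 1/25 time base and square
            pixels this produces something like
            "video_size=1280x720:pix_fmt=0:time_base=1/25:pixel_aspect=1/1"
            (illustrative; pix_fmt 0 is AV_PIX_FMT_YUV420P) */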
1763
1764     if ((ret = avfilter_graph_create_filter(&filt_src,
1765                                             avfilter_get_by_name("buffer"),
1766                                             "ffplay_buffer", buffersrc_args, NULL,
1767                                             graph)) < 0)
1768         goto fail;
1769
1770     buffersink_params->pixel_fmts = pix_fmts;
1771     ret = avfilter_graph_create_filter(&filt_out,
1772                                        avfilter_get_by_name("ffbuffersink"),
1773                                        "ffplay_buffersink", NULL, buffersink_params, graph);
1774     if (ret < 0)
1775         goto fail;
1776
1777     /* The SDL YUV code does not handle odd width/height with some driver
1778      * combinations, therefore we crop the picture to an even width/height. */
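         /* e.g. an 853x480 picture is cropped to 852x480 (illustrative) */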
1779     if ((ret = avfilter_graph_create_filter(&filt_crop,
1780                                             avfilter_get_by_name("crop"),
1781                                             "ffplay_crop", "floor(in_w/2)*2:floor(in_h/2)*2", NULL, graph)) < 0)
1782         goto fail;
1783     if ((ret = avfilter_link(filt_crop, 0, filt_out, 0)) < 0)
1784         goto fail;
1785
1786     if ((ret = configure_filtergraph(graph, vfilters, filt_src, filt_crop)) < 0)
1787         goto fail;
1788
1789     is->in_video_filter  = filt_src;
1790     is->out_video_filter = filt_out;
1791
1792 fail:
1793     av_freep(&buffersink_params);
1794     return ret;
1795 }
1796
1797 #endif  /* CONFIG_AVFILTER */
1798
1799 static int video_thread(void *arg)
1800 {
1801     AVPacket pkt = { 0 };
1802     VideoState *is = arg;
1803     AVFrame *frame = avcodec_alloc_frame();
1804     int64_t pts_int = AV_NOPTS_VALUE, pos = -1;
1805     double pts;
1806     int ret;
1807     int serial = 0;
1808
1809 #if CONFIG_AVFILTER
1810     AVCodecContext *codec = is->video_st->codec;
1811     AVFilterGraph *graph = avfilter_graph_alloc();
1812     AVFilterContext *filt_out = NULL, *filt_in = NULL;
1813     int last_w = 0;
1814     int last_h = 0;
1815     enum AVPixelFormat last_format = -2;
1816
1817     if (codec->codec->capabilities & CODEC_CAP_DR1) {
1818         is->use_dr1 = 1;
1819         codec->get_buffer     = codec_get_buffer;
1820         codec->release_buffer = codec_release_buffer;
1821         codec->opaque         = &is->buffer_pool;
1822     }
1823 #endif
1824
1825     for (;;) {
1826 #if CONFIG_AVFILTER
1827         AVFilterBufferRef *picref;
1828         AVRational tb;
1829 #endif
1830         while (is->paused && !is->videoq.abort_request)
1831             SDL_Delay(10);
1832
1833         avcodec_get_frame_defaults(frame);
1834         av_free_packet(&pkt);
1835
1836         ret = get_video_frame(is, frame, &pts_int, &pkt, &serial);
1837         if (ret < 0)
1838             goto the_end;
1839
1840         if (!ret)
1841             continue;
1842
1843 #if CONFIG_AVFILTER
1844         if (   last_w != frame->width
1845             || last_h != frame->height
1846             || last_format != frame->format) {
1847             av_log(NULL, AV_LOG_INFO, "Frame changed from size:%dx%d to size:%dx%d\n",
1848                    last_w, last_h, frame->width, frame->height);
1849             avfilter_graph_free(&graph);
1850             graph = avfilter_graph_alloc();
1851             if ((ret = configure_video_filters(graph, is, vfilters, frame)) < 0) {
1852                 SDL_Event event;
1853                 event.type = FF_QUIT_EVENT;
1854                 event.user.data1 = is;
1855                 SDL_PushEvent(&event);
1856                 av_free_packet(&pkt);
1857                 goto the_end;
1858             }
1859             filt_in  = is->in_video_filter;
1860             filt_out = is->out_video_filter;
1861             last_w = frame->width;
1862             last_h = frame->height;
1863             last_format = frame->format;
1864         }
1865
1866         frame->pts = pts_int;
1867         frame->sample_aspect_ratio = av_guess_sample_aspect_ratio(is->ic, is->video_st, frame);
1868         if (is->use_dr1 && frame->opaque) {
1869             FrameBuffer      *buf = frame->opaque;
1870             AVFilterBufferRef *fb = avfilter_get_video_buffer_ref_from_arrays(
1871                                         frame->data, frame->linesize,
1872                                         AV_PERM_READ | AV_PERM_PRESERVE,
1873                                         frame->width, frame->height,
1874                                         frame->format);
1875
1876             avfilter_copy_frame_props(fb, frame);
1877             fb->buf->priv           = buf;
1878             fb->buf->free           = filter_release_buffer;
1879
1880             buf->refcount++;
1881             av_buffersrc_add_ref(filt_in, fb, AV_BUFFERSRC_FLAG_NO_COPY);
1882
1883         } else
1884             av_buffersrc_write_frame(filt_in, frame);
1885
1886         av_free_packet(&pkt);
1887
1888         while (ret >= 0) {
1889             is->frame_last_returned_time = av_gettime() / 1000000.0;
1890
1891             ret = av_buffersink_get_buffer_ref(filt_out, &picref, 0);
1892             if (ret < 0) {
1893                 ret = 0;
1894                 break;
1895             }
1896
1897             is->frame_last_filter_delay = av_gettime() / 1000000.0 - is->frame_last_returned_time;
1898             if (fabs(is->frame_last_filter_delay) > AV_NOSYNC_THRESHOLD / 10.0)
1899                 is->frame_last_filter_delay = 0;
1900
1901             avfilter_copy_buf_props(frame, picref);
1902
1903             pts_int = picref->pts;
1904             tb      = filt_out->inputs[0]->time_base;
1905             pos     = picref->pos;
1906             frame->opaque = picref;
1907
1908             if (av_cmp_q(tb, is->video_st->time_base)) {
1909                 av_unused int64_t pts1 = pts_int;
1910                 pts_int = av_rescale_q(pts_int, tb, is->video_st->time_base);
1911                 av_dlog(NULL, "video_thread(): "
1912                         "tb:%d/%d pts:%"PRId64" -> tb:%d/%d pts:%"PRId64"\n",
1913                         tb.num, tb.den, pts1,
1914                         is->video_st->time_base.num, is->video_st->time_base.den, pts_int);
1915             }
1916             pts = pts_int * av_q2d(is->video_st->time_base);
1917             ret = queue_picture(is, frame, pts, pos, serial);
1918         }
1919 #else
1920         pts = pts_int * av_q2d(is->video_st->time_base);
1921         ret = queue_picture(is, frame, pts, pkt.pos, serial);
1922 #endif
1923
1924         if (ret < 0)
1925             goto the_end;
1926
1927         if (is->step)
1928             stream_toggle_pause(is);
1929     }
1930  the_end:
1931     avcodec_flush_buffers(is->video_st->codec);
1932 #if CONFIG_AVFILTER
1933     avfilter_graph_free(&graph);
1934 #endif
1935     av_free_packet(&pkt);
1936     avcodec_free_frame(&frame);
1937     return 0;
1938 }
1939
1940 static int subtitle_thread(void *arg)
1941 {
1942     VideoState *is = arg;
1943     SubPicture *sp;
1944     AVPacket pkt1, *pkt = &pkt1;
1945     int got_subtitle;
1946     double pts;
1947     int i, j;
1948     int r, g, b, y, u, v, a;
1949
1950     for (;;) {
1951         while (is->paused && !is->subtitleq.abort_request) {
1952             SDL_Delay(10);
1953         }
1954         if (packet_queue_get(&is->subtitleq, pkt, 1, NULL) < 0)
1955             break;
1956
1957         if (pkt->data == flush_pkt.data) {
1958             avcodec_flush_buffers(is->subtitle_st->codec);
1959             continue;
1960         }
1961         SDL_LockMutex(is->subpq_mutex);
1962         while (is->subpq_size >= SUBPICTURE_QUEUE_SIZE &&
1963                !is->subtitleq.abort_request) {
1964             SDL_CondWait(is->subpq_cond, is->subpq_mutex);
1965         }
1966         SDL_UnlockMutex(is->subpq_mutex);
1967
1968         if (is->subtitleq.abort_request)
1969             return 0;
1970
1971         sp = &is->subpq[is->subpq_windex];
1972
1973         /* NOTE: pts is the PTS of the _first_ picture beginning in
1974            this packet, if any */
1975         pts = 0;
1976         if (pkt->pts != AV_NOPTS_VALUE)
1977             pts = av_q2d(is->subtitle_st->time_base) * pkt->pts;
1978
1979         avcodec_decode_subtitle2(is->subtitle_st->codec, &sp->sub,
1980                                  &got_subtitle, pkt);
1981         if (got_subtitle && sp->sub.format == 0) {
1982             if (sp->sub.pts != AV_NOPTS_VALUE)
1983                 pts = sp->sub.pts / (double)AV_TIME_BASE;
1984             sp->pts = pts;
1985
1986             for (i = 0; i < sp->sub.num_rects; i++)
1987             {
1988                 for (j = 0; j < sp->sub.rects[i]->nb_colors; j++)
1989                 {
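                         /* convert each palette entry in place from RGBA to YUVA using
                            CCIR 601 coefficients, since the blending code works on YUV
                            (descriptive note) */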
1990                     RGBA_IN(r, g, b, a, (uint32_t*)sp->sub.rects[i]->pict.data[1] + j);
1991                     y = RGB_TO_Y_CCIR(r, g, b);
1992                     u = RGB_TO_U_CCIR(r, g, b, 0);
1993                     v = RGB_TO_V_CCIR(r, g, b, 0);
1994                     YUVA_OUT((uint32_t*)sp->sub.rects[i]->pict.data[1] + j, y, u, v, a);
1995                 }
1996             }
1997
1998             /* now we can update the picture count */
1999             if (++is->subpq_windex == SUBPICTURE_QUEUE_SIZE)
2000                 is->subpq_windex = 0;
2001             SDL_LockMutex(is->subpq_mutex);
2002             is->subpq_size++;
2003             SDL_UnlockMutex(is->subpq_mutex);
2004         }
2005         av_free_packet(pkt);
2006     }
2007     return 0;
2008 }
2009
2010 /* copy samples for viewing in the audio display window */
2011 static void update_sample_display(VideoState *is, short *samples, int samples_size)
2012 {
2013     int size, len;
2014
2015     size = samples_size / sizeof(short);
2016     while (size > 0) {
2017         len = SAMPLE_ARRAY_SIZE - is->sample_array_index;
2018         if (len > size)
2019             len = size;
2020         memcpy(is->sample_array + is->sample_array_index, samples, len * sizeof(short));
2021         samples += len;
2022         is->sample_array_index += len;
2023         if (is->sample_array_index >= SAMPLE_ARRAY_SIZE)
2024             is->sample_array_index = 0;
2025         size -= len;
2026     }
2027 }
2028
2029 /* return the wanted number of samples to get better sync if sync_type is video
2030  * or external master clock */
2031 static int synchronize_audio(VideoState *is, int nb_samples)
2032 {
2033     int wanted_nb_samples = nb_samples;
2034
2035     /* if not master, then we try to remove or add samples to correct the clock */
2036     if (get_master_sync_type(is) != AV_SYNC_AUDIO_MASTER) {
2037         double diff, avg_diff;
2038         int min_nb_samples, max_nb_samples;
2039
2040         diff = get_audio_clock(is) - get_master_clock(is);
2041
2042         if (fabs(diff) < AV_NOSYNC_THRESHOLD) {
2043             is->audio_diff_cum = diff + is->audio_diff_avg_coef * is->audio_diff_cum;
2044             if (is->audio_diff_avg_count < AUDIO_DIFF_AVG_NB) {
2045                 /* not enough measurements for a reliable estimate */
2046                 is->audio_diff_avg_count++;
2047             } else {
2048                 /* estimate the A-V difference */
2049                 avg_diff = is->audio_diff_cum * (1.0 - is->audio_diff_avg_coef);
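                     /* audio_diff_cum is a geometric series of past differences, so
                        multiplying by (1 - coef) yields an exponentially weighted
                        average dominated by the last AUDIO_DIFF_AVG_NB values
                        (descriptive note) */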
2050
2051                 if (fabs(avg_diff) >= is->audio_diff_threshold) {
2052                     wanted_nb_samples = nb_samples + (int)(diff * is->audio_src.freq);
2053                     min_nb_samples = ((nb_samples * (100 - SAMPLE_CORRECTION_PERCENT_MAX) / 100));
2054                     max_nb_samples = ((nb_samples * (100 + SAMPLE_CORRECTION_PERCENT_MAX) / 100));
2055                     wanted_nb_samples = FFMIN(FFMAX(wanted_nb_samples, min_nb_samples), max_nb_samples);
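                         /* e.g. with nb_samples == 1024 and a 10% cap the correction is
                            clamped to the range [921, 1126] samples (illustrative) */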
2056                 }
2057                 av_dlog(NULL, "diff=%f adiff=%f sample_diff=%d apts=%0.3f vpts=%0.3f %f\n",
2058                         diff, avg_diff, wanted_nb_samples - nb_samples,
2059                         is->audio_clock, is->video_clock, is->audio_diff_threshold);
2060             }
2061         } else {
2062             /* the difference is too big: it may be due to initial PTS errors, so
2063                reset the A-V filter */
2064             is->audio_diff_avg_count = 0;
2065             is->audio_diff_cum       = 0;
2066         }
2067     }
2068
2069     return wanted_nb_samples;
2070 }
2071
2072 /* decode one audio frame and return its uncompressed size */
2073 static int audio_decode_frame(VideoState *is, double *pts_ptr)
2074 {
2075     AVPacket *pkt_temp = &is->audio_pkt_temp;
2076     AVPacket *pkt = &is->audio_pkt;
2077     AVCodecContext *dec = is->audio_st->codec;
2078     int len1, len2, data_size, resampled_data_size;
2079     int64_t dec_channel_layout;
2080     int got_frame;
2081     double pts;
2082     int new_packet = 0;
2083     int flush_complete = 0;
2084     int wanted_nb_samples;
2085
2086     for (;;) {
2087         /* NOTE: the audio packet can contain several frames */
2088         while (pkt_temp->size > 0 || (!pkt_temp->data && new_packet)) {
2089             if (!is->frame) {
2090                 if (!(is->frame = avcodec_alloc_frame()))
2091                     return AVERROR(ENOMEM);
2092             } else
2093                 avcodec_get_frame_defaults(is->frame);
2094
2095             if (is->paused)
2096                 return -1;
2097
2098             if (flush_complete)
2099                 break;
2100             new_packet = 0;
2101             len1 = avcodec_decode_audio4(dec, is->frame, &got_frame, pkt_temp);
2102             if (len1 < 0) {
2103                 /* on error, skip the frame */
2104                 pkt_temp->size = 0;
2105                 break;
2106             }
2107
2108             pkt_temp->data += len1;
2109             pkt_temp->size -= len1;
2110
2111             if (!got_frame) {
2112                 /* stop sending empty packets if the decoder is finished */
2113                 if (!pkt_temp->data && dec->codec->capabilities & CODEC_CAP_DELAY)
2114                     flush_complete = 1;
2115                 continue;
2116             }
2117             data_size = av_samples_get_buffer_size(NULL, is->frame->channels,
2118                                                    is->frame->nb_samples,
2119                                                    is->frame->format, 1);
2120
2121             dec_channel_layout =
2122                 (is->frame->channel_layout && is->frame->channels == av_get_channel_layout_nb_channels(is->frame->channel_layout)) ?
2123                 is->frame->channel_layout : av_get_default_channel_layout(is->frame->channels);
2124             wanted_nb_samples = synchronize_audio(is, is->frame->nb_samples);
2125
2126             if (is->frame->format        != is->audio_src.fmt            ||
2127                 dec_channel_layout       != is->audio_src.channel_layout ||
2128                 is->frame->sample_rate   != is->audio_src.freq           ||
2129                 (wanted_nb_samples       != is->frame->nb_samples && !is->swr_ctx)) {
2130                 swr_free(&is->swr_ctx);
2131                 is->swr_ctx = swr_alloc_set_opts(NULL,
2132                                                  is->audio_tgt.channel_layout, is->audio_tgt.fmt, is->audio_tgt.freq,
2133                                                  dec_channel_layout,           is->frame->format, is->frame->sample_rate,
2134                                                  0, NULL);
2135                 if (!is->swr_ctx || swr_init(is->swr_ctx) < 0) {
2136                     fprintf(stderr, "Cannot create sample rate converter for conversion of %d Hz %s %d channels to %d Hz %s %d channels!\n",
2137                         is->frame->sample_rate,   av_get_sample_fmt_name(is->frame->format), (int)is->frame->channels,
2138                         is->audio_tgt.freq, av_get_sample_fmt_name(is->audio_tgt.fmt), is->audio_tgt.channels);
2139                     break;
2140                 }
2141                 is->audio_src.channel_layout = dec_channel_layout;
2142                 is->audio_src.channels = is->frame->channels;
2143                 is->audio_src.freq = is->frame->sample_rate;
2144                 is->audio_src.fmt = is->frame->format;
2145             }
2146
2147             if (is->swr_ctx) {
2148                 const uint8_t **in = (const uint8_t **)is->frame->extended_data;
2149                 uint8_t *out[] = {is->audio_buf2};
2150                 int out_count = sizeof(is->audio_buf2) / is->audio_tgt.channels / av_get_bytes_per_sample(is->audio_tgt.fmt);
2151                 if (wanted_nb_samples != is->frame->nb_samples) {
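                         /* roughly: ask the resampler to spread the (wanted - decoded)
                            sample difference, rescaled to the output rate, over the next
                            wanted_nb_samples worth of output (illustrative description) */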
2152                     if (swr_set_compensation(is->swr_ctx, (wanted_nb_samples - is->frame->nb_samples) * is->audio_tgt.freq / is->frame->sample_rate,
2153                                                 wanted_nb_samples * is->audio_tgt.freq / is->frame->sample_rate) < 0) {
2154                         fprintf(stderr, "swr_set_compensation() failed\n");
2155                         break;
2156                     }
2157                 }
2158                 len2 = swr_convert(is->swr_ctx, out, out_count, in, is->frame->nb_samples);
2159                 if (len2 < 0) {
2160                     fprintf(stderr, "swr_convert() failed\n");
2161                     break;
2162                 }
2163                 if (len2 == out_count) {
2164                     fprintf(stderr, "warning: audio buffer is probably too small\n");
2165                     swr_init(is->swr_ctx);
2166                 }
2167                 is->audio_buf = is->audio_buf2;
2168                 resampled_data_size = len2 * is->audio_tgt.channels * av_get_bytes_per_sample(is->audio_tgt.fmt);
2169             } else {
2170                 is->audio_buf = is->frame->data[0];
2171                 resampled_data_size = data_size;
2172             }
2173
2174             /* derive the pts from the running audio clock, then advance the clock */
2175             pts = is->audio_clock;
2176             *pts_ptr = pts;
2177             is->audio_clock += (double)data_size /
2178                 (is->frame->channels * is->frame->sample_rate * av_get_bytes_per_sample(is->frame->format));
2179 #ifdef DEBUG
2180             {
2181                 static double last_clock;
2182                 printf("audio: delay=%0.3f clock=%0.3f pts=%0.3f\n",
2183                        is->audio_clock - last_clock,
2184                        is->audio_clock, pts);
2185                 last_clock = is->audio_clock;
2186             }
2187 #endif
2188             return resampled_data_size;
2189         }
2190
2191         /* free the current packet */
2192         if (pkt->data)
2193             av_free_packet(pkt);
2194         memset(pkt_temp, 0, sizeof(*pkt_temp));
2195
2196         if (is->paused || is->audioq.abort_request) {
2197             return -1;
2198         }
2199
2200         if (is->audioq.nb_packets == 0)
2201             SDL_CondSignal(is->continue_read_thread);
2202
2203         /* read next packet */
2204         if ((new_packet = packet_queue_get(&is->audioq, pkt, 1, &is->audio_pkt_temp_serial)) < 0)
2205             return -1;
2206
2207         if (pkt->data == flush_pkt.data) {
2208             avcodec_flush_buffers(dec);
2209             flush_complete = 0;
2210         }
2211
2212         *pkt_temp = *pkt;
2213
2214         /* update the audio clock with the packet pts, if available */
2215         if (pkt->pts != AV_NOPTS_VALUE) {
2216             is->audio_clock = av_q2d(is->audio_st->time_base) * pkt->pts;
2217         }
2218     }
2219 }
2220
2221 /* prepare a new audio buffer */
2222 static void sdl_audio_callback(void *opaque, Uint8 *stream, int len)
2223 {
2224     VideoState *is = opaque;
2225     int audio_size, len1;
2226     int bytes_per_sec;
2227     int frame_size = av_samples_get_buffer_size(NULL, is->audio_tgt.channels, 1, is->audio_tgt.fmt, 1);
2228     double pts;
2229
2230     audio_callback_time = av_gettime();
2231
2232     while (len > 0) {
2233         if (is->audio_buf_index >= is->audio_buf_size) {
2234             audio_size = audio_decode_frame(is, &pts);
2235             if (audio_size < 0) {
2236                 /* if error, just output silence */
2237                 is->audio_buf      = is->silence_buf;
2238                 is->audio_buf_size = sizeof(is->silence_buf) / frame_size * frame_size;
2239             } else {
2240                 if (is->show_mode != SHOW_MODE_VIDEO)
2241                     update_sample_display(is, (int16_t *)is->audio_buf, audio_size);
2242                 is->audio_buf_size = audio_size;
2243             }
2244             is->audio_buf_index = 0;
2245         }
2246         len1 = is->audio_buf_size - is->audio_buf_index;
2247         if (len1 > len)
2248             len1 = len;
2249         memcpy(stream, (uint8_t *)is->audio_buf + is->audio_buf_index, len1);
2250         len -= len1;
2251         stream += len1;
2252         is->audio_buf_index += len1;
2253     }
2254     bytes_per_sec = is->audio_tgt.freq * is->audio_tgt.channels * av_get_bytes_per_sample(is->audio_tgt.fmt);
2255     is->audio_write_buf_size = is->audio_buf_size - is->audio_buf_index;
2256     /* Let's assume the audio driver used by SDL has two periods. */
2257     is->audio_current_pts = is->audio_clock - (double)(2 * is->audio_hw_buf_size + is->audio_write_buf_size) / bytes_per_sec;
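         /* i.e. subtract the audio still sitting in the SDL/driver buffers plus our own
            unwritten bytes; e.g. at 44100 Hz stereo S16 (176400 bytes/s) an 8192-byte
            hardware buffer shifts the clock back by roughly 2 * 8192 / 176400 ~= 0.093 s
            (illustrative) */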
2258     is->audio_current_pts_drift = is->audio_current_pts - audio_callback_time / 1000000.0;
2259     if (is->audioq.serial == is->audio_pkt_temp_serial)
2260         check_external_clock_sync(is, is->audio_current_pts);
2261 }
2262
2263 static int audio_open(void *opaque, int64_t wanted_channel_layout, int wanted_nb_channels, int wanted_sample_rate, struct AudioParams *audio_hw_params)
2264 {
2265     SDL_AudioSpec wanted_spec, spec;
2266     const char *env;
2267     const int next_nb_channels[] = {0, 0, 1, 6, 2, 6, 4, 6};
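         /* fallback map for SDL_OpenAudio(): the index is the channel count that failed,
            the value is the next count to try, e.g. 7 -> 6 -> 4 -> 2 -> 1, with 0 meaning
            give up (descriptive note) */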
2268
2269     env = SDL_getenv("SDL_AUDIO_CHANNELS");
2270     if (env) {
2271         wanted_nb_channels = atoi(env);
2272         wanted_channel_layout = av_get_default_channel_layout(wanted_nb_channels);
2273     }
2274     if (!wanted_channel_layout || wanted_nb_channels != av_get_channel_layout_nb_channels(wanted_channel_layout)) {
2275         wanted_channel_layout = av_get_default_channel_layout(wanted_nb_channels);
2276         wanted_channel_layout &= ~AV_CH_LAYOUT_STEREO_DOWNMIX;
2277     }
2278     wanted_spec.channels = av_get_channel_layout_nb_channels(wanted_channel_layout);
2279     wanted_spec.freq = wanted_sample_rate;
2280     if (wanted_spec.freq <= 0 || wanted_spec.channels <= 0) {
2281         fprintf(stderr, "Invalid sample rate or channel count!\n");
2282         return -1;
2283     }
2284     wanted_spec.format = AUDIO_S16SYS;
2285     wanted_spec.silence = 0;
2286     wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE;
2287     wanted_spec.callback = sdl_audio_callback;
2288     wanted_spec.userdata = opaque;
2289     while (SDL_OpenAudio(&wanted_spec, &spec) < 0) {
2290         fprintf(stderr, "SDL_OpenAudio (%d channels): %s\n", wanted_spec.channels, SDL_GetError());
2291         wanted_spec.channels = next_nb_channels[FFMIN(7, wanted_spec.channels)];
2292         if (!wanted_spec.channels) {
2293             fprintf(stderr, "No more channel combinations to try, audio open failed\n");
2294             return -1;
2295         }
2296         wanted_channel_layout = av_get_default_channel_layout(wanted_spec.channels);
2297     }
2298     if (spec.format != AUDIO_S16SYS) {
2299         fprintf(stderr, "SDL advised audio format %d is not supported!\n", spec.format);
2300         return -1;
2301     }
2302     if (spec.channels != wanted_spec.channels) {
2303         wanted_channel_layout = av_get_default_channel_layout(spec.channels);
2304         if (!wanted_channel_layout) {
2305             fprintf(stderr, "SDL advised channel count %d is not supported!\n", spec.channels);
2306             return -1;
2307         }
2308     }
2309
2310     audio_hw_params->fmt = AV_SAMPLE_FMT_S16;
2311     audio_hw_params->freq = spec.freq;
2312     audio_hw_params->channel_layout = wanted_channel_layout;
2313     audio_hw_params->channels = spec.channels;
2314     return spec.size;
2315 }
2316
2317 /* open a given stream. Return 0 if OK */
2318 static int stream_component_open(VideoState *is, int stream_index)
2319 {
2320     AVFormatContext *ic = is->ic;
2321     AVCodecContext *avctx;
2322     AVCodec *codec;
2323     const char *forced_codec_name = NULL;
2324     AVDictionary *opts;
2325     AVDictionaryEntry *t = NULL;
2326
2327     if (stream_index < 0 || stream_index >= ic->nb_streams)
2328         return -1;
2329     avctx = ic->streams[stream_index]->codec;
2330
2331     codec = avcodec_find_decoder(avctx->codec_id);
2332
2333     switch(avctx->codec_type){
2334         case AVMEDIA_TYPE_AUDIO   : is->last_audio_stream    = stream_index; forced_codec_name =    audio_codec_name; break;
2335         case AVMEDIA_TYPE_SUBTITLE: is->last_subtitle_stream = stream_index; forced_codec_name = subtitle_codec_name; break;
2336         case AVMEDIA_TYPE_VIDEO   : is->last_video_stream    = stream_index; forced_codec_name =    video_codec_name; break;
2337     }
2338     if (forced_codec_name)
2339         codec = avcodec_find_decoder_by_name(forced_codec_name);
2340     if (!codec) {
2341         if (forced_codec_name) fprintf(stderr, "No codec could be found with name '%s'\n", forced_codec_name);
2342         else                   fprintf(stderr, "No codec could be found with id %d\n", avctx->codec_id);
2343         return -1;
2344     }
2345
2346     avctx->codec_id = codec->id;
2347     avctx->workaround_bugs   = workaround_bugs;
2348     avctx->lowres            = lowres;
2349     if (avctx->lowres > codec->max_lowres) {
2350         av_log(avctx, AV_LOG_WARNING, "The maximum value for lowres supported by the decoder is %d\n",
2351                 codec->max_lowres);
2352         avctx->lowres = codec->max_lowres;
2353     }
2354     avctx->idct_algo         = idct;
2355     avctx->skip_frame        = skip_frame;
2356     avctx->skip_idct         = skip_idct;
2357     avctx->skip_loop_filter  = skip_loop_filter;
2358     avctx->error_concealment = error_concealment;
2359
2360     if (avctx->lowres) avctx->flags |= CODEC_FLAG_EMU_EDGE;
2361     if (fast) avctx->flags2 |= CODEC_FLAG2_FAST;
2362     if (codec->capabilities & CODEC_CAP_DR1)
2363         avctx->flags |= CODEC_FLAG_EMU_EDGE;
2364
2365     opts = filter_codec_opts(codec_opts, avctx->codec_id, ic, ic->streams[stream_index], codec);
2366     if (!av_dict_get(opts, "threads", NULL, 0))
2367         av_dict_set(&opts, "threads", "auto", 0);
2368     if (avcodec_open2(avctx, codec, &opts) < 0)
2369         return -1;
2370     if ((t = av_dict_get(opts, "", NULL, AV_DICT_IGNORE_SUFFIX))) {
2371         av_log(NULL, AV_LOG_ERROR, "Option %s not found.\n", t->key);
2372         return AVERROR_OPTION_NOT_FOUND;
2373     }
2374
2375     /* prepare audio output */
2376     if (avctx->codec_type == AVMEDIA_TYPE_AUDIO) {
2377         int audio_hw_buf_size = audio_open(is, avctx->channel_layout, avctx->channels, avctx->sample_rate, &is->audio_src);
2378         if (audio_hw_buf_size < 0)
2379             return -1;
2380         is->audio_hw_buf_size = audio_hw_buf_size;
2381         is->audio_tgt = is->audio_src;
2382     }
2383
2384     ic->streams[stream_index]->discard = AVDISCARD_DEFAULT;
2385     switch (avctx->codec_type) {
2386     case AVMEDIA_TYPE_AUDIO:
2387         is->audio_stream = stream_index;
2388         is->audio_st = ic->streams[stream_index];
2389         is->audio_buf_size  = 0;
2390         is->audio_buf_index = 0;
2391
2392         /* init averaging filter */
2393         is->audio_diff_avg_coef  = exp(log(0.01) / AUDIO_DIFF_AVG_NB);
2394         is->audio_diff_avg_count = 0;
2395         /* since we do not have a precise enough audio FIFO fullness measure,
2396            we only correct audio sync when the error is larger than this threshold */
2397         is->audio_diff_threshold = 2.0 * is->audio_hw_buf_size / av_samples_get_buffer_size(NULL, is->audio_tgt.channels, is->audio_tgt.freq, is->audio_tgt.fmt, 1);
2398
2399         memset(&is->audio_pkt, 0, sizeof(is->audio_pkt));
2400         memset(&is->audio_pkt_temp, 0, sizeof(is->audio_pkt_temp));
2401         packet_queue_start(&is->audioq);
2402         SDL_PauseAudio(0);
2403         break;
2404     case AVMEDIA_TYPE_VIDEO:
2405         is->video_stream = stream_index;
2406         is->video_st = ic->streams[stream_index];
2407
2408         packet_queue_start(&is->videoq);
2409         is->video_tid = SDL_CreateThread(video_thread, is);
2410         break;
2411     case AVMEDIA_TYPE_SUBTITLE:
2412         is->subtitle_stream = stream_index;
2413         is->subtitle_st = ic->streams[stream_index];
2414         packet_queue_start(&is->subtitleq);
2415
2416         is->subtitle_tid = SDL_CreateThread(subtitle_thread, is);
2417         break;
2418     default:
2419         break;
2420     }
2421     return 0;
2422 }
2423
2424 static void stream_component_close(VideoState *is, int stream_index)
2425 {
2426     AVFormatContext *ic = is->ic;
2427     AVCodecContext *avctx;
2428
2429     if (stream_index < 0 || stream_index >= ic->nb_streams)
2430         return;
2431     avctx = ic->streams[stream_index]->codec;
2432
2433     switch (avctx->codec_type) {
2434     case AVMEDIA_TYPE_AUDIO:
2435         packet_queue_abort(&is->audioq);
2436
2437         SDL_CloseAudio();
2438
2439         packet_queue_flush(&is->audioq);
2440         av_free_packet(&is->audio_pkt);
2441         swr_free(&is->swr_ctx);
2442         av_freep(&is->audio_buf1);
2443         is->audio_buf = NULL;
2444         avcodec_free_frame(&is->frame);
2445
2446         if (is->rdft) {
2447             av_rdft_end(is->rdft);
2448             av_freep(&is->rdft_data);
2449             is->rdft = NULL;
2450             is->rdft_bits = 0;
2451         }
2452         break;
2453     case AVMEDIA_TYPE_VIDEO:
2454         packet_queue_abort(&is->videoq);
2455
2456         /* note: we also signal this condition to make sure we unblock the
2457            video thread in all cases */
2458         SDL_LockMutex(is->pictq_mutex);
2459         SDL_CondSignal(is->pictq_cond);
2460         SDL_UnlockMutex(is->pictq_mutex);
2461
2462         SDL_WaitThread(is->video_tid, NULL);
2463
2464         packet_queue_flush(&is->videoq);
2465         break;
2466     case AVMEDIA_TYPE_SUBTITLE:
2467         packet_queue_abort(&is->subtitleq);
2468
2469         /* note: we also signal this condition to make sure we unblock the
2470            subtitle thread in all cases */
2471         SDL_LockMutex(is->subpq_mutex);
2472         is->subtitle_stream_changed = 1;
2473
2474         SDL_CondSignal(is->subpq_cond);
2475         SDL_UnlockMutex(is->subpq_mutex);
2476
2477         SDL_WaitThread(is->subtitle_tid, NULL);
2478
2479         packet_queue_flush(&is->subtitleq);
2480         break;
2481     default:
2482         break;
2483     }
2484
2485     ic->streams[stream_index]->discard = AVDISCARD_ALL;
2486     avcodec_close(avctx);
2487 #if CONFIG_AVFILTER
2488     free_buffer_pool(&is->buffer_pool);
2489 #endif
2490     switch (avctx->codec_type) {
2491     case AVMEDIA_TYPE_AUDIO:
2492         is->audio_st = NULL;
2493         is->audio_stream = -1;
2494         break;
2495     case AVMEDIA_TYPE_VIDEO:
2496         is->video_st = NULL;
2497         is->video_stream = -1;
2498         break;
2499     case AVMEDIA_TYPE_SUBTITLE:
2500         is->subtitle_st = NULL;
2501         is->subtitle_stream = -1;
2502         break;
2503     default:
2504         break;
2505     }
2506 }
2507
2508 static int decode_interrupt_cb(void *ctx)
2509 {
2510     VideoState *is = ctx;
2511     return is->abort_request;
2512 }
2513
2514 static int is_realtime(AVFormatContext *s)
2515 {
2516     if(   !strcmp(s->iformat->name, "rtp")
2517        || !strcmp(s->iformat->name, "rtsp")
2518        || !strcmp(s->iformat->name, "sdp")
2519     )
2520         return 1;
2521
2522     if(s->pb && (   !strncmp(s->filename, "rtp:", 4)
2523                  || !strncmp(s->filename, "udp:", 4)
2524                 )
2525     )
2526         return 1;
2527     return 0;
2528 }
2529
2530 /* this thread gets the stream from the disk or the network */
2531 static int read_thread(void *arg)
2532 {
2533     VideoState *is = arg;
2534     AVFormatContext *ic = NULL;
2535     int err, i, ret;
2536     int st_index[AVMEDIA_TYPE_NB];
2537     AVPacket pkt1, *pkt = &pkt1;
2538     int eof = 0;
2539     int pkt_in_play_range = 0;
2540     AVDictionaryEntry *t;
2541     AVDictionary **opts;
2542     int orig_nb_streams;
2543     SDL_mutex *wait_mutex = SDL_CreateMutex();
2544
2545     memset(st_index, -1, sizeof(st_index));
2546     is->last_video_stream = is->video_stream = -1;
2547     is->last_audio_stream = is->audio_stream = -1;
2548     is->last_subtitle_stream = is->subtitle_stream = -1;
2549
2550     ic = avformat_alloc_context();
2551     ic->interrupt_callback.callback = decode_interrupt_cb;
2552     ic->interrupt_callback.opaque = is;
2553     err = avformat_open_input(&ic, is->filename, is->iformat, &format_opts);
2554     if (err < 0) {
2555         print_error(is->filename, err);
2556         ret = -1;
2557         goto fail;
2558     }
2559     if ((t = av_dict_get(format_opts, "", NULL, AV_DICT_IGNORE_SUFFIX))) {
2560         av_log(NULL, AV_LOG_ERROR, "Option %s not found.\n", t->key);
2561         ret = AVERROR_OPTION_NOT_FOUND;
2562         goto fail;
2563     }
2564     is->ic = ic;
2565
2566     if (genpts)
2567         ic->flags |= AVFMT_FLAG_GENPTS;
2568
2569     opts = setup_find_stream_info_opts(ic, codec_opts);
2570     orig_nb_streams = ic->nb_streams;
2571
2572     err = avformat_find_stream_info(ic, opts);
2573     if (err < 0) {
2574         fprintf(stderr, "%s: could not find codec parameters\n", is->filename);
2575         ret = -1;
2576         goto fail;
2577     }
2578     for (i = 0; i < orig_nb_streams; i++)
2579         av_dict_free(&opts[i]);
2580     av_freep(&opts);
2581
2582     if (ic->pb)
2583         ic->pb->eof_reached = 0; // FIXME hack, ffplay should probably not use url_feof() to test for the end
2584
2585     if (seek_by_bytes < 0)
2586         seek_by_bytes = !!(ic->iformat->flags & AVFMT_TS_DISCONT);
2587
2588     is->max_frame_duration = (ic->iformat->flags & AVFMT_TS_DISCONT) ? 10.0 : 3600.0;
2589
2590     /* if a seek was requested, execute it */
2591     if (start_time != AV_NOPTS_VALUE) {
2592         int64_t timestamp;
2593
2594         timestamp = start_time;
2595         /* add the stream start time */
2596         if (ic->start_time != AV_NOPTS_VALUE)
2597             timestamp += ic->start_time;
2598         ret = avformat_seek_file(ic, -1, INT64_MIN, timestamp, INT64_MAX, 0);
2599         if (ret < 0) {
2600             fprintf(stderr, "%s: could not seek to position %0.3f\n",
2601                     is->filename, (double)timestamp / AV_TIME_BASE);
2602         }
2603     }
2604
2605     is->realtime = is_realtime(ic);
2606
2607     for (i = 0; i < ic->nb_streams; i++)
2608         ic->streams[i]->discard = AVDISCARD_ALL;
2609     if (!video_disable)
2610         st_index[AVMEDIA_TYPE_VIDEO] =
2611             av_find_best_stream(ic, AVMEDIA_TYPE_VIDEO,
2612                                 wanted_stream[AVMEDIA_TYPE_VIDEO], -1, NULL, 0);
2613     if (!audio_disable)
2614         st_index[AVMEDIA_TYPE_AUDIO] =
2615             av_find_best_stream(ic, AVMEDIA_TYPE_AUDIO,
2616                                 wanted_stream[AVMEDIA_TYPE_AUDIO],
2617                                 st_index[AVMEDIA_TYPE_VIDEO],
2618                                 NULL, 0);
2619     if (!video_disable)
2620         st_index[AVMEDIA_TYPE_SUBTITLE] =
2621             av_find_best_stream(ic, AVMEDIA_TYPE_SUBTITLE,
2622                                 wanted_stream[AVMEDIA_TYPE_SUBTITLE],
2623                                 (st_index[AVMEDIA_TYPE_AUDIO] >= 0 ?
2624                                  st_index[AVMEDIA_TYPE_AUDIO] :
2625                                  st_index[AVMEDIA_TYPE_VIDEO]),
2626                                 NULL, 0);
2627     if (show_status) {
2628         av_dump_format(ic, 0, is->filename, 0);
2629     }
2630
2631     is->show_mode = show_mode;
2632
2633     /* open the streams */
2634     if (st_index[AVMEDIA_TYPE_AUDIO] >= 0) {
2635         stream_component_open(is, st_index[AVMEDIA_TYPE_AUDIO]);
2636     }
2637
2638     ret = -1;
2639     if (st_index[AVMEDIA_TYPE_VIDEO] >= 0) {
2640         ret = stream_component_open(is, st_index[AVMEDIA_TYPE_VIDEO]);
2641     }
2642     is->refresh_tid = SDL_CreateThread(refresh_thread, is);
2643     if (is->show_mode == SHOW_MODE_NONE)
2644         is->show_mode = ret >= 0 ? SHOW_MODE_VIDEO : SHOW_MODE_RDFT;
2645
2646     if (st_index[AVMEDIA_TYPE_SUBTITLE] >= 0) {
2647         stream_component_open(is, st_index[AVMEDIA_TYPE_SUBTITLE]);
2648     }
2649
2650     if (is->video_stream < 0 && is->audio_stream < 0) {
2651         fprintf(stderr, "%s: could not open codecs\n", is->filename);
2652         ret = -1;
2653         goto fail;
2654     }
2655
2656     if (infinite_buffer < 0 && is->realtime)
2657         infinite_buffer = 1;
2658
2659     for (;;) {
2660         if (is->abort_request)
2661             break;
2662         if (is->paused != is->last_paused) {
2663             is->last_paused = is->paused;
2664             if (is->paused)
2665                 is->read_pause_return = av_read_pause(ic);
2666             else
2667                 av_read_play(ic);
2668         }
2669 #if CONFIG_RTSP_DEMUXER || CONFIG_MMSH_PROTOCOL
2670         if (is->paused &&
2671                 (!strcmp(ic->iformat->name, "rtsp") ||
2672                  (ic->pb && !strncmp(input_filename, "mmsh:", 5)))) {
2673             /* wait 10 ms to avoid trying to get another packet */
2674             /* XXX: horrible */
2675             SDL_Delay(10);
2676             continue;
2677         }
2678 #endif
2679         if (is->seek_req) {
2680             int64_t seek_target = is->seek_pos;
2681             int64_t seek_min    = is->seek_rel > 0 ? seek_target - is->seek_rel + 2: INT64_MIN;
2682             int64_t seek_max    = is->seek_rel < 0 ? seek_target - is->seek_rel - 2: INT64_MAX;
2683 // FIXME the +-2 is due to rounding not being done in the correct direction when the
2684 //      seek_pos/seek_rel variables are generated
2685
2686             ret = avformat_seek_file(is->ic, -1, seek_min, seek_target, seek_max, is->seek_flags);
2687             if (ret < 0) {
2688                 fprintf(stderr, "%s: error while seeking\n", is->ic->filename);
2689             } else {
2690                 if (is->audio_stream >= 0) {
2691                     packet_queue_flush(&is->audioq);
2692                     packet_queue_put(&is->audioq, &flush_pkt);
2693                 }
2694                 if (is->subtitle_stream >= 0) {
2695                     packet_queue_flush(&is->subtitleq);
2696                     packet_queue_put(&is->subtitleq, &flush_pkt);
2697                 }
2698                 if (is->video_stream >= 0) {
2699                     packet_queue_flush(&is->videoq);
2700                     packet_queue_put(&is->videoq, &flush_pkt);
2701                 }
2702                 if (is->seek_flags & AVSEEK_FLAG_BYTE) {
2703                    //FIXME: use a cleaner way to signal obsolete external clock...
2704                    update_external_clock_pts(is, (double)AV_NOPTS_VALUE);
2705                 } else {
2706                    update_external_clock_pts(is, seek_target / (double)AV_TIME_BASE);
2707                 }
2708             }
2709             is->seek_req = 0;
2710             eof = 0;
2711         }
2712         if (is->que_attachments_req) {
2713             avformat_queue_attached_pictures(ic);
2714             is->que_attachments_req = 0;
2715         }
2716
2717         /* if the queues are full, no need to read more */
2718         if (infinite_buffer<1 &&
2719               (is->audioq.size + is->videoq.size + is->subtitleq.size > MAX_QUEUE_SIZE
2720             || (   (is->audioq   .nb_packets > MIN_FRAMES || is->audio_stream < 0 || is->audioq.abort_request)
2721                 && (is->videoq   .nb_packets > MIN_FRAMES || is->video_stream < 0 || is->videoq.abort_request)
2722                 && (is->subtitleq.nb_packets > MIN_FRAMES || is->subtitle_stream < 0 || is->subtitleq.abort_request)))) {
2723             /* wait 10 ms */
2724             SDL_LockMutex(wait_mutex);
2725             SDL_CondWaitTimeout(is->continue_read_thread, wait_mutex, 10);
2726             SDL_UnlockMutex(wait_mutex);
2727             continue;
2728         }
2729         if (eof) {
2730             if (is->video_stream >= 0) {
2731                 av_init_packet(pkt);
2732                 pkt->data = NULL;
2733                 pkt->size = 0;
2734                 pkt->stream_index = is->video_stream;
2735                 packet_queue_put(&is->videoq, pkt);
2736             }
2737             if (is->audio_stream >= 0 &&
2738                 is->audio_st->codec->codec->capabilities & CODEC_CAP_DELAY) {
2739                 av_init_packet(pkt);
2740                 pkt->data = NULL;
2741                 pkt->size = 0;
2742                 pkt->stream_index = is->audio_stream;
2743                 packet_queue_put(&is->audioq, pkt);
2744             }
2745             SDL_Delay(10);
2746             if (is->audioq.size + is->videoq.size + is->subtitleq.size == 0) {
2747                 if (loop != 1 && (!loop || --loop)) {
2748                     stream_seek(is, start_time != AV_NOPTS_VALUE ? start_time : 0, 0, 0);
2749                 } else if (autoexit) {
2750                     ret = AVERROR_EOF;
2751                     goto fail;
2752                 }
2753             }
2754             eof = 0;
2755             continue;
2756         }
2757         ret = av_read_frame(ic, pkt);
2758         if (ret < 0) {
2759             if (ret == AVERROR_EOF || url_feof(ic->pb))
2760                 eof = 1;
2761             if (ic->pb && ic->pb->error)
2762                 break;
2763             SDL_LockMutex(wait_mutex);
2764             SDL_CondWaitTimeout(is->continue_read_thread, wait_mutex, 10);
2765             SDL_UnlockMutex(wait_mutex);
2766             continue;
2767         }
2768         /* check if packet is in play range specified by user, then queue, otherwise discard */
2769         pkt_in_play_range = duration == AV_NOPTS_VALUE ||
2770                 (pkt->pts - ic->streams[pkt->stream_index]->start_time) *
2771                 av_q2d(ic->streams[pkt->stream_index]->time_base) -
2772                 (double)(start_time != AV_NOPTS_VALUE ? start_time : 0) / 1000000
2773                 <= ((double)duration / 1000000);
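             /* i.e. the packet pts, converted to seconds and made relative to the stream
                start and the requested start_time, must not exceed the requested duration
                (descriptive note) */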
2774         if (pkt->stream_index == is->audio_stream && pkt_in_play_range) {
2775             packet_queue_put(&is->audioq, pkt);
2776         } else if (pkt->stream_index == is->video_stream && pkt_in_play_range) {
2777             packet_queue_put(&is->videoq, pkt);
2778         } else if (pkt->stream_index == is->subtitle_stream && pkt_in_play_range) {
2779             packet_queue_put(&is->subtitleq, pkt);
2780         } else {
2781             av_free_packet(pkt);
2782         }
2783     }
2784     /* wait until the end */
2785     while (!is->abort_request) {
2786         SDL_Delay(100);
2787     }
2788
2789     ret = 0;
2790  fail:
2791     /* close each stream */
2792     if (is->audio_stream >= 0)
2793         stream_component_close(is, is->audio_stream);
2794     if (is->video_stream >= 0)
2795         stream_component_close(is, is->video_stream);
2796     if (is->subtitle_stream >= 0)
2797         stream_component_close(is, is->subtitle_stream);
2798     if (is->ic) {
2799         avformat_close_input(&is->ic);
2800     }
2801
2802     if (ret != 0) {
2803         SDL_Event event;
2804
2805         event.type = FF_QUIT_EVENT;
2806         event.user.data1 = is;
2807         SDL_PushEvent(&event);
2808     }
2809     SDL_DestroyMutex(wait_mutex);
2810     return 0;
2811 }
2812
2813 static VideoState *stream_open(const char *filename, AVInputFormat *iformat)
2814 {
2815     VideoState *is;
2816
2817     is = av_mallocz(sizeof(VideoState));
2818     if (!is)
2819         return NULL;
2820     av_strlcpy(is->filename, filename, sizeof(is->filename));
2821     is->iformat = iformat;
2822     is->ytop    = 0;
2823     is->xleft   = 0;
2824
2825     /* start video display */
2826     is->pictq_mutex = SDL_CreateMutex();
2827     is->pictq_cond  = SDL_CreateCond();
2828
2829     is->subpq_mutex = SDL_CreateMutex();
2830     is->subpq_cond  = SDL_CreateCond();
2831
2832     packet_queue_init(&is->videoq);
2833     packet_queue_init(&is->audioq);
2834     packet_queue_init(&is->subtitleq);
2835
2836     is->continue_read_thread = SDL_CreateCond();
2837
2838     update_external_clock_pts(is, 0.0);
2839     update_external_clock_speed(is, 1.0);
2840     is->audio_current_pts_drift = -av_gettime() / 1000000.0;
2841     is->video_current_pts_drift = is->audio_current_pts_drift;
2842     is->av_sync_type = av_sync_type;
2843     is->read_tid     = SDL_CreateThread(read_thread, is);
2844     if (!is->read_tid) {
2845         av_free(is);
2846         return NULL;
2847     }
2848     return is;
2849 }
2850
2851 static void stream_cycle_channel(VideoState *is, int codec_type)
2852 {
2853     AVFormatContext *ic = is->ic;
2854     int start_index, stream_index;
2855     int old_index;
2856     AVStream *st;
2857
2858     if (codec_type == AVMEDIA_TYPE_VIDEO) {
2859         start_index = is->last_video_stream;
2860         old_index = is->video_stream;
2861     } else if (codec_type == AVMEDIA_TYPE_AUDIO) {
2862         start_index = is->last_audio_stream;
2863         old_index = is->audio_stream;
2864     } else {
2865         start_index = is->last_subtitle_stream;
2866         old_index = is->subtitle_stream;
2867     }
2868     stream_index = start_index;
2869     for (;;) {
2870         if (++stream_index >= is->ic->nb_streams)
2871         {
2872             if (codec_type == AVMEDIA_TYPE_SUBTITLE)
2873             {
2874                 stream_index = -1;
2875                 is->last_subtitle_stream = -1;
2876                 goto the_end;
2877             }
2878             if (start_index == -1)
2879                 return;
2880             stream_index = 0;
2881         }
2882         if (stream_index == start_index)
2883             return;
2884         st = ic->streams[stream_index];
2885         if (st->codec->codec_type == codec_type) {
2886             /* check that parameters are OK */
2887             switch (codec_type) {
2888             case AVMEDIA_TYPE_AUDIO:
2889                 if (st->codec->sample_rate != 0 &&
2890                     st->codec->channels != 0)
2891                     goto the_end;
2892                 break;
2893             case AVMEDIA_TYPE_VIDEO:
2894             case AVMEDIA_TYPE_SUBTITLE:
2895                 goto the_end;
2896             default:
2897                 break;
2898             }
2899         }
2900     }
2901  the_end:
2902     stream_component_close(is, old_index);
2903     stream_component_open(is, stream_index);
2904     if (codec_type == AVMEDIA_TYPE_VIDEO)
2905         is->que_attachments_req = 1;
2906 }
2907
2908
2909 static void toggle_full_screen(VideoState *is)
2910 {
2911 #if defined(__APPLE__) && SDL_VERSION_ATLEAST(1, 2, 14)
2912     /* OS X needs to reallocate the SDL overlays */
2913     int i;
2914     for (i = 0; i < VIDEO_PICTURE_QUEUE_SIZE; i++)
2915         is->pictq[i].reallocate = 1;
2916 #endif
2917     is_full_screen = !is_full_screen;
2918     video_open(is, 1);
2919 }
2920
2921 static void toggle_pause(VideoState *is)
2922 {
2923     stream_toggle_pause(is);
2924     is->step = 0;
2925 }
2926
2927 static void step_to_next_frame(VideoState *is)
2928 {
2929     /* if the stream is paused, unpause it, then step */
2930     if (is->paused)
2931         stream_toggle_pause(is);
2932     is->step = 1;
2933 }
2934
2935 static void toggle_audio_display(VideoState *is)
2936 {
2937     int bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00);
2938     is->show_mode = (is->show_mode + 1) % SHOW_MODE_NB;
2939     fill_rectangle(screen,
2940                 is->xleft, is->ytop, is->width, is->height,
2941                 bgcolor, 1);
2942 }
2943
2944 /* handle an event sent by the GUI */
2945 static void event_loop(VideoState *cur_stream)
2946 {
2947     SDL_Event event;
2948     double incr, pos, frac;
2949
2950     for (;;) {
2951         double x;
2952         SDL_WaitEvent(&event);
2953         switch (event.type) {
2954         case SDL_KEYDOWN:
2955             if (exit_on_keydown) {
2956                 do_exit(cur_stream);
2957                 break;
2958             }
2959             switch (event.key.keysym.sym) {
2960             case SDLK_ESCAPE:
2961             case SDLK_q:
2962                 do_exit(cur_stream);
2963                 break;
2964             case SDLK_f:
2965                 toggle_full_screen(cur_stream);
2966                 cur_stream->force_refresh = 1;
2967                 break;
2968             case SDLK_p:
2969             case SDLK_SPACE:
2970                 toggle_pause(cur_stream);
2971                 break;
2972             case SDLK_s: // S: Step to next frame
2973                 step_to_next_frame(cur_stream);
2974                 break;
2975             case SDLK_a:
2976                 stream_cycle_channel(cur_stream, AVMEDIA_TYPE_AUDIO);
2977                 break;
2978             case SDLK_v:
2979                 stream_cycle_channel(cur_stream, AVMEDIA_TYPE_VIDEO);
2980                 break;
2981             case SDLK_t:
2982                 stream_cycle_channel(cur_stream, AVMEDIA_TYPE_SUBTITLE);
2983                 break;
2984             case SDLK_w:
2985                 toggle_audio_display(cur_stream);
2986                 cur_stream->force_refresh = 1;
2987                 break;
2988             case SDLK_PAGEUP:
2989                 incr = 600.0;
2990                 goto do_seek;
2991             case SDLK_PAGEDOWN:
2992                 incr = -600.0;
2993                 goto do_seek;
2994             case SDLK_LEFT:
2995                 incr = -10.0;
2996                 goto do_seek;
2997             case SDLK_RIGHT:
2998                 incr = 10.0;
2999                 goto do_seek;
3000             case SDLK_UP:
3001                 incr = 60.0;
3002                 goto do_seek;
3003             case SDLK_DOWN:
3004                 incr = -60.0;
3005             do_seek:
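                /* Relative seek: when seeking by bytes, convert the time
                 * increment to a byte increment using the container bit rate
                 * (or a 180 kB/s fallback); otherwise seek by time relative
                 * to the master clock. */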
3006                 if (seek_by_bytes) {
3007                     if (cur_stream->video_stream >= 0 && cur_stream->video_current_pos >= 0) {
3008                         pos = cur_stream->video_current_pos;
3009                     } else if (cur_stream->audio_stream >= 0 && cur_stream->audio_pkt.pos >= 0) {
3010                         pos = cur_stream->audio_pkt.pos;
3011                     } else
3012                         pos = avio_tell(cur_stream->ic->pb);
3013                     if (cur_stream->ic->bit_rate)
3014                         incr *= cur_stream->ic->bit_rate / 8.0;
3015                     else
3016                         incr *= 180000.0;
3017                     pos += incr;
3018                     stream_seek(cur_stream, pos, incr, 1);
3019                 } else {
3020                     pos = get_master_clock(cur_stream);
3021                     pos += incr;
3022                     if (cur_stream->ic->start_time != AV_NOPTS_VALUE && pos < cur_stream->ic->start_time / (double)AV_TIME_BASE)
3023                         pos = cur_stream->ic->start_time / (double)AV_TIME_BASE;
3024                     stream_seek(cur_stream, (int64_t)(pos * AV_TIME_BASE), (int64_t)(incr * AV_TIME_BASE), 0);
3025                 }
3026                 break;
3027             default:
3028                 break;
3029             }
3030             break;
3031         case SDL_VIDEOEXPOSE:
3032             cur_stream->force_refresh = 1;
3033             break;
3034         case SDL_MOUSEBUTTONDOWN:
3035             if (exit_on_mousedown) {
3036                 do_exit(cur_stream);
3037                 break;
3038             }
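            /* fall through: a mouse click is handled by the same seeking code as mouse motion */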
3039         case SDL_MOUSEMOTION:
3040             if (event.type == SDL_MOUSEBUTTONDOWN) {
3041                 x = event.button.x;
3042             } else {
3043                 if (event.motion.state != SDL_PRESSED)
3044                     break;
3045                 x = event.motion.x;
3046             }
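            /* Seek to the fraction of the file given by the horizontal click
             * position: by bytes when the duration is unknown, otherwise by time. */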
3047             if (seek_by_bytes || cur_stream->ic->duration <= 0) {
3048                 uint64_t size = avio_size(cur_stream->ic->pb);
3049                 stream_seek(cur_stream, size*x/cur_stream->width, 0, 1);
3050             } else {
3051                 int64_t ts;
3052                 int ns, hh, mm, ss;
3053                 int tns, thh, tmm, tss;
3054                 tns  = cur_stream->ic->duration / 1000000LL;
3055                 thh  = tns / 3600;
3056                 tmm  = (tns % 3600) / 60;
3057                 tss  = (tns % 60);
3058                 frac = x / cur_stream->width;
3059                 ns   = frac * tns;
3060                 hh   = ns / 3600;
3061                 mm   = (ns % 3600) / 60;
3062                 ss   = (ns % 60);
3063                 fprintf(stderr, "Seek to %2.0f%% (%2d:%02d:%02d) of total duration (%2d:%02d:%02d)       \n", frac*100,
3064                         hh, mm, ss, thh, tmm, tss);
3065                 ts = frac * cur_stream->ic->duration;
3066                 if (cur_stream->ic->start_time != AV_NOPTS_VALUE)
3067                     ts += cur_stream->ic->start_time;
3068                 stream_seek(cur_stream, ts, 0, 0);
3069             }
3070             break;
3071         case SDL_VIDEORESIZE:
3072             screen = SDL_SetVideoMode(event.resize.w, event.resize.h, 0,
3073                                       SDL_HWSURFACE|SDL_RESIZABLE|SDL_ASYNCBLIT|SDL_HWACCEL);
3074             screen_width  = cur_stream->width  = event.resize.w;
3075             screen_height = cur_stream->height = event.resize.h;
3076             cur_stream->force_refresh = 1;
3077             break;
3078         case SDL_QUIT:
3079         case FF_QUIT_EVENT:
3080             do_exit(cur_stream);
3081             break;
3082         case FF_ALLOC_EVENT:
3083             alloc_picture(event.user.data1);
3084             break;
3085         case FF_REFRESH_EVENT:
3086             video_refresh(event.user.data1);
3087             cur_stream->refresh = 0;
3088             break;
3089         default:
3090             break;
3091         }
3092     }
3093 }
3094
3095 static int opt_frame_size(void *optctx, const char *opt, const char *arg)
3096 {
3097     av_log(NULL, AV_LOG_WARNING, "Option -s is deprecated, use -video_size.\n");
3098     return opt_default(NULL, "video_size", arg);
3099 }
3100
3101 static int opt_width(void *optctx, const char *opt, const char *arg)
3102 {
3103     screen_width = parse_number_or_die(opt, arg, OPT_INT64, 1, INT_MAX);
3104     return 0;
3105 }
3106
3107 static int opt_height(void *optctx, const char *opt, const char *arg)
3108 {
3109     screen_height = parse_number_or_die(opt, arg, OPT_INT64, 1, INT_MAX);
3110     return 0;
3111 }
3112
3113 static int opt_format(void *optctx, const char *opt, const char *arg)
3114 {
3115     file_iformat = av_find_input_format(arg);
3116     if (!file_iformat) {
3117         fprintf(stderr, "Unknown input format: %s\n", arg);
3118         return AVERROR(EINVAL);
3119     }
3120     return 0;
3121 }
3122
3123 static int opt_frame_pix_fmt(void *optctx, const char *opt, const char *arg)
3124 {
3125     av_log(NULL, AV_LOG_WARNING, "Option -pix_fmt is deprecated, use -pixel_format.\n");
3126     return opt_default(NULL, "pixel_format", arg);
3127 }
3128
3129 static int opt_sync(void *optctx, const char *opt, const char *arg)
3130 {
3131     if (!strcmp(arg, "audio"))
3132         av_sync_type = AV_SYNC_AUDIO_MASTER;
3133     else if (!strcmp(arg, "video"))
3134         av_sync_type = AV_SYNC_VIDEO_MASTER;
3135     else if (!strcmp(arg, "ext"))
3136         av_sync_type = AV_SYNC_EXTERNAL_CLOCK;
3137     else {
3138         fprintf(stderr, "Unknown value for %s: %s\n", opt, arg);
3139         exit(1);
3140     }
3141     return 0;
3142 }
3143
3144 static int opt_seek(void *optctx, const char *opt, const char *arg)
3145 {
3146     start_time = parse_time_or_die(opt, arg, 1);
3147     return 0;
3148 }
3149
3150 static int opt_duration(void *optctx, const char *opt, const char *arg)
3151 {
3152     duration = parse_time_or_die(opt, arg, 1);
3153     return 0;
3154 }
3155
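/* Parse -showmode: accepts the symbolic names "video", "waves" and "rdft" as
 * well as a numeric mode index. */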
3156 static int opt_show_mode(void *optctx, const char *opt, const char *arg)
3157 {
3158     show_mode = !strcmp(arg, "video") ? SHOW_MODE_VIDEO :
3159                 !strcmp(arg, "waves") ? SHOW_MODE_WAVES :
3160                 !strcmp(arg, "rdft" ) ? SHOW_MODE_RDFT  :
3161                 parse_number_or_die(opt, arg, OPT_INT, 0, SHOW_MODE_NB-1);
3162     return 0;
3163 }
3164
3165 static void opt_input_file(void *optctx, const char *filename)
3166 {
3167     if (input_filename) {
3168         fprintf(stderr, "Argument '%s' provided as input filename, but '%s' was already specified.\n",
3169                 filename, input_filename);
3170         exit(1);
3171     }
3172     if (!strcmp(filename, "-"))
3173         filename = "pipe:";
3174     input_filename = filename;
3175 }
3176
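/* Handle "-codec:X name", where X selects the stream type; for example
 * "-codec:a ac3" forces the ac3 audio decoder. */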
3177 static int opt_codec(void *optctx, const char *opt, const char *arg)
3178 {
3179     const char *spec = strchr(opt, ':');
3180     if (!spec) {
3181         fprintf(stderr, "No media specifier was specified in '%s' in option '%s'\n",
3182                 arg, opt);
3183         return AVERROR(EINVAL);
3184     }
3185     spec++;
3186     switch (spec[0]) {
3187     case 'a':    audio_codec_name = arg; break;
3188     case 's': subtitle_codec_name = arg; break;
3189     case 'v':    video_codec_name = arg; break;
3190     default:
3191         fprintf(stderr, "Invalid media specifier '%s' in option '%s'\n", spec, opt);
3192         return AVERROR(EINVAL);
3193     }
3194     return 0;
3195 }
3196
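/* Dummy storage for the "-i" option below; the flag itself does nothing, but it
 * lets an ffmpeg-style "-i input_file" command line be accepted. */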
3197 static int dummy;
3198
3199 static const OptionDef options[] = {
3200 #include "cmdutils_common_opts.h"
3201     { "x", HAS_ARG, { .func_arg = opt_width }, "force displayed width", "width" },
3202     { "y", HAS_ARG, { .func_arg = opt_height }, "force displayed height", "height" },
3203     { "s", HAS_ARG | OPT_VIDEO, { .func_arg = opt_frame_size }, "set frame size (WxH or abbreviation)", "size" },
3204     { "fs", OPT_BOOL, { &is_full_screen }, "force full screen" },
3205     { "an", OPT_BOOL, { &audio_disable }, "disable audio" },
3206     { "vn", OPT_BOOL, { &video_disable }, "disable video" },
3207     { "ast", OPT_INT | HAS_ARG | OPT_EXPERT, { &wanted_stream[AVMEDIA_TYPE_AUDIO] }, "select desired audio stream", "stream_number" },
3208     { "vst", OPT_INT | HAS_ARG | OPT_EXPERT, { &wanted_stream[AVMEDIA_TYPE_VIDEO] }, "select desired video stream", "stream_number" },
3209     { "sst", OPT_INT | HAS_ARG | OPT_EXPERT, { &wanted_stream[AVMEDIA_TYPE_SUBTITLE] }, "select desired subtitle stream", "stream_number" },
3210     { "ss", HAS_ARG, { .func_arg = opt_seek }, "seek to a given position in seconds", "pos" },
3211     { "t", HAS_ARG, { .func_arg = opt_duration }, "play \"duration\" seconds of audio/video", "duration" },
3212     { "bytes", OPT_INT | HAS_ARG, { &seek_by_bytes }, "seek by bytes 0=off 1=on -1=auto", "val" },
3213     { "nodisp", OPT_BOOL, { &display_disable }, "disable graphical display" },
3214     { "f", HAS_ARG, { .func_arg = opt_format }, "force format", "fmt" },
3215     { "pix_fmt", HAS_ARG | OPT_EXPERT | OPT_VIDEO, { .func_arg = opt_frame_pix_fmt }, "set pixel format", "format" },
3216     { "stats", OPT_BOOL | OPT_EXPERT, { &show_status }, "show status", "" },
3217     { "bug", OPT_INT | HAS_ARG | OPT_EXPERT, { &workaround_bugs }, "workaround bugs", "" },
3218     { "fast", OPT_BOOL | OPT_EXPERT, { &fast }, "non spec compliant optimizations", "" },
3219     { "genpts", OPT_BOOL | OPT_EXPERT, { &genpts }, "generate pts", "" },
3220     { "drp", OPT_INT | HAS_ARG | OPT_EXPERT, { &decoder_reorder_pts }, "let decoder reorder pts 0=off 1=on -1=auto", ""},
3221     { "lowres", OPT_INT | HAS_ARG | OPT_EXPERT, { &lowres }, "", "" },
3222     { "skiploop", OPT_INT | HAS_ARG | OPT_EXPERT, { &skip_loop_filter }, "", "" },
3223     { "skipframe", OPT_INT | HAS_ARG | OPT_EXPERT, { &skip_frame }, "", "" },
3224     { "skipidct", OPT_INT | HAS_ARG | OPT_EXPERT, { &skip_idct }, "", "" },
3225     { "idct", OPT_INT | HAS_ARG | OPT_EXPERT, { &idct }, "set idct algo",  "algo" },
3226     { "ec", OPT_INT | HAS_ARG | OPT_EXPERT, { &error_concealment }, "set error concealment options",  "bit_mask" },
3227     { "sync", HAS_ARG | OPT_EXPERT, { .func_arg = opt_sync }, "set audio-video sync. type (type=audio/video/ext)", "type" },
3228     { "autoexit", OPT_BOOL | OPT_EXPERT, { &autoexit }, "exit at the end", "" },
3229     { "exitonkeydown", OPT_BOOL | OPT_EXPERT, { &exit_on_keydown }, "exit on key down", "" },
3230     { "exitonmousedown", OPT_BOOL | OPT_EXPERT, { &exit_on_mousedown }, "exit on mouse down", "" },
3231     { "loop", OPT_INT | HAS_ARG | OPT_EXPERT, { &loop }, "set number of times the playback shall be looped", "loop count" },
3232     { "framedrop", OPT_BOOL | OPT_EXPERT, { &framedrop }, "drop frames when cpu is too slow", "" },
3233     { "infbuf", OPT_BOOL | OPT_EXPERT, { &infinite_buffer }, "don't limit the input buffer size (useful with realtime streams)", "" },
3234     { "window_title", OPT_STRING | HAS_ARG, { &window_title }, "set window title", "window title" },
3235 #if CONFIG_AVFILTER
3236     { "vf", OPT_STRING | HAS_ARG, { &vfilters }, "video filters", "filter list" },
3237 #endif
3238     { "rdftspeed", OPT_INT | HAS_ARG| OPT_AUDIO | OPT_EXPERT, { &rdftspeed }, "rdft speed", "msecs" },
3239     { "showmode", HAS_ARG, { .func_arg = opt_show_mode}, "select show mode (0 = video, 1 = waves, 2 = RDFT)", "mode" },
3240     { "default", HAS_ARG | OPT_AUDIO | OPT_VIDEO | OPT_EXPERT, { .func_arg = opt_default }, "generic catch all option", "" },
3241     { "i", OPT_BOOL, { &dummy}, "read specified file", "input_file"},
3242     { "codec", HAS_ARG, { .func_arg = opt_codec}, "force decoder", "decoder_name" },
3243     { "acodec", HAS_ARG | OPT_STRING | OPT_EXPERT, {    &audio_codec_name }, "force audio decoder",    "decoder_name" },
3244     { "scodec", HAS_ARG | OPT_STRING | OPT_EXPERT, { &subtitle_codec_name }, "force subtitle decoder", "decoder_name" },
3245     { "vcodec", HAS_ARG | OPT_STRING | OPT_EXPERT, {    &video_codec_name }, "force video decoder",    "decoder_name" },
3246     { NULL, },
3247 };
3248
3249 static void show_usage(void)
3250 {
3251     av_log(NULL, AV_LOG_INFO, "Simple media player\n");
3252     av_log(NULL, AV_LOG_INFO, "usage: %s [options] input_file\n", program_name);
3253     av_log(NULL, AV_LOG_INFO, "\n");
3254 }
3255
3256 void show_help_default(const char *opt, const char *arg)
3257 {
3258     av_log_set_callback(log_callback_help);
3259     show_usage();
3260     show_help_options(options, "Main options:", 0, OPT_EXPERT, 0);
3261     show_help_options(options, "Advanced options:", OPT_EXPERT, 0, 0);
3262     printf("\n");
3263     show_help_children(avcodec_get_class(), AV_OPT_FLAG_DECODING_PARAM);
3264     show_help_children(avformat_get_class(), AV_OPT_FLAG_DECODING_PARAM);
3265 #if !CONFIG_AVFILTER
3266     show_help_children(sws_get_class(), AV_OPT_FLAG_ENCODING_PARAM);
3267 #else
3268     show_help_children(avfilter_get_class(), AV_OPT_FLAG_FILTERING_PARAM);
3269 #endif
3270     printf("\nWhile playing:\n"
3271            "q, ESC              quit\n"
3272            "f                   toggle full screen\n"
3273            "p, SPC              pause\n"
3274            "a                   cycle audio channel\n"
3275            "v                   cycle video channel\n"
3276            "t                   cycle subtitle channel\n"
3277            "w                   show audio waves\n"
3278            "s                   activate frame-step mode\n"
3279            "left/right          seek backward/forward 10 seconds\n"
3280            "down/up             seek backward/forward 1 minute\n"
3281            "page down/page up   seek backward/forward 10 minutes\n"
3282            "mouse click         seek to percentage in file corresponding to fraction of width\n"
3283            );
3284 }
3285
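/* Lock manager callback registered with av_lockmgr_register(): libavcodec calls
 * it to create, obtain, release and destroy the mutexes that protect its
 * non-thread-safe internals; it must return 0 on success and nonzero on failure. */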
3286 static int lockmgr(void **mtx, enum AVLockOp op)
3287 {
3288     switch (op) {
3289     case AV_LOCK_CREATE:
3290         *mtx = SDL_CreateMutex();
3291         if (!*mtx)
3292             return 1;
3293         return 0;
3294     case AV_LOCK_OBTAIN:
3295         return !!SDL_LockMutex(*mtx);
3296     case AV_LOCK_RELEASE:
3297         return !!SDL_UnlockMutex(*mtx);
3298     case AV_LOCK_DESTROY:
3299         SDL_DestroyMutex(*mtx);
3300         return 0;
3301     }
3302     return 1;
3303 }
3304
3305 /* program entry point */
3306 int main(int argc, char **argv)
3307 {
3308     int flags;
3309     VideoState *is;
3310     char dummy_videodriver[] = "SDL_VIDEODRIVER=dummy";
3311
3312     av_log_set_flags(AV_LOG_SKIP_REPEATED);
3313     parse_loglevel(argc, argv, options);
3314
3315     /* register all codecs, demuxers and protocols */
3316     avcodec_register_all();
3317 #if CONFIG_AVDEVICE
3318     avdevice_register_all();
3319 #endif
3320 #if CONFIG_AVFILTER
3321     avfilter_register_all();
3322 #endif
3323     av_register_all();
3324     avformat_network_init();
3325
3326     init_opts();
3327
3328     signal(SIGINT , sigterm_handler); /* Interrupt (ANSI).    */
3329     signal(SIGTERM, sigterm_handler); /* Termination (ANSI).  */
3330
3331     show_banner(argc, argv, options);
3332
3333     parse_options(NULL, argc, argv, options, opt_input_file);
3334
3335     if (!input_filename) {
3336         show_usage();
3337         fprintf(stderr, "An input file must be specified\n");
3338         fprintf(stderr, "Use -h to get full help or, even better, run 'man %s'\n", program_name);
3339         exit(1);
3340     }
3341
3342     if (display_disable) {
3343         video_disable = 1;
3344     }
3345     flags = SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER;
3346     if (audio_disable)
3347         flags &= ~SDL_INIT_AUDIO;
3348     if (display_disable)
3349         SDL_putenv(dummy_videodriver); /* For the event queue, we always need a video driver. */
3350 #if !defined(__MINGW32__) && !defined(__APPLE__)
3351     flags |= SDL_INIT_EVENTTHREAD; /* Not supported on Windows or Mac OS X */
3352 #endif
3353     if (SDL_Init (flags)) {
3354         fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
3355         fprintf(stderr, "(Did you set the DISPLAY variable?)\n");
3356         exit(1);
3357     }
3358
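    /* When a display is in use, remember the desktop resolution so that
     * full-screen mode can later be sized to it. */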
3359     if (!display_disable) {
3360 #if HAVE_SDL_VIDEO_SIZE
3361         const SDL_VideoInfo *vi = SDL_GetVideoInfo();
3362         fs_screen_width = vi->current_w;
3363         fs_screen_height = vi->current_h;
3364 #endif
3365     }
3366
3367     SDL_EventState(SDL_ACTIVEEVENT, SDL_IGNORE);
3368     SDL_EventState(SDL_SYSWMEVENT, SDL_IGNORE);
3369     SDL_EventState(SDL_USEREVENT, SDL_IGNORE);
3370
3371     if (av_lockmgr_register(lockmgr)) {
3372         fprintf(stderr, "Could not initialize lock manager!\n");
3373         do_exit(NULL);
3374     }
3375
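    /* flush_pkt is a sentinel packet: it is queued on seeks so that the decoder
     * threads know to flush their codec buffers before decoding packets from the
     * new position. */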
3376     av_init_packet(&flush_pkt);
3377     flush_pkt.data = (uint8_t *)(intptr_t)"FLUSH";
3378
3379     is = stream_open(input_filename, file_iformat);
3380     if (!is) {
3381         fprintf(stderr, "Failed to initialize VideoState!\n");
3382         do_exit(NULL);
3383     }
3384
3385     event_loop(is);
3386
3387     /* never returns */
3388
3389     return 0;
3390 }