ffplay.c
01310af2
 /*
  * Copyright (c) 2003 Fabrice Bellard
  *
b78e7197
  * This file is part of FFmpeg.
  *
  * FFmpeg is free software; you can redistribute it and/or
01310af2
  * modify it under the terms of the GNU Lesser General Public
  * License as published by the Free Software Foundation; either
b78e7197
  * version 2.1 of the License, or (at your option) any later version.
01310af2
  *
b78e7197
  * FFmpeg is distributed in the hope that it will be useful,
01310af2
  * but WITHOUT ANY WARRANTY; without even the implied warranty of
  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
  * Lesser General Public License for more details.
  *
  * You should have received a copy of the GNU Lesser General Public
b78e7197
  * License along with FFmpeg; if not, write to the Free Software
5509bffa
  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
01310af2
  */
364a9607
 
93613338
 /**
  * @file
  * simple media player based on the FFmpeg libraries
  */
 
ba11257e
 #include "config.h"
8a3ceaf4
 #include <inttypes.h>
0f4e8165
 #include <math.h>
 #include <limits.h>
73f2cf4e
 #include <signal.h>
245976da
 #include "libavutil/avstring.h"
2b4abbd6
 #include "libavutil/colorspace.h"
0ebcdf5c
 #include "libavutil/mathematics.h"
718c7b18
 #include "libavutil/pixdesc.h"
7ffe76e5
 #include "libavutil/imgutils.h"
b6bde8c7
 #include "libavutil/dict.h"
7ffe76e5
 #include "libavutil/parseutils.h"
 #include "libavutil/samplefmt.h"
f6d71b39
 #include "libavutil/avassert.h"
245976da
 #include "libavformat/avformat.h"
 #include "libavdevice/avdevice.h"
 #include "libswscale/swscale.h"
41d0eb1c
 #include "libavutil/opt.h"
166621ab
 #include "libavcodec/avfft.h"
1dd3c473
 #include "libswresample/swresample.h"
01310af2
 
917d2bb3
 #if CONFIG_AVFILTER
566666ca
 # include "libavfilter/avcodec.h"
917d2bb3
 # include "libavfilter/avfilter.h"
 # include "libavfilter/avfiltergraph.h"
4f7dfe12
 # include "libavfilter/buffersink.h"
917d2bb3
 #endif
 
01310af2
 #include <SDL.h>
 #include <SDL_thread.h>
 
25c32d08
 #include "cmdutils.h"
31319a8c
 
d38c9e7a
 #include <unistd.h>
 #include <assert.h>
 
89b503b5
 const char program_name[] = "ffplay";
ea9c581f
 const int program_birth_year = 2003;
4cfac5bc
 
79ee4683
 #define MAX_QUEUE_SIZE (15 * 1024 * 1024)
 #define MIN_FRAMES 5
01310af2
 
638c9d91
 /* SDL audio buffer size, in samples. Should be small to have precise
    A/V sync as SDL does not have hardware buffer fullness info. */
 #define SDL_AUDIO_BUFFER_SIZE 1024
 
 /* no AV sync correction is done if below the AV sync threshold */
7e0140cb
 #define AV_SYNC_THRESHOLD 0.01
638c9d91
 /* no AV correction is done if too big error */
 #define AV_NOSYNC_THRESHOLD 10.0
 
 /* maximum audio speed change to get correct sync */
 #define SAMPLE_CORRECTION_PERCENT_MAX 10
 
 /* we use about AUDIO_DIFF_AVG_NB A-V differences to make the average */
 #define AUDIO_DIFF_AVG_NB   20
 
01310af2
 /* NOTE: the size must be big enough to compensate the hardware audio buffersize size */
da7c65f0
 #define SAMPLE_ARRAY_SIZE (2 * 65536)
01310af2
 
03ae87a3
 static int sws_flags = SWS_BICUBIC;
 
01310af2
 typedef struct PacketQueue {
     AVPacketList *first_pkt, *last_pkt;
     int nb_packets;
     int size;
     int abort_request;
     SDL_mutex *mutex;
     SDL_cond *cond;
 } PacketQueue;
 
562f382c
 #define VIDEO_PICTURE_QUEUE_SIZE 2
72ce053b
 #define SUBPICTURE_QUEUE_SIZE 4
01310af2
 
 typedef struct VideoPicture {
da7c65f0
     double pts;                                  ///< presentation time stamp for this picture
41211483
     double duration;                             ///< expected duration of the frame
da7c65f0
     int64_t pos;                                 ///< byte position in file
4e268aae
     int skip;
01310af2
     SDL_Overlay *bmp;
     int width, height; /* source height & width */
825ec16d
     AVRational sample_aspect_ratio;
01310af2
     int allocated;
8085a5b7
     int reallocate;
917d2bb3
     enum PixelFormat pix_fmt;
 
 #if CONFIG_AVFILTER
ecc8dada
     AVFilterBufferRef *picref;
917d2bb3
 #endif
01310af2
 } VideoPicture;
 
72ce053b
 typedef struct SubPicture {
     double pts; /* presentation time stamp for this picture */
     AVSubtitle sub;
 } SubPicture;
 
01310af2
 enum {
     AV_SYNC_AUDIO_MASTER, /* default choice */
     AV_SYNC_VIDEO_MASTER,
638c9d91
     AV_SYNC_EXTERNAL_CLOCK, /* synchronize to an external clock */
01310af2
 };
 
 typedef struct VideoState {
8adf9bb2
     SDL_Thread *read_tid;
01310af2
     SDL_Thread *video_tid;
d38c9e7a
     SDL_Thread *refresh_tid;
638c9d91
     AVInputFormat *iformat;
01310af2
     int no_background;
     int abort_request;
dbe7170e
     int force_refresh;
01310af2
     int paused;
416e3508
     int last_paused;
72ea344b
     int seek_req;
3ba1438d
     int seek_flags;
72ea344b
     int64_t seek_pos;
4ed29207
     int64_t seek_rel;
f5668147
     int read_pause_return;
01310af2
     AVFormatContext *ic;
 
     int audio_stream;
115329f1
 
01310af2
     int av_sync_type;
638c9d91
     double external_clock; /* external clock base */
     int64_t external_clock_time;
115329f1
 
638c9d91
     double audio_clock;
     double audio_diff_cum; /* used for AV difference average computation */
     double audio_diff_avg_coef;
     double audio_diff_threshold;
     int audio_diff_avg_count;
01310af2
     AVStream *audio_st;
     PacketQueue audioq;
     int audio_hw_buf_size;
1dd3c473
     DECLARE_ALIGNED(16,uint8_t,audio_buf2)[AVCODEC_MAX_AUDIO_FRAME_SIZE * 4];
e2a2c49f
     uint8_t silence_buf[SDL_AUDIO_BUFFER_SIZE];
5a4476e2
     uint8_t *audio_buf;
f199f385
     uint8_t *audio_buf1;
7fea94ce
     unsigned int audio_buf_size; /* in bytes */
01310af2
     int audio_buf_index; /* in bytes */
10b7b4a6
     int audio_write_buf_size;
bea18375
     AVPacket audio_pkt_temp;
01310af2
     AVPacket audio_pkt;
5d6e4c16
     enum AVSampleFormat audio_src_fmt;
1dd3c473
     enum AVSampleFormat audio_tgt_fmt;
     int audio_src_channels;
     int audio_tgt_channels;
     int64_t audio_src_channel_layout;
     int64_t audio_tgt_channel_layout;
     int audio_src_freq;
     int audio_tgt_freq;
     struct SwrContext *swr_ctx;
10b7b4a6
     double audio_current_pts;
     double audio_current_pts_drift;
d54af906
     int frame_drops_early;
     int frame_drops_late;
f199f385
     AVFrame *frame;
115329f1
 
54ad8e06
     enum ShowMode {
1d6c82d4
         SHOW_MODE_NONE = -1, SHOW_MODE_VIDEO = 0, SHOW_MODE_WAVES, SHOW_MODE_RDFT, SHOW_MODE_NB
f8b8c694
     } show_mode;
01310af2
     int16_t sample_array[SAMPLE_ARRAY_SIZE];
     int sample_array_index;
5e0257e3
     int last_i_start;
166621ab
     RDFTContext *rdft;
12eeda34
     int rdft_bits;
7dbbf6a1
     FFTSample *rdft_data;
12eeda34
     int xpos;
115329f1
 
72ce053b
     SDL_Thread *subtitle_tid;
     int subtitle_stream;
     int subtitle_stream_changed;
     AVStream *subtitle_st;
     PacketQueue subtitleq;
     SubPicture subpq[SUBPICTURE_QUEUE_SIZE];
     int subpq_size, subpq_rindex, subpq_windex;
     SDL_mutex *subpq_mutex;
     SDL_cond *subpq_cond;
115329f1
 
638c9d91
     double frame_timer;
     double frame_last_pts;
4e268aae
     double frame_last_duration;
223cba6e
     double frame_last_dropped_pts;
8f17a8ef
     double frame_last_returned_time;
     double frame_last_filter_delay;
223cba6e
     int64_t frame_last_dropped_pos;
da7c65f0
     double video_clock;                          ///< pts of last decoded frame / predicted pts of next decoded frame
01310af2
     int video_stream;
     AVStream *video_st;
     PacketQueue videoq;
da7c65f0
     double video_current_pts;                    ///< current displayed pts (different from video_clock if frame fifos are used)
     double video_current_pts_drift;              ///< video_current_pts - time (av_gettime) at which we updated video_current_pts - used to have running video pts
     int64_t video_current_pos;                   ///< current displayed file pos
01310af2
     VideoPicture pictq[VIDEO_PICTURE_QUEUE_SIZE];
     int pictq_size, pictq_rindex, pictq_windex;
     SDL_mutex *pictq_mutex;
     SDL_cond *pictq_cond;
917d2bb3
 #if !CONFIG_AVFILTER
3ac56e28
     struct SwsContext *img_convert_ctx;
917d2bb3
 #endif
115329f1
 
01310af2
     char filename[1024];
     int width, height, xleft, ytop;
5db1f94b
     int step;
41db429d
 
917d2bb3
 #if CONFIG_AVFILTER
da7c65f0
     AVFilterContext *out_video_filter;          ///< the last filter in the video chain
917d2bb3
 #endif
d38c9e7a
 
     int refresh;
01310af2
 } VideoState;
 
843509e2
 typedef struct AllocEventProps {
     VideoState *is;
     AVFrame *frame;
 } AllocEventProps;
 
a0991833
 static int opt_help(const char *opt, const char *arg);
01310af2
 
 /* options specified by the user */
 static AVInputFormat *file_iformat;
 static const char *input_filename;
076db5ed
 static const char *window_title;
01310af2
 static int fs_screen_width;
 static int fs_screen_height;
da7c65f0
 static int screen_width  = 0;
fccb19e3
 static int screen_height = 0;
01310af2
 static int audio_disable;
 static int video_disable;
da7c65f0
 static int wanted_stream[AVMEDIA_TYPE_NB] = {
     [AVMEDIA_TYPE_AUDIO]    = -1,
     [AVMEDIA_TYPE_VIDEO]    = -1,
     [AVMEDIA_TYPE_SUBTITLE] = -1,
5b369983
 };
da7c65f0
 static int seek_by_bytes = -1;
01310af2
 static int display_disable;
1e1a0b18
 static int show_status = 1;
638c9d91
 static int av_sync_type = AV_SYNC_AUDIO_MASTER;
72ea344b
 static int64_t start_time = AV_NOPTS_VALUE;
d834d63b
 static int64_t duration = AV_NOPTS_VALUE;
6387c3e6
 static int workaround_bugs = 1;
6fc5b059
 static int fast = 0;
30bc6613
 static int genpts = 0;
70d54392
 static int lowres = 0;
178fcca8
 static int idct = FF_IDCT_AUTO;
da7c65f0
 static enum AVDiscard skip_frame       = AVDISCARD_DEFAULT;
 static enum AVDiscard skip_idct        = AVDISCARD_DEFAULT;
 static enum AVDiscard skip_loop_filter = AVDISCARD_DEFAULT;
1b51e051
 static int error_concealment = 3;
da7c65f0
 static int decoder_reorder_pts = -1;
2d1653b0
 static int autoexit;
066ce8c9
 static int exit_on_keydown;
 static int exit_on_mousedown;
da7c65f0
 static int loop = 1;
41211483
 static int framedrop = -1;
1d6c82d4
 static enum ShowMode show_mode = SHOW_MODE_NONE;
5eda0967
 static const char *audio_codec_name;
 static const char *subtitle_codec_name;
 static const char *video_codec_name;
da7c65f0
 static int rdftspeed = 20;
917d2bb3
 #if CONFIG_AVFILTER
 static char *vfilters = NULL;
 #endif
01310af2
 
 /* current context */
 static int is_full_screen;
5e0257e3
 static int64_t audio_callback_time;
01310af2
 
2c676c33
 static AVPacket flush_pkt;
39c6a118
 
01310af2
 #define FF_ALLOC_EVENT   (SDL_USEREVENT)
 #define FF_REFRESH_EVENT (SDL_USEREVENT + 1)
638c9d91
 #define FF_QUIT_EVENT    (SDL_USEREVENT + 2)
01310af2
 
2c676c33
 static SDL_Surface *screen;
01310af2
 
109d23e0
 void av_noreturn exit_program(int ret)
dad09ff9
 {
     exit(ret);
 }
 
eef16966
 static int packet_queue_put(PacketQueue *q, AVPacket *pkt)
 {
     AVPacketList *pkt1;
 
     /* duplicate the packet */
41211483
     if (pkt != &flush_pkt && av_dup_packet(pkt) < 0)
eef16966
         return -1;
 
     pkt1 = av_malloc(sizeof(AVPacketList));
     if (!pkt1)
         return -1;
     pkt1->pkt = *pkt;
     pkt1->next = NULL;
 
 
     SDL_LockMutex(q->mutex);
 
     if (!q->last_pkt)
 
         q->first_pkt = pkt1;
     else
         q->last_pkt->next = pkt1;
     q->last_pkt = pkt1;
     q->nb_packets++;
     q->size += pkt1->pkt.size + sizeof(*pkt1);
     /* XXX: should duplicate packet data in DV case */
     SDL_CondSignal(q->cond);
 
     SDL_UnlockMutex(q->mutex);
     return 0;
 }
515bd00e
 
01310af2
 /* packet queue handling */
 static void packet_queue_init(PacketQueue *q)
 {
     memset(q, 0, sizeof(PacketQueue));
     q->mutex = SDL_CreateMutex();
     q->cond = SDL_CreateCond();
515bd00e
     packet_queue_put(q, &flush_pkt);
01310af2
 }
 
72ea344b
 static void packet_queue_flush(PacketQueue *q)
01310af2
 {
     AVPacketList *pkt, *pkt1;
 
687fae2b
     SDL_LockMutex(q->mutex);
da7c65f0
     for (pkt = q->first_pkt; pkt != NULL; pkt = pkt1) {
01310af2
         pkt1 = pkt->next;
         av_free_packet(&pkt->pkt);
da6c4573
         av_freep(&pkt);
01310af2
     }
72ea344b
     q->last_pkt = NULL;
     q->first_pkt = NULL;
     q->nb_packets = 0;
     q->size = 0;
687fae2b
     SDL_UnlockMutex(q->mutex);
72ea344b
 }
 
 static void packet_queue_end(PacketQueue *q)
 {
     packet_queue_flush(q);
01310af2
     SDL_DestroyMutex(q->mutex);
     SDL_DestroyCond(q->cond);
 }
 
 static void packet_queue_abort(PacketQueue *q)
 {
     SDL_LockMutex(q->mutex);
 
     q->abort_request = 1;
115329f1
 
01310af2
     SDL_CondSignal(q->cond);
 
     SDL_UnlockMutex(q->mutex);
 }
 
 /* return < 0 if aborted, 0 if no packet and > 0 if packet.  */
 static int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block)
 {
     AVPacketList *pkt1;
     int ret;
 
     SDL_LockMutex(q->mutex);
 
da7c65f0
     for (;;) {
01310af2
         if (q->abort_request) {
             ret = -1;
             break;
         }
115329f1
 
01310af2
         pkt1 = q->first_pkt;
         if (pkt1) {
             q->first_pkt = pkt1->next;
             if (!q->first_pkt)
                 q->last_pkt = NULL;
             q->nb_packets--;
7b776589
             q->size -= pkt1->pkt.size + sizeof(*pkt1);
01310af2
             *pkt = pkt1->pkt;
             av_free(pkt1);
             ret = 1;
             break;
         } else if (!block) {
             ret = 0;
             break;
         } else {
             SDL_CondWait(q->cond, q->mutex);
         }
     }
     SDL_UnlockMutex(q->mutex);
     return ret;
 }
 
115329f1
 static inline void fill_rectangle(SDL_Surface *screen,
01310af2
                                   int x, int y, int w, int h, int color)
 {
     SDL_Rect rect;
     rect.x = x;
     rect.y = y;
     rect.w = w;
     rect.h = h;
     SDL_FillRect(screen, &rect, color);
 }
 
72ce053b
 #define ALPHA_BLEND(a, oldp, newp, s)\
 ((((oldp << s) * (255 - (a))) + (newp * (a))) / (255 << s))
 
 #define RGBA_IN(r, g, b, a, s)\
 {\
     unsigned int v = ((const uint32_t *)(s))[0];\
     a = (v >> 24) & 0xff;\
     r = (v >> 16) & 0xff;\
     g = (v >> 8) & 0xff;\
     b = v & 0xff;\
 }
 
 #define YUVA_IN(y, u, v, a, s, pal)\
 {\
57cf99f2
     unsigned int val = ((const uint32_t *)(pal))[*(const uint8_t*)(s)];\
72ce053b
     a = (val >> 24) & 0xff;\
     y = (val >> 16) & 0xff;\
     u = (val >> 8) & 0xff;\
     v = val & 0xff;\
 }
 
 #define YUVA_OUT(d, y, u, v, a)\
 {\
     ((uint32_t *)(d))[0] = (a << 24) | (y << 16) | (u << 8) | v;\
 }
 
 
 #define BPP 1
 
0a8cd696
 static void blend_subrect(AVPicture *dst, const AVSubtitleRect *rect, int imgw, int imgh)
72ce053b
 {
     int wrap, wrap3, width2, skip2;
     int y, u, v, a, u1, v1, a1, w, h;
     uint8_t *lum, *cb, *cr;
     const uint8_t *p;
     const uint32_t *pal;
9cb5a11e
     int dstx, dsty, dstw, dsth;
 
7cf9c6ae
     dstw = av_clip(rect->w, 0, imgw);
     dsth = av_clip(rect->h, 0, imgh);
     dstx = av_clip(rect->x, 0, imgw - dstw);
     dsty = av_clip(rect->y, 0, imgh - dsth);
9cb5a11e
     lum = dst->data[0] + dsty * dst->linesize[0];
da7c65f0
     cb  = dst->data[1] + (dsty >> 1) * dst->linesize[1];
     cr  = dst->data[2] + (dsty >> 1) * dst->linesize[2];
9cb5a11e
 
f54b31b9
     width2 = ((dstw + 1) >> 1) + (dstx & ~dstw & 1);
9cb5a11e
     skip2 = dstx >> 1;
72ce053b
     wrap = dst->linesize[0];
25b4c651
     wrap3 = rect->pict.linesize[0];
     p = rect->pict.data[0];
     pal = (const uint32_t *)rect->pict.data[1];  /* Now in YCrCb! */
115329f1
 
9cb5a11e
     if (dsty & 1) {
         lum += dstx;
72ce053b
         cb += skip2;
         cr += skip2;
115329f1
 
9cb5a11e
         if (dstx & 1) {
72ce053b
             YUVA_IN(y, u, v, a, p, pal);
             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
             cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
             cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
             cb++;
             cr++;
             lum++;
             p += BPP;
         }
da7c65f0
         for (w = dstw - (dstx & 1); w >= 2; w -= 2) {
72ce053b
             YUVA_IN(y, u, v, a, p, pal);
             u1 = u;
             v1 = v;
             a1 = a;
             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
 
             YUVA_IN(y, u, v, a, p + BPP, pal);
             u1 += u;
             v1 += v;
             a1 += a;
             lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
             cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
             cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
             cb++;
             cr++;
             p += 2 * BPP;
             lum += 2;
         }
         if (w) {
             YUVA_IN(y, u, v, a, p, pal);
             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
             cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
             cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
676ef505
             p++;
             lum++;
72ce053b
         }
4606a059
         p += wrap3 - dstw * BPP;
         lum += wrap - dstw - dstx;
72ce053b
         cb += dst->linesize[1] - width2 - skip2;
         cr += dst->linesize[2] - width2 - skip2;
     }
da7c65f0
     for (h = dsth - (dsty & 1); h >= 2; h -= 2) {
9cb5a11e
         lum += dstx;
72ce053b
         cb += skip2;
         cr += skip2;
115329f1
 
9cb5a11e
         if (dstx & 1) {
72ce053b
             YUVA_IN(y, u, v, a, p, pal);
             u1 = u;
             v1 = v;
             a1 = a;
             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
             p += wrap3;
             lum += wrap;
             YUVA_IN(y, u, v, a, p, pal);
             u1 += u;
             v1 += v;
             a1 += a;
             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
             cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
             cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
             cb++;
             cr++;
             p += -wrap3 + BPP;
             lum += -wrap + 1;
         }
da7c65f0
         for (w = dstw - (dstx & 1); w >= 2; w -= 2) {
72ce053b
             YUVA_IN(y, u, v, a, p, pal);
             u1 = u;
             v1 = v;
             a1 = a;
             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
 
f8ca63e8
             YUVA_IN(y, u, v, a, p + BPP, pal);
72ce053b
             u1 += u;
             v1 += v;
             a1 += a;
             lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
             p += wrap3;
             lum += wrap;
 
             YUVA_IN(y, u, v, a, p, pal);
             u1 += u;
             v1 += v;
             a1 += a;
             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
 
f8ca63e8
             YUVA_IN(y, u, v, a, p + BPP, pal);
72ce053b
             u1 += u;
             v1 += v;
             a1 += a;
             lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
 
             cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 2);
             cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 2);
 
             cb++;
             cr++;
             p += -wrap3 + 2 * BPP;
             lum += -wrap + 2;
         }
         if (w) {
             YUVA_IN(y, u, v, a, p, pal);
             u1 = u;
             v1 = v;
             a1 = a;
             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
             p += wrap3;
             lum += wrap;
             YUVA_IN(y, u, v, a, p, pal);
             u1 += u;
             v1 += v;
             a1 += a;
             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
             cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
             cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
             cb++;
             cr++;
             p += -wrap3 + BPP;
             lum += -wrap + 1;
         }
9cb5a11e
         p += wrap3 + (wrap3 - dstw * BPP);
         lum += wrap + (wrap - dstw - dstx);
72ce053b
         cb += dst->linesize[1] - width2 - skip2;
         cr += dst->linesize[2] - width2 - skip2;
     }
     /* handle odd height */
     if (h) {
9cb5a11e
         lum += dstx;
72ce053b
         cb += skip2;
         cr += skip2;
115329f1
 
9cb5a11e
         if (dstx & 1) {
72ce053b
             YUVA_IN(y, u, v, a, p, pal);
             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
             cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
             cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
             cb++;
             cr++;
             lum++;
             p += BPP;
         }
da7c65f0
         for (w = dstw - (dstx & 1); w >= 2; w -= 2) {
72ce053b
             YUVA_IN(y, u, v, a, p, pal);
             u1 = u;
             v1 = v;
             a1 = a;
             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
 
             YUVA_IN(y, u, v, a, p + BPP, pal);
             u1 += u;
             v1 += v;
             a1 += a;
             lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
             cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u, 1);
             cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v, 1);
             cb++;
             cr++;
             p += 2 * BPP;
             lum += 2;
         }
         if (w) {
             YUVA_IN(y, u, v, a, p, pal);
             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
             cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
             cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
         }
     }
 }
 
 static void free_subpicture(SubPicture *sp)
 {
e1d7c883
     avsubtitle_free(&sp->sub);
72ce053b
 }
 
01310af2
 static void video_image_display(VideoState *is)
 {
     VideoPicture *vp;
72ce053b
     SubPicture *sp;
     AVPicture pict;
01310af2
     float aspect_ratio;
     int width, height, x, y;
     SDL_Rect rect;
72ce053b
     int i;
01310af2
 
     vp = &is->pictq[is->pictq_rindex];
     if (vp->bmp) {
825ec16d
         if (vp->sample_aspect_ratio.num == 0)
c30a4489
             aspect_ratio = 0;
825ec16d
         else
             aspect_ratio = av_q2d(vp->sample_aspect_ratio);
 
01310af2
         if (aspect_ratio <= 0.0)
c30a4489
             aspect_ratio = 1.0;
917d2bb3
         aspect_ratio *= (float)vp->width / (float)vp->height;
01310af2
 
df149f6c
         if (is->subtitle_st) {
             if (is->subpq_size > 0) {
72ce053b
                 sp = &is->subpq[is->subpq_rindex];
 
df149f6c
                 if (vp->pts >= sp->pts + ((float) sp->sub.start_display_time / 1000)) {
72ce053b
                     SDL_LockYUVOverlay (vp->bmp);
 
                     pict.data[0] = vp->bmp->pixels[0];
                     pict.data[1] = vp->bmp->pixels[2];
                     pict.data[2] = vp->bmp->pixels[1];
 
                     pict.linesize[0] = vp->bmp->pitches[0];
                     pict.linesize[1] = vp->bmp->pitches[2];
                     pict.linesize[2] = vp->bmp->pitches[1];
 
                     for (i = 0; i < sp->sub.num_rects; i++)
db4fac64
                         blend_subrect(&pict, sp->sub.rects[i],
0a8cd696
                                       vp->bmp->w, vp->bmp->h);
72ce053b
 
                     SDL_UnlockYUVOverlay (vp->bmp);
                 }
             }
         }
 
 
01310af2
         /* XXX: we suppose the screen has a 1.0 pixel ratio */
         height = is->height;
bb6c34e5
         width = ((int)rint(height * aspect_ratio)) & ~1;
01310af2
         if (width > is->width) {
             width = is->width;
bb6c34e5
             height = ((int)rint(width / aspect_ratio)) & ~1;
01310af2
         }
         x = (is->width - width) / 2;
         y = (is->height - height) / 2;
6c6c976f
         is->no_background = 0;
01310af2
         rect.x = is->xleft + x;
2f6547fb
         rect.y = is->ytop  + y;
69f58958
         rect.w = FFMAX(width,  1);
         rect.h = FFMAX(height, 1);
01310af2
         SDL_DisplayYUVOverlay(vp->bmp, &rect);
     }
 }
 
 static inline int compute_mod(int a, int b)
 {
91b27e49
     return a < 0 ? a%b + b : a%b;
01310af2
 }
 
 static void video_audio_display(VideoState *s)
 {
     int i, i_start, x, y1, y, ys, delay, n, nb_display_channels;
     int ch, channels, h, h2, bgcolor, fgcolor;
     int16_t time_diff;
4c7c7645
     int rdft_bits, nb_freq;
 
da7c65f0
     for (rdft_bits = 1; (1 << rdft_bits) < 2 * s->height; rdft_bits++)
4c7c7645
         ;
da7c65f0
     nb_freq = 1 << (rdft_bits - 1);
115329f1
 
01310af2
     /* compute display index : center on currently output samples */
1dd3c473
     channels = s->audio_tgt_channels;
01310af2
     nb_display_channels = channels;
5e0257e3
     if (!s->paused) {
f8b8c694
         int data_used= s->show_mode == SHOW_MODE_WAVES ? s->width : (2*nb_freq);
5e0257e3
         n = 2 * channels;
10b7b4a6
         delay = s->audio_write_buf_size;
5e0257e3
         delay /= n;
115329f1
 
5e0257e3
         /* to be more precise, we take into account the time spent since
            the last buffer computation */
         if (audio_callback_time) {
             time_diff = av_gettime() - audio_callback_time;
1dd3c473
             delay -= (time_diff * s->audio_tgt_freq) / 1000000;
5e0257e3
         }
115329f1
 
da7c65f0
         delay += 2 * data_used;
4c7c7645
         if (delay < data_used)
             delay = data_used;
ac50bcc8
 
         i_start= x = compute_mod(s->sample_array_index - delay * channels, SAMPLE_ARRAY_SIZE);
f8b8c694
         if (s->show_mode == SHOW_MODE_WAVES) {
da7c65f0
             h = INT_MIN;
             for (i = 0; i < 1000; i += channels) {
                 int idx = (SAMPLE_ARRAY_SIZE + x - i) % SAMPLE_ARRAY_SIZE;
                 int a = s->sample_array[idx];
                 int b = s->sample_array[(idx + 4 * channels) % SAMPLE_ARRAY_SIZE];
                 int c = s->sample_array[(idx + 5 * channels) % SAMPLE_ARRAY_SIZE];
                 int d = s->sample_array[(idx + 9 * channels) % SAMPLE_ARRAY_SIZE];
                 int score = a - d;
                 if (h < score && (b ^ c) < 0) {
                     h = score;
                     i_start = idx;
6c7165c7
                 }
ac50bcc8
             }
         }
 
5e0257e3
         s->last_i_start = i_start;
     } else {
         i_start = s->last_i_start;
01310af2
     }
 
     bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00);
f8b8c694
     if (s->show_mode == SHOW_MODE_WAVES) {
6c7165c7
         fill_rectangle(screen,
                        s->xleft, s->ytop, s->width, s->height,
                        bgcolor);
 
         fgcolor = SDL_MapRGB(screen->format, 0xff, 0xff, 0xff);
 
         /* total height for one channel */
         h = s->height / nb_display_channels;
         /* graph height / 2 */
         h2 = (h * 9) / 20;
da7c65f0
         for (ch = 0; ch < nb_display_channels; ch++) {
6c7165c7
             i = i_start + ch;
             y1 = s->ytop + ch * h + (h / 2); /* position of center line */
da7c65f0
             for (x = 0; x < s->width; x++) {
6c7165c7
                 y = (s->sample_array[i] * h2) >> 15;
                 if (y < 0) {
                     y = -y;
                     ys = y1 - y;
                 } else {
                     ys = y1;
                 }
                 fill_rectangle(screen,
                                s->xleft + x, ys, 1, y,
                                fgcolor);
                 i += channels;
                 if (i >= SAMPLE_ARRAY_SIZE)
                     i -= SAMPLE_ARRAY_SIZE;
01310af2
             }
         }
 
6c7165c7
         fgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0xff);
01310af2
 
da7c65f0
         for (ch = 1; ch < nb_display_channels; ch++) {
6c7165c7
             y = s->ytop + ch * h;
             fill_rectangle(screen,
                            s->xleft, y, s->width, 1,
                            fgcolor);
         }
         SDL_UpdateRect(screen, s->xleft, s->ytop, s->width, s->height);
da7c65f0
     } else {
12eeda34
         nb_display_channels= FFMIN(nb_display_channels, 2);
da7c65f0
         if (rdft_bits != s->rdft_bits) {
166621ab
             av_rdft_end(s->rdft);
7dbbf6a1
             av_free(s->rdft_data);
166621ab
             s->rdft = av_rdft_init(rdft_bits, DFT_R2C);
da7c65f0
             s->rdft_bits = rdft_bits;
             s->rdft_data = av_malloc(4 * nb_freq * sizeof(*s->rdft_data));
12eeda34
         }
         {
7dbbf6a1
             FFTSample *data[2];
da7c65f0
             for (ch = 0; ch < nb_display_channels; ch++) {
                 data[ch] = s->rdft_data + 2 * nb_freq * ch;
12eeda34
                 i = i_start + ch;
da7c65f0
                 for (x = 0; x < 2 * nb_freq; x++) {
                     double w = (x-nb_freq) * (1.0 / nb_freq);
                     data[ch][x] = s->sample_array[i] * (1.0 - w * w);
12eeda34
                     i += channels;
                     if (i >= SAMPLE_ARRAY_SIZE)
                         i -= SAMPLE_ARRAY_SIZE;
                 }
166621ab
                 av_rdft_calc(s->rdft, data[ch]);
12eeda34
             }
da7c65f0
             // least efficient way to do this, we should of course directly access it but its more than fast enough
             for (y = 0; y < s->height; y++) {
                 double w = 1 / sqrt(nb_freq);
                 int a = sqrt(w * sqrt(data[0][2 * y + 0] * data[0][2 * y + 0] + data[0][2 * y + 1] * data[0][2 * y + 1]));
                 int b = (nb_display_channels == 2 ) ? sqrt(w * sqrt(data[1][2 * y + 0] * data[1][2 * y + 0]
                        + data[1][2 * y + 1] * data[1][2 * y + 1])) : a;
                 a = FFMIN(a, 255);
                 b = FFMIN(b, 255);
                 fgcolor = SDL_MapRGB(screen->format, a, b, (a + b) / 2);
12eeda34
 
                 fill_rectangle(screen,
                             s->xpos, s->height-y, 1, 1,
                             fgcolor);
             }
         }
         SDL_UpdateRect(screen, s->xpos, s->ytop, 1, s->height);
e6093e36
         if (!s->paused)
             s->xpos++;
da7c65f0
         if (s->xpos >= s->width)
12eeda34
             s->xpos= s->xleft;
     }
01310af2
 }
 
d5708923
 static void stream_close(VideoState *is)
 {
     VideoPicture *vp;
     int i;
     /* XXX: use a special url_shutdown call to abort parse cleanly */
     is->abort_request = 1;
     SDL_WaitThread(is->read_tid, NULL);
     SDL_WaitThread(is->refresh_tid, NULL);
 
     /* free all pictures */
41211483
     for (i = 0; i < VIDEO_PICTURE_QUEUE_SIZE; i++) {
d5708923
         vp = &is->pictq[i];
 #if CONFIG_AVFILTER
         if (vp->picref) {
             avfilter_unref_buffer(vp->picref);
             vp->picref = NULL;
         }
 #endif
         if (vp->bmp) {
             SDL_FreeYUVOverlay(vp->bmp);
             vp->bmp = NULL;
         }
     }
     SDL_DestroyMutex(is->pictq_mutex);
     SDL_DestroyCond(is->pictq_cond);
     SDL_DestroyMutex(is->subpq_mutex);
     SDL_DestroyCond(is->subpq_cond);
 #if !CONFIG_AVFILTER
     if (is->img_convert_ctx)
         sws_freeContext(is->img_convert_ctx);
 #endif
     av_free(is);
 }
 
84506ebd
 static void do_exit(VideoState *is)
d5708923
 {
84506ebd
     if (is) {
         stream_close(is);
d5708923
     }
ee0ff051
     av_lockmgr_register(NULL);
d5708923
     uninit_opts();
 #if CONFIG_AVFILTER
     avfilter_uninit();
 #endif
13b7781e
     avformat_network_deinit();
d5708923
     if (show_status)
         printf("\n");
     SDL_Quit();
4a34e54b
     av_log(NULL, AV_LOG_QUIET, "%s", "");
d5708923
     exit(0);
 }
 
73f2cf4e
 static void sigterm_handler(int sig)
 {
     exit(123);
 }
 
41211483
 static int video_open(VideoState *is, int force_set_video_mode)
da7c65f0
 {
     int flags = SDL_HWSURFACE | SDL_ASYNCBLIT | SDL_HWACCEL;
990c8438
     int w,h;
843509e2
     VideoPicture *vp = &is->pictq[is->pictq_rindex];
990c8438
 
da7c65f0
     if (is_full_screen) flags |= SDL_FULLSCREEN;
     else                flags |= SDL_RESIZABLE;
fb84155b
 
990c8438
     if (is_full_screen && fs_screen_width) {
         w = fs_screen_width;
         h = fs_screen_height;
da7c65f0
     } else if (!is_full_screen && screen_width) {
fb84155b
         w = screen_width;
         h = screen_height;
843509e2
     } else if (vp->width) {
         w = vp->width;
         h = vp->height;
990c8438
     } else {
fb84155b
         w = 640;
         h = 480;
990c8438
     }
da7c65f0
     if (screen && is->width == screen->w && screen->w == w
9fb2b412
        && is->height== screen->h && screen->h == h && !force_set_video_mode)
d3d7b12e
         return 0;
990c8438
     screen = SDL_SetVideoMode(w, h, 0, flags);
     if (!screen) {
         fprintf(stderr, "SDL: could not set video mode - exiting\n");
84506ebd
         do_exit(is);
990c8438
     }
076db5ed
     if (!window_title)
         window_title = input_filename;
     SDL_WM_SetCaption(window_title, window_title);
990c8438
 
da7c65f0
     is->width  = screen->w;
990c8438
     is->height = screen->h;
 
     return 0;
 }
8c982c5d
 
01310af2
 /* display the current picture, if any */
 static void video_display(VideoState *is)
 {
da7c65f0
     if (!screen)
9fb2b412
         video_open(is, 0);
f8b8c694
     if (is->audio_st && is->show_mode != SHOW_MODE_VIDEO)
01310af2
         video_audio_display(is);
     else if (is->video_st)
         video_image_display(is);
 }
 
d38c9e7a
 static int refresh_thread(void *opaque)
01310af2
 {
d38c9e7a
     VideoState *is= opaque;
da7c65f0
     while (!is->abort_request) {
d881a0e8
         SDL_Event event;
         event.type = FF_REFRESH_EVENT;
         event.user.data1 = opaque;
dbe7170e
         if (!is->refresh && (!is->paused || is->force_refresh)) {
da7c65f0
             is->refresh = 1;
d881a0e8
             SDL_PushEvent(&event);
d38c9e7a
         }
f8b8c694
         //FIXME ideally we should wait the correct time but SDLs event passing is so slow it would be silly
         usleep(is->audio_st && is->show_mode != SHOW_MODE_VIDEO ? rdftspeed*1000 : 5000);
d38c9e7a
     }
     return 0;
01310af2
 }
 
638c9d91
 /* get the current audio clock value */
 static double get_audio_clock(VideoState *is)
 {
10b7b4a6
     if (is->paused) {
         return is->audio_current_pts;
     } else {
         return is->audio_current_pts_drift + av_gettime() / 1000000.0;
638c9d91
     }
 }
 
 /* get the current video clock value */
 static double get_video_clock(VideoState *is)
 {
04108619
     if (is->paused) {
41a4cd0c
         return is->video_current_pts;
72ea344b
     } else {
68aefbe8
         return is->video_current_pts_drift + av_gettime() / 1000000.0;
72ea344b
     }
638c9d91
 }
 
 /* get the current external clock value */
 static double get_external_clock(VideoState *is)
 {
     int64_t ti;
     ti = av_gettime();
     return is->external_clock + ((ti - is->external_clock_time) * 1e-6);
 }
 
 /* get the current master clock value */
 static double get_master_clock(VideoState *is)
 {
     double val;
 
72ea344b
     if (is->av_sync_type == AV_SYNC_VIDEO_MASTER) {
         if (is->video_st)
             val = get_video_clock(is);
         else
             val = get_audio_clock(is);
     } else if (is->av_sync_type == AV_SYNC_AUDIO_MASTER) {
         if (is->audio_st)
             val = get_audio_clock(is);
         else
             val = get_video_clock(is);
     } else {
638c9d91
         val = get_external_clock(is);
72ea344b
     }
638c9d91
     return val;
 }
 
72ea344b
 /* seek in the stream */
2ef46053
 static void stream_seek(VideoState *is, int64_t pos, int64_t rel, int seek_by_bytes)
72ea344b
 {
687fae2b
     if (!is->seek_req) {
         is->seek_pos = pos;
4ed29207
         is->seek_rel = rel;
3890dd3a
         is->seek_flags &= ~AVSEEK_FLAG_BYTE;
94b594c6
         if (seek_by_bytes)
             is->seek_flags |= AVSEEK_FLAG_BYTE;
687fae2b
         is->seek_req = 1;
     }
72ea344b
 }
 
 /* pause or resume the video */
ab7fdbab
 static void stream_toggle_pause(VideoState *is)
72ea344b
 {
68aefbe8
     if (is->paused) {
         is->frame_timer += av_gettime() / 1000000.0 + is->video_current_pts_drift - is->video_current_pts;
da7c65f0
         if (is->read_pause_return != AVERROR(ENOSYS)) {
68aefbe8
             is->video_current_pts = is->video_current_pts_drift + av_gettime() / 1000000.0;
f5668147
         }
68aefbe8
         is->video_current_pts_drift = is->video_current_pts - av_gettime() / 1000000.0;
72ea344b
     }
68aefbe8
     is->paused = !is->paused;
72ea344b
 }
 
4e268aae
 static double compute_target_delay(double delay, VideoState *is)
49410784
 {
4e268aae
     double sync_threshold, diff;
49410784
 
     /* update delay to follow master synchronisation source */
     if (((is->av_sync_type == AV_SYNC_AUDIO_MASTER && is->audio_st) ||
          is->av_sync_type == AV_SYNC_EXTERNAL_CLOCK)) {
         /* if video is slave, we try to correct big delays by
            duplicating or deleting a frame */
f04c6e35
         diff = get_video_clock(is) - get_master_clock(is);
49410784
 
         /* skip or repeat frame. We take into account the
            delay to compute the threshold. I still don't know
            if it is the best guess */
         sync_threshold = FFMAX(AV_SYNC_THRESHOLD, delay);
         if (fabs(diff) < AV_NOSYNC_THRESHOLD) {
             if (diff <= -sync_threshold)
                 delay = 0;
             else if (diff >= sync_threshold)
                 delay = 2 * delay;
         }
     }
8543f0f9
 
4e268aae
     av_dlog(NULL, "video: delay=%0.3f A-V=%f\n",
             delay, -diff);
 
     return delay;
 }
 
 static void pictq_next_picture(VideoState *is) {
     /* update queue size and signal for next picture */
     if (++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE)
         is->pictq_rindex = 0;
eecc17a7
 
4e268aae
     SDL_LockMutex(is->pictq_mutex);
     is->pictq_size--;
     SDL_CondSignal(is->pictq_cond);
     SDL_UnlockMutex(is->pictq_mutex);
49410784
 }
 
223cba6e
 static void update_video_pts(VideoState *is, double pts, int64_t pos) {
     double time = av_gettime() / 1000000.0;
     /* update current video pts */
     is->video_current_pts = pts;
     is->video_current_pts_drift = is->video_current_pts - time;
     is->video_current_pos = pos;
     is->frame_last_pts = pts;
 }
 
01310af2
 /* called to display each frame */
4a22ea4d
 static void video_refresh(void *opaque)
01310af2
 {
     VideoState *is = opaque;
     VideoPicture *vp;
223cba6e
     double time;
638c9d91
 
72ce053b
     SubPicture *sp, *sp2;
01310af2
 
     if (is->video_st) {
d38c9e7a
 retry:
01310af2
         if (is->pictq_size == 0) {
223cba6e
             SDL_LockMutex(is->pictq_mutex);
             if (is->frame_last_dropped_pts != AV_NOPTS_VALUE && is->frame_last_dropped_pts > is->frame_last_pts) {
                 update_video_pts(is, is->frame_last_dropped_pts, is->frame_last_dropped_pos);
                 is->frame_last_dropped_pts = AV_NOPTS_VALUE;
             }
             SDL_UnlockMutex(is->pictq_mutex);
da7c65f0
             // nothing to do, no picture to display in the que
01310af2
         } else {
4e268aae
             double last_duration, duration, delay;
638c9d91
             /* dequeue the picture */
01310af2
             vp = &is->pictq[is->pictq_rindex];
638c9d91
 
4e268aae
             if (vp->skip) {
                 pictq_next_picture(is);
                 goto retry;
             }
 
dbe7170e
             if (is->paused)
                 goto display;
 
4e268aae
             /* compute nominal last_duration */
             last_duration = vp->pts - is->frame_last_pts;
             if (last_duration > 0 && last_duration < 10.0) {
                 /* if duration of the last frame was sane, update last_duration in video state */
                 is->frame_last_duration = last_duration;
             }
             delay = compute_target_delay(is->frame_last_duration, is);
 
223cba6e
             time= av_gettime()/1000000.0;
41211483
             if (time < is->frame_timer + delay)
d38c9e7a
                 return;
4e268aae
 
abb0e4f6
             if (delay > 0)
                 is->frame_timer += delay * FFMAX(1, floor((time-is->frame_timer) / delay));
4e268aae
 
223cba6e
             SDL_LockMutex(is->pictq_mutex);
             update_video_pts(is, vp->pts, vp->pos);
             SDL_UnlockMutex(is->pictq_mutex);
4e268aae
 
da7c65f0
             if (is->pictq_size > 1) {
                 VideoPicture *nextvp = &is->pictq[(is->pictq_rindex + 1) % VIDEO_PICTURE_QUEUE_SIZE];
41211483
                 duration = nextvp->pts - vp->pts; // More accurate this way, 1/time_base is often not reflecting FPS
4e268aae
             } else {
41211483
                 duration = vp->duration;
d38c9e7a
             }
4e268aae
 
             if((framedrop>0 || (framedrop && is->audio_st)) && time > is->frame_timer + duration){
a30ef633
                 if(is->pictq_size > 1){
d54af906
                     is->frame_drops_late++;
4e268aae
                     pictq_next_picture(is);
d38c9e7a
                     goto retry;
                 }
             }
638c9d91
 
da7c65f0
             if (is->subtitle_st) {
72ce053b
                 if (is->subtitle_stream_changed) {
                     SDL_LockMutex(is->subpq_mutex);
115329f1
 
72ce053b
                     while (is->subpq_size) {
                         free_subpicture(&is->subpq[is->subpq_rindex]);
115329f1
 
72ce053b
                         /* update queue size and signal for next picture */
                         if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE)
                             is->subpq_rindex = 0;
115329f1
 
72ce053b
                         is->subpq_size--;
                     }
                     is->subtitle_stream_changed = 0;
 
                     SDL_CondSignal(is->subpq_cond);
                     SDL_UnlockMutex(is->subpq_mutex);
                 } else {
                     if (is->subpq_size > 0) {
                         sp = &is->subpq[is->subpq_rindex];
 
                         if (is->subpq_size > 1)
                             sp2 = &is->subpq[(is->subpq_rindex + 1) % SUBPICTURE_QUEUE_SIZE];
                         else
                             sp2 = NULL;
 
                         if ((is->video_current_pts > (sp->pts + ((float) sp->sub.end_display_time / 1000)))
                                 || (sp2 && is->video_current_pts > (sp2->pts + ((float) sp2->sub.start_display_time / 1000))))
                         {
                             free_subpicture(sp);
 
                             /* update queue size and signal for next picture */
                             if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE)
                                 is->subpq_rindex = 0;
 
                             SDL_LockMutex(is->subpq_mutex);
                             is->subpq_size--;
                             SDL_CondSignal(is->subpq_cond);
                             SDL_UnlockMutex(is->subpq_mutex);
                         }
                     }
                 }
             }
 
dbe7170e
 display:
01310af2
             /* display picture */
112c4b87
             if (!display_disable)
24d13ebc
                 video_display(is);
115329f1
 
dbe7170e
             if (!is->paused)
                 pictq_next_picture(is);
01310af2
         }
     } else if (is->audio_st) {
         /* draw the next audio frame */
 
         /* if only audio stream, then display the audio bars (better
            than nothing, just to test the implementation */
115329f1
 
01310af2
         /* display picture */
112c4b87
         if (!display_disable)
24d13ebc
             video_display(is);
01310af2
     }
dbe7170e
     is->force_refresh = 0;
01310af2
     if (show_status) {
         static int64_t last_time;
         int64_t cur_time;
72ce053b
         int aqsize, vqsize, sqsize;
638c9d91
         double av_diff;
115329f1
 
01310af2
         cur_time = av_gettime();
1e1a0b18
         if (!last_time || (cur_time - last_time) >= 30000) {
01310af2
             aqsize = 0;
             vqsize = 0;
72ce053b
             sqsize = 0;
01310af2
             if (is->audio_st)
                 aqsize = is->audioq.size;
             if (is->video_st)
                 vqsize = is->videoq.size;
72ce053b
             if (is->subtitle_st)
                 sqsize = is->subtitleq.size;
638c9d91
             av_diff = 0;
             if (is->audio_st && is->video_st)
                 av_diff = get_audio_clock(is) - get_video_clock(is);
d54af906
             printf("%7.2f A-V:%7.3f fd=%4d aq=%5dKB vq=%5dKB sq=%5dB f=%"PRId64"/%"PRId64"   \r",
d6705a27
                    get_master_clock(is),
                    av_diff,
d54af906
                    is->frame_drops_early + is->frame_drops_late,
d6705a27
                    aqsize / 1024,
                    vqsize / 1024,
                    sqsize,
                    is->video_st ? is->video_st->codec->pts_correction_num_faulty_dts : 0,
                    is->video_st ? is->video_st->codec->pts_correction_num_faulty_pts : 0);
01310af2
             fflush(stdout);
             last_time = cur_time;
         }
     }
 }
 
 /* allocate a picture (needs to do that in main thread to avoid
    potential locking problems */
843509e2
 static void alloc_picture(AllocEventProps *event_props)
01310af2
 {
843509e2
     VideoState *is = event_props->is;
     AVFrame *frame = event_props->frame;
01310af2
     VideoPicture *vp;
 
     vp = &is->pictq[is->pictq_windex];
 
     if (vp->bmp)
         SDL_FreeYUVOverlay(vp->bmp);
 
917d2bb3
 #if CONFIG_AVFILTER
     if (vp->picref)
7fce481a
         avfilter_unref_buffer(vp->picref);
917d2bb3
     vp->picref = NULL;
 #endif
 
843509e2
     vp->width   = frame->width;
     vp->height  = frame->height;
     vp->pix_fmt = frame->format;
 
     video_open(event_props->is, 0);
 
917d2bb3
     vp->bmp = SDL_CreateYUVOverlay(vp->width, vp->height,
115329f1
                                    SDL_YV12_OVERLAY,
61890b02
                                    screen);
cb036f90
     if (!vp->bmp || vp->bmp->pitches[0] < vp->width) {
         /* SDL allocates a buffer smaller than requested if the video
          * overlay hardware is unable to support the requested size. */
         fprintf(stderr, "Error: the video system does not support an image\n"
70d54392
                         "size of %dx%d pixels. Try using -lowres or -vf \"scale=w:h\"\n"
cb036f90
                         "to reduce the image size.\n", vp->width, vp->height );
84506ebd
         do_exit(is);
cb036f90
     }
01310af2
 
     SDL_LockMutex(is->pictq_mutex);
     vp->allocated = 1;
     SDL_CondSignal(is->pictq_cond);
     SDL_UnlockMutex(is->pictq_mutex);
 }
 
c2606259
 static int queue_picture(VideoState *is, AVFrame *src_frame, double pts1, int64_t pos)
01310af2
 {
     VideoPicture *vp;
c2606259
     double frame_delay, pts = pts1;
 
     /* compute the exact PTS for the picture if it is omitted in the stream
      * pts1 is the dts of the pkt / pts of the frame */
     if (pts != 0) {
         /* update video clock with pts, if present */
         is->video_clock = pts;
     } else {
         pts = is->video_clock;
     }
     /* update video clock for next frame */
     frame_delay = av_q2d(is->video_st->codec->time_base);
     /* for MPEG2, the frame can be repeated, so we update the
        clock accordingly */
     frame_delay += src_frame->repeat_pict * (frame_delay * 0.5);
     is->video_clock += frame_delay;
 
 #if defined(DEBUG_SYNC) && 0
     printf("frame_type=%c clock=%0.3f pts=%0.3f\n",
301183d9
            av_get_picture_type_char(src_frame->pict_type), pts, pts1);
c2606259
 #endif
a6f395d6
 
01310af2
     /* wait until we have space to put a new picture */
     SDL_LockMutex(is->pictq_mutex);
d38c9e7a
 
01310af2
     while (is->pictq_size >= VIDEO_PICTURE_QUEUE_SIZE &&
            !is->videoq.abort_request) {
         SDL_CondWait(is->pictq_cond, is->pictq_mutex);
     }
     SDL_UnlockMutex(is->pictq_mutex);
115329f1
 
01310af2
     if (is->videoq.abort_request)
         return -1;
 
     vp = &is->pictq[is->pictq_windex];
 
c369ddb7
     vp->duration = frame_delay;
 
01310af2
     /* alloc or resize hardware picture buffer */
8085a5b7
     if (!vp->bmp || vp->reallocate ||
843509e2
         vp->width  != src_frame->width ||
         vp->height != src_frame->height) {
01310af2
         SDL_Event event;
843509e2
         AllocEventProps event_props;
 
         event_props.frame = src_frame;
         event_props.is = is;
01310af2
 
fec5777e
         vp->allocated  = 0;
8085a5b7
         vp->reallocate = 0;
01310af2
 
         /* the allocation must be done in the main thread to avoid
843509e2
            locking problems. We wait in this block for the event to complete,
            so we can pass a pointer to event_props to it. */
01310af2
         event.type = FF_ALLOC_EVENT;
843509e2
         event.user.data1 = &event_props;
01310af2
         SDL_PushEvent(&event);
115329f1
 
01310af2
         /* wait until the picture is allocated */
         SDL_LockMutex(is->pictq_mutex);
         while (!vp->allocated && !is->videoq.abort_request) {
             SDL_CondWait(is->pictq_cond, is->pictq_mutex);
         }
0a1cf662
         /* if the queue is aborted, we have to pop the pending ALLOC event or wait for the allocation to complete */
         if (is->videoq.abort_request && SDL_PeepEvents(&event, 1, SDL_GETEVENT, SDL_EVENTMASK(FF_ALLOC_EVENT)) != 1) {
             while (!vp->allocated) {
                 SDL_CondWait(is->pictq_cond, is->pictq_mutex);
             }
         }
01310af2
         SDL_UnlockMutex(is->pictq_mutex);
 
         if (is->videoq.abort_request)
             return -1;
     }
 
638c9d91
     /* if the frame is not skipped, then display it */
01310af2
     if (vp->bmp) {
a92be9b8
         AVPicture pict = { { 0 } };
917d2bb3
 #if CONFIG_AVFILTER
da7c65f0
         if (vp->picref)
7fce481a
             avfilter_unref_buffer(vp->picref);
917d2bb3
         vp->picref = src_frame->opaque;
 #endif
fbf1b885
 
01310af2
         /* get a pointer on the bitmap */
         SDL_LockYUVOverlay (vp->bmp);
 
         pict.data[0] = vp->bmp->pixels[0];
         pict.data[1] = vp->bmp->pixels[2];
         pict.data[2] = vp->bmp->pixels[1];
 
         pict.linesize[0] = vp->bmp->pitches[0];
         pict.linesize[1] = vp->bmp->pitches[2];
         pict.linesize[2] = vp->bmp->pitches[1];
917d2bb3
 
 #if CONFIG_AVFILTER
da7c65f0
         // FIXME use direct rendering
a6f395d6
         av_picture_copy(&pict, (AVPicture *)src_frame,
917d2bb3
                         vp->pix_fmt, vp->width, vp->height);
825ec16d
         vp->sample_aspect_ratio = vp->picref->video->sample_aspect_ratio;
917d2bb3
 #else
e43d7a18
         sws_flags = av_get_int(sws_opts, "sws_flags", NULL);
3ac56e28
         is->img_convert_ctx = sws_getCachedContext(is->img_convert_ctx,
917d2bb3
             vp->width, vp->height, vp->pix_fmt, vp->width, vp->height,
05d33d86
             PIX_FMT_YUV420P, sws_flags, NULL, NULL, NULL);
3ac56e28
         if (is->img_convert_ctx == NULL) {
26ba8235
             fprintf(stderr, "Cannot initialize the conversion context\n");
             exit(1);
         }
3ac56e28
         sws_scale(is->img_convert_ctx, src_frame->data, src_frame->linesize,
917d2bb3
                   0, vp->height, pict.data, pict.linesize);
825ec16d
         vp->sample_aspect_ratio = av_guess_sample_aspect_ratio(is->ic, is->video_st, src_frame);
917d2bb3
 #endif
01310af2
         /* update the bitmap content */
         SDL_UnlockYUVOverlay(vp->bmp);
 
638c9d91
         vp->pts = pts;
1a620dd7
         vp->pos = pos;
4e268aae
         vp->skip = 0;
01310af2
 
         /* now we can update the picture count */
         if (++is->pictq_windex == VIDEO_PICTURE_QUEUE_SIZE)
             is->pictq_windex = 0;
         SDL_LockMutex(is->pictq_mutex);
         is->pictq_size++;
         SDL_UnlockMutex(is->pictq_mutex);
     }
638c9d91
     return 0;
 }
 
3966a574
 static int get_video_frame(VideoState *is, AVFrame *frame, int64_t *pts, AVPacket *pkt)
01310af2
 {
98704131
     int got_picture, i;
01310af2
 
199c18a7
     if (packet_queue_get(&is->videoq, pkt, 1) < 0)
         return -1;
6c7d3ead
 
199c18a7
     if (pkt->data == flush_pkt.data) {
         avcodec_flush_buffers(is->video_st->codec);
6c7d3ead
 
199c18a7
         SDL_LockMutex(is->pictq_mutex);
da7c65f0
         // Make sure there are no long delay timers (ideally we should just flush the que but thats harder)
199c18a7
         for (i = 0; i < VIDEO_PICTURE_QUEUE_SIZE; i++) {
4e268aae
             is->pictq[i].skip = 1;
199c18a7
         }
         while (is->pictq_size && !is->videoq.abort_request) {
             SDL_CondWait(is->pictq_cond, is->pictq_mutex);
39c6a118
         }
199c18a7
         is->video_current_pos = -1;
         is->frame_last_pts = AV_NOPTS_VALUE;
4e268aae
         is->frame_last_duration = 0;
199c18a7
         is->frame_timer = (double)av_gettime() / 1000000.0;
223cba6e
         is->frame_last_dropped_pts = AV_NOPTS_VALUE;
         SDL_UnlockMutex(is->pictq_mutex);
 
199c18a7
         return 0;
     }
7a8bfa5d
 
98704131
     avcodec_decode_video2(is->video_st->codec, frame, &got_picture, pkt);
199c18a7
 
     if (got_picture) {
223cba6e
         int ret = 1;
 
199c18a7
         if (decoder_reorder_pts == -1) {
234e0025
             *pts = av_frame_get_best_effort_timestamp(frame);
199c18a7
         } else if (decoder_reorder_pts) {
2fa1d7b3
             *pts = frame->pkt_pts;
199c18a7
         } else {
fd0ae17a
             *pts = frame->pkt_dts;
199c18a7
         }
 
         if (*pts == AV_NOPTS_VALUE) {
             *pts = 0;
99e0b12b
         }
41db429d
 
223cba6e
         if (((is->av_sync_type == AV_SYNC_AUDIO_MASTER && is->audio_st) || is->av_sync_type == AV_SYNC_EXTERNAL_CLOCK) &&
              (framedrop>0 || (framedrop && is->audio_st))) {
             SDL_LockMutex(is->pictq_mutex);
             if (is->frame_last_pts != AV_NOPTS_VALUE && *pts) {
                 double clockdiff = get_video_clock(is) - get_master_clock(is);
                 double dpts = av_q2d(is->video_st->time_base) * *pts;
                 double ptsdiff = dpts - is->frame_last_pts;
                 if (fabs(clockdiff) < AV_NOSYNC_THRESHOLD &&
                      ptsdiff > 0 && ptsdiff < AV_NOSYNC_THRESHOLD &&
8f17a8ef
                      clockdiff + ptsdiff - is->frame_last_filter_delay < 0) {
223cba6e
                     is->frame_last_dropped_pos = pkt->pos;
                     is->frame_last_dropped_pts = dpts;
d54af906
                     is->frame_drops_early++;
223cba6e
                     ret = 0;
                 }
             }
             SDL_UnlockMutex(is->pictq_mutex);
         }
d38c9e7a
 
8f17a8ef
         if (ret)
             is->frame_last_returned_time = av_gettime() / 1000000.0;
 
223cba6e
         return ret;
d38c9e7a
     }
917d2bb3
     return 0;
 }
 
 #if CONFIG_AVFILTER
 typedef struct {
     VideoState *is;
     AVFrame *frame;
dd0c789b
     int use_dr1;
917d2bb3
 } FilterPriv;
 
dd0c789b
 static int input_get_buffer(AVCodecContext *codec, AVFrame *pic)
 {
     AVFilterContext *ctx = codec->opaque;
ecc8dada
     AVFilterBufferRef  *ref;
dd0c789b
     int perms = AV_PERM_WRITE;
e37f161e
     int i, w, h, stride[AV_NUM_DATA_POINTERS];
dd0c789b
     unsigned edge;
dc172ecc
     int pixel_size;
dd0c789b
 
f6d71b39
     av_assert0(codec->flags & CODEC_FLAG_EMU_EDGE);
 
0ccabeea
     if (codec->codec->capabilities & CODEC_CAP_NEG_LINESIZES)
         perms |= AV_PERM_NEG_LINESIZES;
 
da7c65f0
     if (pic->buffer_hints & FF_BUFFER_HINTS_VALID) {
         if (pic->buffer_hints & FF_BUFFER_HINTS_READABLE) perms |= AV_PERM_READ;
         if (pic->buffer_hints & FF_BUFFER_HINTS_PRESERVE) perms |= AV_PERM_PRESERVE;
         if (pic->buffer_hints & FF_BUFFER_HINTS_REUSABLE) perms |= AV_PERM_REUSE2;
dd0c789b
     }
da7c65f0
     if (pic->reference) perms |= AV_PERM_READ | AV_PERM_PRESERVE;
dd0c789b
 
     w = codec->width;
     h = codec->height;
9f8008a9
 
c2500635
     if(av_image_check_size(w, h, 0, codec) || codec->pix_fmt<0)
9f8008a9
         return -1;
 
dd0c789b
     avcodec_align_dimensions2(codec, &w, &h, stride);
     edge = codec->flags & CODEC_FLAG_EMU_EDGE ? 0 : avcodec_get_edge_width();
     w += edge << 1;
     h += edge << 1;
2c28e269
     if (codec->pix_fmt != ctx->outputs[0]->format) {
         av_log(codec, AV_LOG_ERROR, "Pixel format mismatches %d %d\n", codec->pix_fmt, ctx->outputs[0]->format);
         return -1;
     }
da7c65f0
     if (!(ref = avfilter_get_video_buffer(ctx->outputs[0], perms, w, h)))
dd0c789b
         return -1;
 
da7c65f0
     pixel_size = av_pix_fmt_descriptors[ref->format].comp[0].step_minus1 + 1;
cc80caff
     ref->video->w = codec->width;
     ref->video->h = codec->height;
da7c65f0
     for (i = 0; i < 4; i ++) {
d54e0948
         unsigned hshift = (i == 1 || i == 2) ? av_pix_fmt_descriptors[ref->format].log2_chroma_w : 0;
         unsigned vshift = (i == 1 || i == 2) ? av_pix_fmt_descriptors[ref->format].log2_chroma_h : 0;
dd0c789b
 
afe4d3bd
         pic->base[i]     = ref->data[i];
3635c07b
         if (ref->data[i]) {
dc172ecc
             ref->data[i]    += ((edge * pixel_size) >> hshift) + ((edge * ref->linesize[i]) >> vshift);
3635c07b
         }
dd0c789b
         pic->data[i]     = ref->data[i];
         pic->linesize[i] = ref->linesize[i];
     }
     pic->opaque = ref;
     pic->type   = FF_BUFFER_TYPE_USER;
867ab7fb
     pic->reordered_opaque = codec->reordered_opaque;
6943fb47
     pic->width               = codec->width;
     pic->height              = codec->height;
     pic->format              = codec->pix_fmt;
     pic->sample_aspect_ratio = codec->sample_aspect_ratio;
da7c65f0
     if (codec->pkt) pic->pkt_pts = codec->pkt->pts;
     else            pic->pkt_pts = AV_NOPTS_VALUE;
dd0c789b
     return 0;
 }
 
 static void input_release_buffer(AVCodecContext *codec, AVFrame *pic)
 {
     memset(pic->data, 0, sizeof(pic->data));
7fce481a
     avfilter_unref_buffer(pic->opaque);
dd0c789b
 }
 
12bd3c1f
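 /* reget_buffer() callback: request a fresh buffer if none is attached yet,
    otherwise reuse the existing one after checking that the picture
    properties have not changed */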
 static int input_reget_buffer(AVCodecContext *codec, AVFrame *pic)
 {
ecc8dada
     AVFilterBufferRef *ref = pic->opaque;
12bd3c1f
 
     if (pic->data[0] == NULL) {
         pic->buffer_hints |= FF_BUFFER_HINTS_READABLE;
         return codec->get_buffer(codec, pic);
     }
 
cc80caff
     if ((codec->width != ref->video->w) || (codec->height != ref->video->h) ||
d54e0948
         (codec->pix_fmt != ref->format)) {
12bd3c1f
         av_log(codec, AV_LOG_ERROR, "Picture properties changed.\n");
         return -1;
     }
 
     pic->reordered_opaque = codec->reordered_opaque;
da7c65f0
     if (codec->pkt) pic->pkt_pts = codec->pkt->pts;
     else            pic->pkt_pts = AV_NOPTS_VALUE;
12bd3c1f
     return 0;
 }
 
917d2bb3
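 /* init callback of the source filter: remember the VideoState and, when the
    decoder supports direct rendering (CODEC_CAP_DR1), install the buffer
    callbacks above so frames are decoded straight into filter buffers */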
 static int input_init(AVFilterContext *ctx, const char *args, void *opaque)
 {
     FilterPriv *priv = ctx->priv;
dd0c789b
     AVCodecContext *codec;
da7c65f0
     if (!opaque) return -1;
917d2bb3
 
     priv->is = opaque;
dd0c789b
     codec    = priv->is->video_st->codec;
     codec->opaque = ctx;
da7c65f0
     if (codec->codec->capabilities & CODEC_CAP_DR1) {
175714c0
         av_assert0(codec->flags & CODEC_FLAG_EMU_EDGE);
dd0c789b
         priv->use_dr1 = 1;
         codec->get_buffer     = input_get_buffer;
         codec->release_buffer = input_release_buffer;
12bd3c1f
         codec->reget_buffer   = input_reget_buffer;
b38f008e
         codec->thread_safe_callbacks = 1;
dd0c789b
     }
 
917d2bb3
     priv->frame = avcodec_alloc_frame();
 
     return 0;
 }
 
 static void input_uninit(AVFilterContext *ctx)
 {
     FilterPriv *priv = ctx->priv;
     av_free(priv->frame);
 }
 
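 /* request_frame() of the source filter: decode the next video frame, wrap it
    in a buffer reference (reusing the decoder's buffer when direct rendering
    is active, copying otherwise) and push it down the filter graph */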
 static int input_request_frame(AVFilterLink *link)
 {
     FilterPriv *priv = link->src->priv;
ecc8dada
     AVFilterBufferRef *picref;
3966a574
     int64_t pts = 0;
917d2bb3
     AVPacket pkt;
     int ret;
 
     while (!(ret = get_video_frame(priv->is, priv->frame, &pts, &pkt)))
         av_free_packet(&pkt);
     if (ret < 0)
         return -1;
 
41211483
     if (priv->use_dr1 && priv->frame->opaque) {
7fce481a
         picref = avfilter_ref_buffer(priv->frame->opaque, ~0);
dd0c789b
     } else {
4b5ff9b6
         picref = avfilter_get_video_buffer(link, AV_PERM_WRITE, priv->frame->width, priv->frame->height);
34017fd9
         av_image_copy(picref->data, picref->linesize,
fe9818d0
                       (const uint8_t **)(void **)priv->frame->data, priv->frame->linesize,
4b5ff9b6
                       picref->format, priv->frame->width, priv->frame->height);
dd0c789b
     }
917d2bb3
     av_free_packet(&pkt);
 
566666ca
     avfilter_copy_frame_props(picref, priv->frame);
84087b24
     picref->video->sample_aspect_ratio = av_guess_sample_aspect_ratio(priv->is->ic, priv->is->video_st, priv->frame);
917d2bb3
     picref->pts = pts;
566666ca
 
c41c5b02
     avfilter_start_frame(link, picref);
4b5ff9b6
     avfilter_draw_slice(link, 0, picref->video->h, 1);
917d2bb3
     avfilter_end_frame(link);
 
     return 0;
 }
 
 static int input_query_formats(AVFilterContext *ctx)
 {
     FilterPriv *priv = ctx->priv;
     enum PixelFormat pix_fmts[] = {
         priv->is->video_st->codec->pix_fmt, PIX_FMT_NONE
     };
 
fd2c0a5d
     avfilter_set_common_pixel_formats(ctx, avfilter_make_format_list(pix_fmts));
917d2bb3
     return 0;
 }
 
 static int input_config_props(AVFilterLink *link)
 {
     FilterPriv *priv  = link->src->priv;
f8eaa006
     AVStream *s = priv->is->video_st;
917d2bb3
 
c6ef3f44
     link->w = s->codec->width;
     link->h = s->codec->height;
f8eaa006
     link->sample_aspect_ratio = s->sample_aspect_ratio.num ?
c6ef3f44
         s->sample_aspect_ratio : s->codec->sample_aspect_ratio;
     link->time_base = s->time_base;
917d2bb3
 
     return 0;
 }
 
 static AVFilter input_filter =
 {
     .name      = "ffplay_input",
 
     .priv_size = sizeof(FilterPriv),
 
     .init      = input_init,
     .uninit    = input_uninit,
 
     .query_formats = input_query_formats,
 
     .inputs    = (AVFilterPad[]) {{ .name = NULL }},
     .outputs   = (AVFilterPad[]) {{ .name = "default",
72415b2a
                                     .type = AVMEDIA_TYPE_VIDEO,
917d2bb3
                                     .request_frame = input_request_frame,
                                     .config_props  = input_config_props, },
                                   { .name = NULL }},
 };
 
8904a0f1
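 /* build the video filter graph: the source filter above, an optional chain
    of user-supplied filters (vfilters), a "format" filter forcing yuv420p and
    a buffersink that the video thread pulls decoded frames from */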
 static int configure_video_filters(AVFilterGraph *graph, VideoState *is, const char *vfilters)
917d2bb3
 {
b7327887
     static const enum PixelFormat pix_fmts[] = { PIX_FMT_YUV420P, PIX_FMT_NONE };
3f073fa2
     char sws_flags_str[128];
8904a0f1
     int ret;
c4415f6e
     AVBufferSinkParams *buffersink_params = av_buffersink_params_alloc();
61930bd0
     AVFilterContext *filt_src = NULL, *filt_out = NULL, *filt_format;
3f073fa2
     snprintf(sws_flags_str, sizeof(sws_flags_str), "flags=%d", sws_flags);
     graph->scale_sws_opts = av_strdup(sws_flags_str);
917d2bb3
 
8904a0f1
     if ((ret = avfilter_graph_create_filter(&filt_src, &input_filter, "src",
                                             NULL, is, graph)) < 0)
71a1d111
         return ret;
3c5fe5b5
 
c4415f6e
 #if FF_API_OLD_VSINK_API
61930bd0
     ret = avfilter_graph_create_filter(&filt_out,
                                        avfilter_get_by_name("buffersink"),
                                        "out", NULL, pix_fmts, graph);
c4415f6e
 #else
     buffersink_params->pixel_fmts = pix_fmts;
61930bd0
     ret = avfilter_graph_create_filter(&filt_out,
                                        avfilter_get_by_name("buffersink"),
                                        "out", NULL, buffersink_params, graph);
c4415f6e
 #endif
     av_freep(&buffersink_params);
     if (ret < 0)
71a1d111
         return ret;
917d2bb3
 
ac712309
     if ((ret = avfilter_graph_create_filter(&filt_format,
                                             avfilter_get_by_name("format"),
                                             "format", "yuv420p", NULL, graph)) < 0)
         return ret;
     if ((ret = avfilter_link(filt_format, 0, filt_out, 0)) < 0)
         return ret;
 
 
da7c65f0
     if (vfilters) {
c5354942
         AVFilterInOut *outputs = avfilter_inout_alloc();
         AVFilterInOut *inputs  = avfilter_inout_alloc();
917d2bb3
 
         outputs->name    = av_strdup("in");
7313132b
         outputs->filter_ctx = filt_src;
917d2bb3
         outputs->pad_idx = 0;
         outputs->next    = NULL;
 
         inputs->name    = av_strdup("out");
ac712309
         inputs->filter_ctx = filt_format;
917d2bb3
         inputs->pad_idx = 0;
         inputs->next    = NULL;
 
6119b23a
         if ((ret = avfilter_graph_parse(graph, vfilters, &inputs, &outputs, NULL)) < 0)
71a1d111
             return ret;
917d2bb3
     } else {
ac712309
         if ((ret = avfilter_link(filt_src, 0, filt_format, 0)) < 0)
71a1d111
             return ret;
917d2bb3
     }
 
8904a0f1
     if ((ret = avfilter_graph_config(graph, NULL)) < 0)
71a1d111
         return ret;
917d2bb3
 
     is->out_video_filter = filt_out;
71a1d111
 
8904a0f1
     return ret;
 }
 
 #endif  /* CONFIG_AVFILTER */
 
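 /* video decoding thread: pull decoded frames (through the filter graph when
    CONFIG_AVFILTER is enabled), rescale their pts to seconds and hand them to
    queue_picture() for display */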
 static int video_thread(void *arg)
 {
     VideoState *is = arg;
da7c65f0
     AVFrame *frame = avcodec_alloc_frame();
b4434475
     int64_t pts_int = AV_NOPTS_VALUE, pos = -1;
8904a0f1
     double pts;
     int ret;
 
 #if CONFIG_AVFILTER
     AVFilterGraph *graph = avfilter_graph_alloc();
     AVFilterContext *filt_out = NULL;
428c59d9
     int last_w = is->video_st->codec->width;
     int last_h = is->video_st->codec->height;
8904a0f1
 
     if ((ret = configure_video_filters(graph, is, vfilters)) < 0)
         goto the_end;
     filt_out = is->out_video_filter;
917d2bb3
 #endif
 
da7c65f0
     for (;;) {
917d2bb3
 #if !CONFIG_AVFILTER
         AVPacket pkt;
387b4ac9
 #else
ff0652e5
         AVFilterBufferRef *picref;
44f669e7
         AVRational tb = filt_out->inputs[0]->time_base;
917d2bb3
 #endif
         while (is->paused && !is->videoq.abort_request)
             SDL_Delay(10);
 #if CONFIG_AVFILTER
428c59d9
         if (   last_w != is->video_st->codec->width
             || last_h != is->video_st->codec->height) {
031ba466
             av_log(NULL, AV_LOG_INFO, "Frame changed from size:%dx%d to size:%dx%d\n",
                    last_w, last_h, is->video_st->codec->width, is->video_st->codec->height);
428c59d9
             avfilter_graph_free(&graph);
             graph = avfilter_graph_alloc();
             if ((ret = configure_video_filters(graph, is, vfilters)) < 0)
                 goto the_end;
             filt_out = is->out_video_filter;
             last_w = is->video_st->codec->width;
             last_h = is->video_st->codec->height;
         }
c4415f6e
         ret = av_buffersink_get_buffer_ref(filt_out, &picref, 0);
ff0652e5
         if (picref) {
44f669e7
             avfilter_fill_frame_from_video_buffer_ref(frame, picref);
ff0652e5
             pts_int = picref->pts;
ac712309
             tb      = filt_out->inputs[0]->time_base;
ff0652e5
             pos     = picref->pos;
             frame->opaque = picref;
ac712309
 
             ret = 1;
ff0652e5
         }
387b4ac9
 
951e715c
         if (ret >= 0 && av_cmp_q(tb, is->video_st->time_base)) {
97b925ea
             av_unused int64_t pts1 = pts_int;
387b4ac9
             pts_int = av_rescale_q(pts_int, tb, is->video_st->time_base);
97b925ea
             av_dlog(NULL, "video_thread(): "
                     "tb:%d/%d pts:%"PRId64" -> tb:%d/%d pts:%"PRId64"\n",
                     tb.num, tb.den, pts1,
                     is->video_st->time_base.num, is->video_st->time_base.den, pts_int);
387b4ac9
         }
917d2bb3
 #else
         ret = get_video_frame(is, frame, &pts_int, &pkt);
539647c6
         pos = pkt.pos;
b93e12fd
         av_free_packet(&pkt);
89080a0a
         if (ret == 0)
             continue;
917d2bb3
 #endif
 
da7c65f0
         if (ret < 0)
             goto the_end;
917d2bb3
 
8f17a8ef
         is->frame_last_filter_delay = av_gettime() / 1000000.0 - is->frame_last_returned_time;
         if (fabs(is->frame_last_filter_delay) > AV_NOSYNC_THRESHOLD / 10.0)
             is->frame_last_filter_delay = 0;
 
67a82516
 #if CONFIG_AVFILTER
44f669e7
         if (!picref)
917d2bb3
             continue;
67a82516
 #endif
917d2bb3
 
da7c65f0
         pts = pts_int * av_q2d(is->video_st->time_base);
917d2bb3
 
c2606259
         ret = queue_picture(is, frame, pts, pos);
b93e12fd
 
917d2bb3
         if (ret < 0)
             goto the_end;
 
84506ebd
         if (is->step)
             stream_toggle_pause(is);
01310af2
     }
  the_end:
917d2bb3
 #if CONFIG_AVFILTER
e1ce7568
     av_freep(&vfilters);
ab543afe
     avfilter_graph_free(&graph);
917d2bb3
 #endif
c6b1edc9
     av_free(frame);
01310af2
     return 0;
 }
 
72ce053b
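 /* subtitle decoding thread: decode subtitle packets, convert the palette of
    bitmap subtitles from RGBA to YUVA and store them in the subpicture queue */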
 static int subtitle_thread(void *arg)
 {
     VideoState *is = arg;
     SubPicture *sp;
     AVPacket pkt1, *pkt = &pkt1;
98704131
     int got_subtitle;
72ce053b
     double pts;
     int i, j;
     int r, g, b, y, u, v, a;
 
da7c65f0
     for (;;) {
72ce053b
         while (is->paused && !is->subtitleq.abort_request) {
             SDL_Delay(10);
         }
         if (packet_queue_get(&is->subtitleq, pkt, 1) < 0)
             break;
115329f1
 
da7c65f0
         if (pkt->data == flush_pkt.data) {
39c6a118
             avcodec_flush_buffers(is->subtitle_st->codec);
             continue;
         }
72ce053b
         SDL_LockMutex(is->subpq_mutex);
         while (is->subpq_size >= SUBPICTURE_QUEUE_SIZE &&
                !is->subtitleq.abort_request) {
             SDL_CondWait(is->subpq_cond, is->subpq_mutex);
         }
         SDL_UnlockMutex(is->subpq_mutex);
115329f1
 
72ce053b
         if (is->subtitleq.abort_request)
71a1d111
             return 0;
115329f1
 
72ce053b
         sp = &is->subpq[is->subpq_windex];
 
         /* NOTE: pts is the PTS of the _first_ picture beginning in
            this packet, if any */
         pts = 0;
         if (pkt->pts != AV_NOPTS_VALUE)
da7c65f0
             pts = av_q2d(is->subtitle_st->time_base) * pkt->pts;
72ce053b
 
98704131
         avcodec_decode_subtitle2(is->subtitle_st->codec, &sp->sub,
                                  &got_subtitle, pkt);
 
72ce053b
         if (got_subtitle && sp->sub.format == 0) {
             sp->pts = pts;
115329f1
 
72ce053b
             for (i = 0; i < sp->sub.num_rects; i++)
             {
db4fac64
                 for (j = 0; j < sp->sub.rects[i]->nb_colors; j++)
72ce053b
                 {
25b4c651
                     RGBA_IN(r, g, b, a, (uint32_t*)sp->sub.rects[i]->pict.data[1] + j);
72ce053b
                     y = RGB_TO_Y_CCIR(r, g, b);
                     u = RGB_TO_U_CCIR(r, g, b, 0);
                     v = RGB_TO_V_CCIR(r, g, b, 0);
25b4c651
                     YUVA_OUT((uint32_t*)sp->sub.rects[i]->pict.data[1] + j, y, u, v, a);
72ce053b
                 }
             }
 
             /* now we can update the picture count */
             if (++is->subpq_windex == SUBPICTURE_QUEUE_SIZE)
                 is->subpq_windex = 0;
             SDL_LockMutex(is->subpq_mutex);
             is->subpq_size++;
             SDL_UnlockMutex(is->subpq_mutex);
         }
         av_free_packet(pkt);
     }
     return 0;
 }
 
01310af2
 /* copy samples for viewing in the audio waveform/spectrum display */
 static void update_sample_display(VideoState *is, short *samples, int samples_size)
 {
705c6520
     int size, len;
01310af2
 
     size = samples_size / sizeof(short);
     while (size > 0) {
         len = SAMPLE_ARRAY_SIZE - is->sample_array_index;
         if (len > size)
             len = size;
         memcpy(is->sample_array + is->sample_array_index, samples, len * sizeof(short));
         samples += len;
         is->sample_array_index += len;
         if (is->sample_array_index >= SAMPLE_ARRAY_SIZE)
             is->sample_array_index = 0;
         size -= len;
     }
 }
 
6f06545b
 /* return the wanted number of samples to get better sync if sync_type is video
  * or external master clock */
 static int synchronize_audio(VideoState *is, int nb_samples)
01310af2
 {
6f06545b
     int wanted_nb_samples = nb_samples;
01310af2
 
     /* if not master, then we try to remove or add samples to correct the clock */
     if (((is->av_sync_type == AV_SYNC_VIDEO_MASTER && is->video_st) ||
638c9d91
          is->av_sync_type == AV_SYNC_EXTERNAL_CLOCK)) {
         double diff, avg_diff;
6f06545b
         int min_nb_samples, max_nb_samples;
115329f1
 
6f06545b
         diff = get_audio_clock(is) - get_master_clock(is);
115329f1
 
638c9d91
         if (diff < AV_NOSYNC_THRESHOLD) {
             is->audio_diff_cum = diff + is->audio_diff_avg_coef * is->audio_diff_cum;
             if (is->audio_diff_avg_count < AUDIO_DIFF_AVG_NB) {
                 /* not enough measures to have a correct estimate */
                 is->audio_diff_avg_count++;
             } else {
                 /* estimate the A-V difference */
                 avg_diff = is->audio_diff_cum * (1.0 - is->audio_diff_avg_coef);
 
                 if (fabs(avg_diff) >= is->audio_diff_threshold) {
6f06545b
                     wanted_nb_samples = nb_samples + (int)(diff * is->audio_src_freq);
                     min_nb_samples = ((nb_samples * (100 - SAMPLE_CORRECTION_PERCENT_MAX) / 100));
                     max_nb_samples = ((nb_samples * (100 + SAMPLE_CORRECTION_PERCENT_MAX) / 100));
                     wanted_nb_samples = FFMIN(FFMAX(wanted_nb_samples, min_nb_samples), max_nb_samples);
638c9d91
                 }
aebb56e1
                 av_dlog(NULL, "diff=%f adiff=%f sample_diff=%d apts=%0.3f vpts=%0.3f %f\n",
6f06545b
                         diff, avg_diff, wanted_nb_samples - nb_samples,
aebb56e1
                         is->audio_clock, is->video_clock, is->audio_diff_threshold);
01310af2
             }
638c9d91
         } else {
             /* too big a difference: probably initial PTS errors, so
                reset the A-V filter */
             is->audio_diff_avg_count = 0;
da7c65f0
             is->audio_diff_cum       = 0;
01310af2
         }
     }
 
6f06545b
     return wanted_nb_samples;
01310af2
 }
 
 /* decode one audio frame and return its uncompressed size */
5a4476e2
 static int audio_decode_frame(VideoState *is, double *pts_ptr)
01310af2
 {
bea18375
     AVPacket *pkt_temp = &is->audio_pkt_temp;
01310af2
     AVPacket *pkt = &is->audio_pkt;
da7c65f0
     AVCodecContext *dec = is->audio_st->codec;
1dd3c473
     int len1, len2, data_size, resampled_data_size;
a5a1e3cb
     int64_t dec_channel_layout;
     int got_frame;
01310af2
     double pts;
59df4b82
     int new_packet = 0;
     int flush_complete = 0;
6f06545b
     int wanted_nb_samples;
01310af2
 
da7c65f0
     for (;;) {
72ea344b
         /* NOTE: the audio packet can contain several frames */
59df4b82
         while (pkt_temp->size > 0 || (!pkt_temp->data && new_packet)) {
f199f385
             if (!is->frame) {
                 if (!(is->frame = avcodec_alloc_frame()))
                     return AVERROR(ENOMEM);
             } else
                 avcodec_get_frame_defaults(is->frame);
 
59df4b82
             if (flush_complete)
                 break;
             new_packet = 0;
f199f385
             len1 = avcodec_decode_audio4(dec, is->frame, &got_frame, pkt_temp);
72ea344b
             if (len1 < 0) {
                 /* if error, we skip the frame */
bea18375
                 pkt_temp->size = 0;
01310af2
                 break;
72ea344b
             }
115329f1
 
bea18375
             pkt_temp->data += len1;
             pkt_temp->size -= len1;
59df4b82
 
f199f385
             if (!got_frame) {
59df4b82
                 /* stop sending empty packets if the decoder is finished */
                 if (!pkt_temp->data && dec->codec->capabilities & CODEC_CAP_DELAY)
                     flush_complete = 1;
72ea344b
                 continue;
59df4b82
             }
f199f385
             data_size = av_samples_get_buffer_size(NULL, dec->channels,
                                                    is->frame->nb_samples,
                                                    dec->sample_fmt, 1);
5a4476e2
 
1dd3c473
             dec_channel_layout = (dec->channel_layout && dec->channels == av_get_channel_layout_nb_channels(dec->channel_layout)) ? dec->channel_layout : av_get_default_channel_layout(dec->channels);
6f06545b
             wanted_nb_samples = synchronize_audio(is, is->frame->nb_samples);
1dd3c473
 
6f06545b
             if (dec->sample_fmt != is->audio_src_fmt ||
                 dec_channel_layout != is->audio_src_channel_layout ||
                 dec->sample_rate != is->audio_src_freq ||
                 (wanted_nb_samples != is->frame->nb_samples && !is->swr_ctx)) {
1dd3c473
                 if (is->swr_ctx)
                     swr_free(&is->swr_ctx);
645c61fb
                 is->swr_ctx = swr_alloc_set_opts(NULL,
                                                  is->audio_tgt_channel_layout, is->audio_tgt_fmt, is->audio_tgt_freq,
                                                  dec_channel_layout,           dec->sample_fmt,   dec->sample_rate,
eafa2b60
                                                  0, NULL);
1dd3c473
                 if (!is->swr_ctx || swr_init(is->swr_ctx) < 0) {
                     fprintf(stderr, "Cannot create sample rate converter for conversion of %d Hz %s %d channels to %d Hz %s %d channels!\n",
                         dec->sample_rate,
ba7d6e79
                         av_get_sample_fmt_name(dec->sample_fmt),
1dd3c473
                         dec->channels,
                         is->audio_tgt_freq,
                         av_get_sample_fmt_name(is->audio_tgt_fmt),
                         is->audio_tgt_channels);
                     break;
5a4476e2
                 }
1dd3c473
                 is->audio_src_channel_layout = dec_channel_layout;
                 is->audio_src_channels = dec->channels;
                 is->audio_src_freq = dec->sample_rate;
                 is->audio_src_fmt = dec->sample_fmt;
5a4476e2
             }
 
1dd3c473
             resampled_data_size = data_size;
             if (is->swr_ctx) {
b404ab9e
                 const uint8_t *in[] = { is->frame->data[0] };
1dd3c473
                 uint8_t *out[] = {is->audio_buf2};
6f06545b
                 if (wanted_nb_samples != is->frame->nb_samples) {
                     if (swr_set_compensation(is->swr_ctx, (wanted_nb_samples - is->frame->nb_samples) * is->audio_tgt_freq / dec->sample_rate,
                                                 wanted_nb_samples * is->audio_tgt_freq / dec->sample_rate) < 0) {
                         fprintf(stderr, "swr_set_compensation() failed\n");
                         break;
                     }
                 }
1dd3c473
                 len2 = swr_convert(is->swr_ctx, out, sizeof(is->audio_buf2) / is->audio_tgt_channels / av_get_bytes_per_sample(is->audio_tgt_fmt),
6f06545b
                                                 in, is->frame->nb_samples);
1dd3c473
                 if (len2 < 0) {
                     fprintf(stderr, "audio_resample() failed\n");
5a4476e2
                     break;
                 }
1dd3c473
                 if (len2 == sizeof(is->audio_buf2) / is->audio_tgt_channels / av_get_bytes_per_sample(is->audio_tgt_fmt)) {
                     fprintf(stderr, "warning: audio buffer is probably too small\n");
                     swr_init(is->swr_ctx);
                 }
                 is->audio_buf = is->audio_buf2;
                 resampled_data_size = len2 * is->audio_tgt_channels * av_get_bytes_per_sample(is->audio_tgt_fmt);
             } else {
f199f385
                 is->audio_buf = is->frame->data[0];
5a4476e2
             }
 
72ea344b
             /* derive the pts from the running audio clock */
             pts = is->audio_clock;
             *pts_ptr = pts;
115329f1
             is->audio_clock += (double)data_size /
75f847aa
                 (dec->channels * dec->sample_rate * av_get_bytes_per_sample(dec->sample_fmt));
1f6b9cc3
 #ifdef DEBUG
72ea344b
             {
                 static double last_clock;
                 printf("audio: delay=%0.3f clock=%0.3f pts=%0.3f\n",
                        is->audio_clock - last_clock,
                        is->audio_clock, pts);
                 last_clock = is->audio_clock;
01310af2
             }
72ea344b
 #endif
1dd3c473
             return resampled_data_size;
01310af2
         }
 
72ea344b
         /* free the current packet */
         if (pkt->data)
01310af2
             av_free_packet(pkt);
efe8a1ba
         memset(pkt_temp, 0, sizeof(*pkt_temp));
115329f1
 
72ea344b
         if (is->paused || is->audioq.abort_request) {
             return -1;
         }
115329f1
 
01310af2
         /* read next packet */
59df4b82
         if ((new_packet = packet_queue_get(&is->audioq, pkt, 1)) < 0)
01310af2
             return -1;
59df4b82
 
ec1f3cab
         if (pkt->data == flush_pkt.data) {
abdff646
             avcodec_flush_buffers(dec);
ec1f3cab
             flush_complete = 0;
         }
39c6a118
 
f199f385
         *pkt_temp = *pkt;
115329f1
 
72ea344b
         /* update the audio clock with the packet pts if present */
         if (pkt->pts != AV_NOPTS_VALUE) {
c0df9d75
             is->audio_clock = av_q2d(is->audio_st->time_base)*pkt->pts;
72ea344b
         }
01310af2
     }
 }
 
 /* prepare a new audio buffer */
358061f6
 static void sdl_audio_callback(void *opaque, Uint8 *stream, int len)
01310af2
 {
     VideoState *is = opaque;
     int audio_size, len1;
10b7b4a6
     int bytes_per_sec;
1348420b
     int frame_size = av_samples_get_buffer_size(NULL, is->audio_tgt_channels, 1, is->audio_tgt_fmt, 1);
01310af2
     double pts;
 
     audio_callback_time = av_gettime();
115329f1
 
01310af2
     while (len > 0) {
         if (is->audio_buf_index >= is->audio_buf_size) {
5a4476e2
            audio_size = audio_decode_frame(is, &pts);
01310af2
            if (audio_size < 0) {
                 /* if error, just output silence */
e2a2c49f
                is->audio_buf      = is->silence_buf;
1348420b
                is->audio_buf_size = sizeof(is->silence_buf) / frame_size * frame_size;
01310af2
            } else {
f8b8c694
                if (is->show_mode != SHOW_MODE_VIDEO)
01310af2
                    update_sample_display(is, (int16_t *)is->audio_buf, audio_size);
                is->audio_buf_size = audio_size;
            }
            is->audio_buf_index = 0;
         }
         len1 = is->audio_buf_size - is->audio_buf_index;
         if (len1 > len)
             len1 = len;
         memcpy(stream, (uint8_t *)is->audio_buf + is->audio_buf_index, len1);
         len -= len1;
         stream += len1;
         is->audio_buf_index += len1;
     }
1dd3c473
     bytes_per_sec = is->audio_tgt_freq * is->audio_tgt_channels * av_get_bytes_per_sample(is->audio_tgt_fmt);
10b7b4a6
     is->audio_write_buf_size = is->audio_buf_size - is->audio_buf_index;
     /* Let's assume the audio driver that is used by SDL has two periods. */
     is->audio_current_pts = is->audio_clock - (double)(2 * is->audio_hw_buf_size + is->audio_write_buf_size) / bytes_per_sec;
     is->audio_current_pts_drift = is->audio_current_pts - audio_callback_time / 1000000.0;
01310af2
 }
 
 /* open a given stream. Return 0 if OK */
 static int stream_component_open(VideoState *is, int stream_index)
 {
     AVFormatContext *ic = is->ic;
fe74099a
     AVCodecContext *avctx;
01310af2
     AVCodec *codec;
     SDL_AudioSpec wanted_spec, spec;
3009f521
     AVDictionary *opts;
     AVDictionaryEntry *t = NULL;
1dd3c473
     int64_t wanted_channel_layout = 0;
5d94f28e
     int wanted_nb_channels;
     const char *env;
01310af2
 
     if (stream_index < 0 || stream_index >= ic->nb_streams)
         return -1;
fe74099a
     avctx = ic->streams[stream_index]->codec;
115329f1
 
fe74099a
     codec = avcodec_find_decoder(avctx->codec_id);
6d13499b
     opts = filter_codec_opts(codec_opts, codec, ic, ic->streams[stream_index]);
 
5eda0967
     switch(avctx->codec_type){
0aa793a1
         case AVMEDIA_TYPE_AUDIO   : if(audio_codec_name   ) codec= avcodec_find_decoder_by_name(   audio_codec_name); break;
         case AVMEDIA_TYPE_SUBTITLE: if(subtitle_codec_name) codec= avcodec_find_decoder_by_name(subtitle_codec_name); break;
         case AVMEDIA_TYPE_VIDEO   : if(video_codec_name   ) codec= avcodec_find_decoder_by_name(   video_codec_name); break;
5eda0967
     }
99119bdf
     if (!codec)
         return -1;
 
da7c65f0
     avctx->workaround_bugs   = workaround_bugs;
     avctx->lowres            = lowres;
d8407ee2
     if (avctx->lowres > codec->max_lowres) {
         av_log(avctx, AV_LOG_WARNING, "The maximum value for lowres supported by the decoder is %d\n",
                 codec->max_lowres);
         avctx->lowres = codec->max_lowres;
     }
da7c65f0
     avctx->idct_algo         = idct;
     avctx->skip_frame        = skip_frame;
     avctx->skip_idct         = skip_idct;
     avctx->skip_loop_filter  = skip_loop_filter;
     avctx->error_concealment = error_concealment;
fe74099a
 
41211483
     if(avctx->lowres) avctx->flags |= CODEC_FLAG_EMU_EDGE;
da7c65f0
     if (fast)   avctx->flags2 |= CODEC_FLAG2_FAST;
175714c0
     if(codec->capabilities & CODEC_CAP_DR1)
         avctx->flags |= CODEC_FLAG_EMU_EDGE;
 
2446a8cc
     if (avctx->codec_type == AVMEDIA_TYPE_AUDIO) {
a2c5be63
         memset(&is->audio_pkt_temp, 0, sizeof(is->audio_pkt_temp));
5d94f28e
         env = SDL_getenv("SDL_AUDIO_CHANNELS");
         if (env)
             wanted_channel_layout = av_get_default_channel_layout(SDL_atoi(env));
         if (!wanted_channel_layout) {
8ee77fc8
             wanted_channel_layout = (avctx->channel_layout && avctx->channels == av_get_channel_layout_nb_channels(avctx->channel_layout)) ? avctx->channel_layout : av_get_default_channel_layout(avctx->channels);
5d94f28e
             wanted_channel_layout &= ~AV_CH_LAYOUT_STEREO_DOWNMIX;
             wanted_nb_channels = av_get_channel_layout_nb_channels(wanted_channel_layout);
             /* SDL only supports 1, 2, 4 or 6 channels at the moment, so we have to make sure not to request anything else. */
             while (wanted_nb_channels > 0 && (wanted_nb_channels == 3 || wanted_nb_channels == 5 || wanted_nb_channels > 6)) {
                 wanted_nb_channels--;
                 wanted_channel_layout = av_get_default_channel_layout(wanted_nb_channels);
             }
         }
1dd3c473
         wanted_spec.channels = av_get_channel_layout_nb_channels(wanted_channel_layout);
         wanted_spec.freq = avctx->sample_rate;
         if (wanted_spec.freq <= 0 || wanted_spec.channels <= 0) {
             fprintf(stderr, "Invalid sample rate or channel count!\n");
2446a8cc
             return -1;
         }
     }
 
2473a45c
     if (!av_dict_get(opts, "threads", NULL, 0))
         av_dict_set(&opts, "threads", "auto", 0);
01310af2
     if (!codec ||
3009f521
         avcodec_open2(avctx, codec, &opts) < 0)
01310af2
         return -1;
3009f521
     if ((t = av_dict_get(opts, "", NULL, AV_DICT_IGNORE_SUFFIX))) {
         av_log(NULL, AV_LOG_ERROR, "Option %s not found.\n", t->key);
         return AVERROR_OPTION_NOT_FOUND;
     }
51b73087
 
     /* prepare audio output */
72415b2a
     if (avctx->codec_type == AVMEDIA_TYPE_AUDIO) {
51b73087
         wanted_spec.format = AUDIO_S16SYS;
         wanted_spec.silence = 0;
         wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE;
         wanted_spec.callback = sdl_audio_callback;
         wanted_spec.userdata = is;
         if (SDL_OpenAudio(&wanted_spec, &spec) < 0) {
             fprintf(stderr, "SDL_OpenAudio: %s\n", SDL_GetError());
             return -1;
         }
         is->audio_hw_buf_size = spec.size;
1dd3c473
         if (spec.format != AUDIO_S16SYS) {
             fprintf(stderr, "SDL advised audio format %d is not supported!\n", spec.format);
             return -1;
         }
         if (spec.channels != wanted_spec.channels) {
             wanted_channel_layout = av_get_default_channel_layout(spec.channels);
             if (!wanted_channel_layout) {
                 fprintf(stderr, "SDL advised channel count %d is not supported!\n", spec.channels);
                 return -1;
             }
         }
         is->audio_src_fmt = is->audio_tgt_fmt = AV_SAMPLE_FMT_S16;
         is->audio_src_freq = is->audio_tgt_freq = spec.freq;
         is->audio_src_channel_layout = is->audio_tgt_channel_layout = wanted_channel_layout;
         is->audio_src_channels = is->audio_tgt_channels = spec.channels;
51b73087
     }
 
3f3fe38d
     ic->streams[stream_index]->discard = AVDISCARD_DEFAULT;
da7c65f0
     switch (avctx->codec_type) {
72415b2a
     case AVMEDIA_TYPE_AUDIO:
01310af2
         is->audio_stream = stream_index;
         is->audio_st = ic->streams[stream_index];
da7c65f0
         is->audio_buf_size  = 0;
01310af2
         is->audio_buf_index = 0;
638c9d91
 
         /* init averaging filter */
da7c65f0
         is->audio_diff_avg_coef  = exp(log(0.01) / AUDIO_DIFF_AVG_NB);
638c9d91
         is->audio_diff_avg_count = 0;
         /* since we do not have a precise enough measure of the audio fifo fullness,
            we correct audio sync only if the error is larger than this threshold */
2446a8cc
         is->audio_diff_threshold = 2.0 * SDL_AUDIO_BUFFER_SIZE / wanted_spec.freq;
638c9d91
 
01310af2
         memset(&is->audio_pkt, 0, sizeof(is->audio_pkt));
         packet_queue_init(&is->audioq);
bb270c08
         SDL_PauseAudio(0);
01310af2
         break;
72415b2a
     case AVMEDIA_TYPE_VIDEO:
01310af2
         is->video_stream = stream_index;
         is->video_st = ic->streams[stream_index];
 
         packet_queue_init(&is->videoq);
         is->video_tid = SDL_CreateThread(video_thread, is);
         break;
72415b2a
     case AVMEDIA_TYPE_SUBTITLE:
72ce053b
         is->subtitle_stream = stream_index;
         is->subtitle_st = ic->streams[stream_index];
         packet_queue_init(&is->subtitleq);
115329f1
 
72ce053b
         is->subtitle_tid = SDL_CreateThread(subtitle_thread, is);
         break;
01310af2
     default:
         break;
     }
     return 0;
 }
 
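 /* close a stream component: abort its packet queue, stop the associated
    thread or audio output, release decoder and resampler resources and mark
    the stream as discarded */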
 static void stream_component_close(VideoState *is, int stream_index)
 {
     AVFormatContext *ic = is->ic;
fe74099a
     AVCodecContext *avctx;
115329f1
 
72ce053b
     if (stream_index < 0 || stream_index >= ic->nb_streams)
         return;
fe74099a
     avctx = ic->streams[stream_index]->codec;
01310af2
 
da7c65f0
     switch (avctx->codec_type) {
72415b2a
     case AVMEDIA_TYPE_AUDIO:
01310af2
         packet_queue_abort(&is->audioq);
 
         SDL_CloseAudio();
 
         packet_queue_end(&is->audioq);
c74f1f47
         av_free_packet(&is->audio_pkt);
1dd3c473
         if (is->swr_ctx)
             swr_free(&is->swr_ctx);
f199f385
         av_freep(&is->audio_buf1);
         is->audio_buf = NULL;
         av_freep(&is->frame);
cb2c4de3
 
         if (is->rdft) {
             av_rdft_end(is->rdft);
             av_freep(&is->rdft_data);
f9324d5a
             is->rdft = NULL;
             is->rdft_bits = 0;
cb2c4de3
         }
01310af2
         break;
72415b2a
     case AVMEDIA_TYPE_VIDEO:
01310af2
         packet_queue_abort(&is->videoq);
 
         /* note: we also signal this mutex to make sure we unblock the
            video thread in all cases */
         SDL_LockMutex(is->pictq_mutex);
         SDL_CondSignal(is->pictq_cond);
         SDL_UnlockMutex(is->pictq_mutex);
 
         SDL_WaitThread(is->video_tid, NULL);
 
         packet_queue_end(&is->videoq);
         break;
72415b2a
     case AVMEDIA_TYPE_SUBTITLE:
72ce053b
         packet_queue_abort(&is->subtitleq);
115329f1
 
72ce053b
         /* note: we also signal this mutex to make sure we unblock the
            subtitle thread in all cases */
         SDL_LockMutex(is->subpq_mutex);
         is->subtitle_stream_changed = 1;
115329f1
 
72ce053b
         SDL_CondSignal(is->subpq_cond);
         SDL_UnlockMutex(is->subpq_mutex);
 
         SDL_WaitThread(is->subtitle_tid, NULL);
 
         packet_queue_end(&is->subtitleq);
         break;
01310af2
     default:
         break;
     }
 
3f3fe38d
     ic->streams[stream_index]->discard = AVDISCARD_ALL;
fe74099a
     avcodec_close(avctx);
da7c65f0
     switch (avctx->codec_type) {
72415b2a
     case AVMEDIA_TYPE_AUDIO:
01310af2
         is->audio_st = NULL;
         is->audio_stream = -1;
         break;
72415b2a
     case AVMEDIA_TYPE_VIDEO:
01310af2
         is->video_st = NULL;
         is->video_stream = -1;
         break;
72415b2a
     case AVMEDIA_TYPE_SUBTITLE:
72ce053b
         is->subtitle_st = NULL;
         is->subtitle_stream = -1;
         break;
01310af2
     default:
         break;
     }
 }
 
40972f7c
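 /* interrupt callback installed on the demuxer: returns non-zero while the
    stream is being shut down so that blocking I/O can be aborted */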
 static int decode_interrupt_cb(void *ctx)
416e3508
 {
708df4ac
     VideoState *is = ctx;
     return is->abort_request;
416e3508
 }
01310af2
 
 /* this thread gets the stream from the disk or the network */
8adf9bb2
 static int read_thread(void *arg)
01310af2
 {
     VideoState *is = arg;
b6bde8c7
     AVFormatContext *ic = NULL;
6625a3de
     int err, i, ret;
72415b2a
     int st_index[AVMEDIA_TYPE_NB];
01310af2
     AVPacket pkt1, *pkt = &pkt1;
da7c65f0
     int eof = 0;
d834d63b
     int pkt_in_play_range = 0;
b6bde8c7
     AVDictionaryEntry *t;
3009f521
     AVDictionary **opts;
     int orig_nb_streams;
6299a229
 
6625a3de
     memset(st_index, -1, sizeof(st_index));
01310af2
     is->video_stream = -1;
     is->audio_stream = -1;
72ce053b
     is->subtitle_stream = -1;
01310af2
 
40972f7c
     ic = avformat_alloc_context();
     ic->interrupt_callback.callback = decode_interrupt_cb;
708df4ac
     ic->interrupt_callback.opaque = is;
b6bde8c7
     err = avformat_open_input(&ic, is->filename, is->iformat, &format_opts);
638c9d91
     if (err < 0) {
         print_error(is->filename, err);
         ret = -1;
         goto fail;
     }
b6bde8c7
     if ((t = av_dict_get(format_opts, "", NULL, AV_DICT_IGNORE_SUFFIX))) {
         av_log(NULL, AV_LOG_ERROR, "Option %s not found.\n", t->key);
         ret = AVERROR_OPTION_NOT_FOUND;
         goto fail;
     }
01310af2
     is->ic = ic;
30bc6613
 
da7c65f0
     if (genpts)
30bc6613
         ic->flags |= AVFMT_FLAG_GENPTS;
 
8ec19f84
     opts = setup_find_stream_info_opts(ic, codec_opts);
3009f521
     orig_nb_streams = ic->nb_streams;
bc778a0c
 
3009f521
     err = avformat_find_stream_info(ic, opts);
24c07998
     if (err < 0) {
         fprintf(stderr, "%s: could not find codec parameters\n", is->filename);
         ret = -1;
         goto fail;
     }
3009f521
     for (i = 0; i < orig_nb_streams; i++)
         av_dict_free(&opts[i]);
     av_freep(&opts);
 
da7c65f0
     if (ic->pb)
41211483
         ic->pb->eof_reached = 0; // FIXME hack, ffplay maybe should not use url_feof() to test for the end
72ea344b
 
da7c65f0
     if (seek_by_bytes < 0)
         seek_by_bytes = !!(ic->iformat->flags & AVFMT_TS_DISCONT);
70a4764d
 
72ea344b
     /* if seeking was requested, execute it now */
     if (start_time != AV_NOPTS_VALUE) {
         int64_t timestamp;
 
         timestamp = start_time;
         /* add the stream start time */
         if (ic->start_time != AV_NOPTS_VALUE)
             timestamp += ic->start_time;
4ed29207
         ret = avformat_seek_file(ic, -1, INT64_MIN, timestamp, INT64_MAX, 0);
72ea344b
         if (ret < 0) {
115329f1
             fprintf(stderr, "%s: could not seek to position %0.3f\n",
72ea344b
                     is->filename, (double)timestamp / AV_TIME_BASE);
         }
     }
 
406f0f1b
     for (i = 0; i < ic->nb_streams; i++)
3f3fe38d
         ic->streams[i]->discard = AVDISCARD_ALL;
be732b70
     if (!video_disable)
406f0f1b
         st_index[AVMEDIA_TYPE_VIDEO] =
             av_find_best_stream(ic, AVMEDIA_TYPE_VIDEO,
                                 wanted_stream[AVMEDIA_TYPE_VIDEO], -1, NULL, 0);
be732b70
     if (!audio_disable)
406f0f1b
         st_index[AVMEDIA_TYPE_AUDIO] =
             av_find_best_stream(ic, AVMEDIA_TYPE_AUDIO,
                                 wanted_stream[AVMEDIA_TYPE_AUDIO],
                                 st_index[AVMEDIA_TYPE_VIDEO],
                                 NULL, 0);
be732b70
     if (!video_disable)
406f0f1b
         st_index[AVMEDIA_TYPE_SUBTITLE] =
             av_find_best_stream(ic, AVMEDIA_TYPE_SUBTITLE,
                                 wanted_stream[AVMEDIA_TYPE_SUBTITLE],
                                 (st_index[AVMEDIA_TYPE_AUDIO] >= 0 ?
                                  st_index[AVMEDIA_TYPE_AUDIO] :
                                  st_index[AVMEDIA_TYPE_VIDEO]),
                                 NULL, 0);
01310af2
     if (show_status) {
0ebf4754
         av_dump_format(ic, 0, is->filename, 0);
01310af2
     }
 
f521746b
     is->show_mode = show_mode;
 
01310af2
     /* open the streams */
72415b2a
     if (st_index[AVMEDIA_TYPE_AUDIO] >= 0) {
         stream_component_open(is, st_index[AVMEDIA_TYPE_AUDIO]);
01310af2
     }
 
da7c65f0
     ret = -1;
72415b2a
     if (st_index[AVMEDIA_TYPE_VIDEO] >= 0) {
da7c65f0
         ret = stream_component_open(is, st_index[AVMEDIA_TYPE_VIDEO]);
077a8d61
     }
d38c9e7a
     is->refresh_tid = SDL_CreateThread(refresh_thread, is);
1d6c82d4
     if (is->show_mode == SHOW_MODE_NONE)
         is->show_mode = ret >= 0 ? SHOW_MODE_VIDEO : SHOW_MODE_RDFT;
01310af2
 
72415b2a
     if (st_index[AVMEDIA_TYPE_SUBTITLE] >= 0) {
         stream_component_open(is, st_index[AVMEDIA_TYPE_SUBTITLE]);
16a59a7b
     }
 
01310af2
     if (is->video_stream < 0 && is->audio_stream < 0) {
638c9d91
         fprintf(stderr, "%s: could not open codecs\n", is->filename);
         ret = -1;
01310af2
         goto fail;
     }
 
da7c65f0
     for (;;) {
01310af2
         if (is->abort_request)
             break;
416e3508
         if (is->paused != is->last_paused) {
             is->last_paused = is->paused;
72ea344b
             if (is->paused)
da7c65f0
                 is->read_pause_return = av_read_pause(ic);
72ea344b
             else
                 av_read_play(ic);
416e3508
         }
df98446b
 #if CONFIG_RTSP_DEMUXER || CONFIG_MMSH_PROTOCOL
         if (is->paused &&
                 (!strcmp(ic->iformat->name, "rtsp") ||
304ec08f
                  (ic->pb && !strncmp(input_filename, "mmsh:", 5)))) {
416e3508
             /* wait 10 ms to avoid trying to get another packet */
             /* XXX: horrible */
             SDL_Delay(10);
             continue;
         }
400738b1
 #endif
72ea344b
         if (is->seek_req) {
da7c65f0
             int64_t seek_target = is->seek_pos;
             int64_t seek_min    = is->seek_rel > 0 ? seek_target - is->seek_rel + 2: INT64_MIN;
             int64_t seek_max    = is->seek_rel < 0 ? seek_target - is->seek_rel - 2: INT64_MAX;
 // FIXME the +-2 is due to rounding not being done in the correct direction in the generation
4ed29207
 //      of the seek_pos/seek_rel variables
8e606cc8
 
4ed29207
             ret = avformat_seek_file(is->ic, -1, seek_min, seek_target, seek_max, is->seek_flags);
72ea344b
             if (ret < 0) {
                 fprintf(stderr, "%s: error while seeking\n", is->ic->filename);
da7c65f0
             } else {
e6c0297f
                 if (is->audio_stream >= 0) {
                     packet_queue_flush(&is->audioq);
39c6a118
                     packet_queue_put(&is->audioq, &flush_pkt);
e6c0297f
                 }
72ce053b
                 if (is->subtitle_stream >= 0) {
                     packet_queue_flush(&is->subtitleq);
39c6a118
                     packet_queue_put(&is->subtitleq, &flush_pkt);
72ce053b
                 }
e6c0297f
                 if (is->video_stream >= 0) {
                     packet_queue_flush(&is->videoq);
39c6a118
                     packet_queue_put(&is->videoq, &flush_pkt);
e6c0297f
                 }
72ea344b
             }
             is->seek_req = 0;
da7c65f0
             eof = 0;
72ea344b
         }
416e3508
 
01310af2
         /* if the queues are full, no need to read more */
79ee4683
         if (   is->audioq.size + is->videoq.size + is->subtitleq.size > MAX_QUEUE_SIZE
ee4b1432
             || (   (is->audioq   .nb_packets > MIN_FRAMES || is->audio_stream < 0)
da7c65f0
                 && (is->videoq   .nb_packets > MIN_FRAMES || is->video_stream < 0)
                 && (is->subtitleq.nb_packets > MIN_FRAMES || is->subtitle_stream < 0))) {
01310af2
             /* wait 10 ms */
             SDL_Delay(10);
             continue;
         }
da7c65f0
         if (eof) {
             if (is->video_stream >= 0) {
26534fe8
                 av_init_packet(pkt);
da7c65f0
                 pkt->data = NULL;
                 pkt->size = 0;
                 pkt->stream_index = is->video_stream;
26534fe8
                 packet_queue_put(&is->videoq, pkt);
9dc41767
             }
59df4b82
             if (is->audio_stream >= 0 &&
                 is->audio_st->codec->codec->capabilities & CODEC_CAP_DELAY) {
                 av_init_packet(pkt);
                 pkt->data = NULL;
                 pkt->size = 0;
                 pkt->stream_index = is->audio_stream;
                 packet_queue_put(&is->audioq, pkt);
9dc41767
             }
b4083171
             SDL_Delay(10);
da7c65f0
             if (is->audioq.size + is->videoq.size + is->subtitleq.size == 0) {
                 if (loop != 1 && (!loop || --loop)) {
84506ebd
                     stream_seek(is, start_time != AV_NOPTS_VALUE ? start_time : 0, 0, 0);
da7c65f0
                 } else if (autoexit) {
                     ret = AVERROR_EOF;
1922c0a7
                     goto fail;
                 }
2d1653b0
             }
0b82612d
             eof = 0;
600a331c
             continue;
         }
72ea344b
         ret = av_read_frame(ic, pkt);
01310af2
         if (ret < 0) {
27d97fde
             if (ret == AVERROR_EOF || url_feof(ic->pb))
da7c65f0
                 eof = 1;
eb4d1cb9
             if (ic->pb && ic->pb->error)
bb270c08
                 break;
75bb7b0a
             SDL_Delay(100); /* wait for user event */
             continue;
01310af2
         }
d834d63b
         /* check if packet is in play range specified by user, then queue, otherwise discard */
         pkt_in_play_range = duration == AV_NOPTS_VALUE ||
                 (pkt->pts - ic->streams[pkt->stream_index]->start_time) *
                 av_q2d(ic->streams[pkt->stream_index]->time_base) -
da7c65f0
                 (double)(start_time != AV_NOPTS_VALUE ? start_time : 0) / 1000000
                 <= ((double)duration / 1000000);
d834d63b
         if (pkt->stream_index == is->audio_stream && pkt_in_play_range) {
01310af2
             packet_queue_put(&is->audioq, pkt);
d834d63b
         } else if (pkt->stream_index == is->video_stream && pkt_in_play_range) {
01310af2
             packet_queue_put(&is->videoq, pkt);
d834d63b
         } else if (pkt->stream_index == is->subtitle_stream && pkt_in_play_range) {
72ce053b
             packet_queue_put(&is->subtitleq, pkt);
01310af2
         } else {
             av_free_packet(pkt);
         }
     }
     /* wait until the end */
     while (!is->abort_request) {
         SDL_Delay(100);
     }
 
638c9d91
     ret = 0;
01310af2
  fail:
     /* close each stream */
     if (is->audio_stream >= 0)
         stream_component_close(is, is->audio_stream);
     if (is->video_stream >= 0)
         stream_component_close(is, is->video_stream);
72ce053b
     if (is->subtitle_stream >= 0)
         stream_component_close(is, is->subtitle_stream);
638c9d91
     if (is->ic) {
cd3716b9
         avformat_close_input(&is->ic);
638c9d91
     }
416e3508
 
638c9d91
     if (ret != 0) {
         SDL_Event event;
115329f1
 
638c9d91
         event.type = FF_QUIT_EVENT;
         event.user.data1 = is;
         SDL_PushEvent(&event);
     }
01310af2
     return 0;
 }
 
638c9d91
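 /* allocate and initialize a VideoState for the given input and start the
    read thread that demuxes it */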
 static VideoState *stream_open(const char *filename, AVInputFormat *iformat)
01310af2
 {
     VideoState *is;
 
     is = av_mallocz(sizeof(VideoState));
     if (!is)
         return NULL;
f7d78f36
     av_strlcpy(is->filename, filename, sizeof(is->filename));
638c9d91
     is->iformat = iformat;
da7c65f0
     is->ytop    = 0;
     is->xleft   = 0;
01310af2
 
     /* start video display */
     is->pictq_mutex = SDL_CreateMutex();
da7c65f0
     is->pictq_cond  = SDL_CreateCond();
115329f1
 
72ce053b
     is->subpq_mutex = SDL_CreateMutex();
da7c65f0
     is->subpq_cond  = SDL_CreateCond();
115329f1
 
638c9d91
     is->av_sync_type = av_sync_type;
41211483
     is->read_tid     = SDL_CreateThread(read_thread, is);
8adf9bb2
     if (!is->read_tid) {
01310af2
         av_free(is);
         return NULL;
     }
     return is;
 }
 
7b49ce2e
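 /* switch to the next stream of the given type (audio, video or subtitle),
    wrapping around and closing the previously selected stream */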
 static void stream_cycle_channel(VideoState *is, int codec_type)
638c9d91
 {
     AVFormatContext *ic = is->ic;
     int start_index, stream_index;
     AVStream *st;
 
72415b2a
     if (codec_type == AVMEDIA_TYPE_VIDEO)
638c9d91
         start_index = is->video_stream;
72415b2a
     else if (codec_type == AVMEDIA_TYPE_AUDIO)
638c9d91
         start_index = is->audio_stream;
72ce053b
     else
         start_index = is->subtitle_stream;
72415b2a
     if (start_index < (codec_type == AVMEDIA_TYPE_SUBTITLE ? -1 : 0))
638c9d91
         return;
     stream_index = start_index;
da7c65f0
     for (;;) {
638c9d91
         if (++stream_index >= is->ic->nb_streams)
72ce053b
         {
72415b2a
             if (codec_type == AVMEDIA_TYPE_SUBTITLE)
72ce053b
             {
                 stream_index = -1;
                 goto the_end;
             } else
                 stream_index = 0;
         }
638c9d91
         if (stream_index == start_index)
             return;
         st = ic->streams[stream_index];
01f4895c
         if (st->codec->codec_type == codec_type) {
638c9d91
             /* check that parameters are OK */
da7c65f0
             switch (codec_type) {
72415b2a
             case AVMEDIA_TYPE_AUDIO:
01f4895c
                 if (st->codec->sample_rate != 0 &&
                     st->codec->channels != 0)
638c9d91
                     goto the_end;
                 break;
72415b2a
             case AVMEDIA_TYPE_VIDEO:
             case AVMEDIA_TYPE_SUBTITLE:
638c9d91
                 goto the_end;
             default:
                 break;
             }
         }
     }
  the_end:
     stream_component_close(is, start_index);
     stream_component_open(is, stream_index);
 }
 
 
84506ebd
 static void toggle_full_screen(VideoState *is)
01310af2
 {
bb4c0abc
     av_unused int i;
01310af2
     is_full_screen = !is_full_screen;
8085a5b7
 #if defined(__APPLE__) && SDL_VERSION_ATLEAST(1, 2, 14)
bed04e2b
     /* OS X needs to reallocate the SDL overlays */
16b771b1
     for (i = 0; i < VIDEO_PICTURE_QUEUE_SIZE; i++) {
8085a5b7
         is->pictq[i].reallocate = 1;
     }
 #endif
9fb2b412
     video_open(is, 1);
01310af2
 }
 
5db1f94b
 static void toggle_pause(VideoState *is)
01310af2
 {
5db1f94b
     stream_toggle_pause(is);
     is->step = 0;
bba04f1e
 }
 
5db1f94b
 static void step_to_next_frame(VideoState *is)
bba04f1e
 {
5db1f94b
     /* if the stream is paused, unpause it, then step */
     if (is->paused)
         stream_toggle_pause(is);
     is->step = 1;
01310af2
 }
 
84506ebd
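 /* cycle through the display modes (video, waves, rdft) and clear the
    previous contents of the display area */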
 static void toggle_audio_display(VideoState *is)
01310af2
 {
84506ebd
     int bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00);
     is->show_mode = (is->show_mode + 1) % SHOW_MODE_NB;
     fill_rectangle(screen,
                 is->xleft, is->ytop, is->width, is->height,
                 bgcolor);
     SDL_UpdateRect(screen, is->xleft, is->ytop, is->width, is->height);
01310af2
 }
 
 /* handle an event sent by the GUI */
84506ebd
 static void event_loop(VideoState *cur_stream)
01310af2
 {
     SDL_Event event;
a11d11aa
     double incr, pos, frac;
01310af2
 
da7c65f0
     for (;;) {
d52ec002
         double x;
01310af2
         SDL_WaitEvent(&event);
da7c65f0
         switch (event.type) {
01310af2
         case SDL_KEYDOWN:
066ce8c9
             if (exit_on_keydown) {
84506ebd
                 do_exit(cur_stream);
066ce8c9
                 break;
             }
da7c65f0
             switch (event.key.keysym.sym) {
01310af2
             case SDLK_ESCAPE:
             case SDLK_q:
84506ebd
                 do_exit(cur_stream);
01310af2
                 break;
             case SDLK_f:
84506ebd
                 toggle_full_screen(cur_stream);
dbe7170e
                 cur_stream->force_refresh = 1;
01310af2
                 break;
             case SDLK_p:
             case SDLK_SPACE:
ba571f6b
                 toggle_pause(cur_stream);
01310af2
                 break;
da7c65f0
             case SDLK_s: // S: Step to next frame
ba571f6b
                 step_to_next_frame(cur_stream);
bba04f1e
                 break;
01310af2
             case SDLK_a:
ba571f6b
                 stream_cycle_channel(cur_stream, AVMEDIA_TYPE_AUDIO);
638c9d91
                 break;
             case SDLK_v:
ba571f6b
                 stream_cycle_channel(cur_stream, AVMEDIA_TYPE_VIDEO);
638c9d91
                 break;
72ce053b
             case SDLK_t:
ba571f6b
                 stream_cycle_channel(cur_stream, AVMEDIA_TYPE_SUBTITLE);
72ce053b
                 break;
638c9d91
             case SDLK_w:
ba571f6b
                 toggle_audio_display(cur_stream);
dbe7170e
                 cur_stream->force_refresh = 1;
01310af2
                 break;
91a3ea67
             case SDLK_PAGEUP:
                 incr = 600.0;
                 goto do_seek;
             case SDLK_PAGEDOWN:
                 incr = -600.0;
                 goto do_seek;
72ea344b
             case SDLK_LEFT:
                 incr = -10.0;
                 goto do_seek;
             case SDLK_RIGHT:
                 incr = 10.0;
                 goto do_seek;
             case SDLK_UP:
                 incr = 60.0;
                 goto do_seek;
             case SDLK_DOWN:
                 incr = -60.0;
             do_seek:
94b594c6
                     if (seek_by_bytes) {
da7c65f0
                         if (cur_stream->video_stream >= 0 && cur_stream->video_current_pos >= 0) {
                             pos = cur_stream->video_current_pos;
                         } else if (cur_stream->audio_stream >= 0 && cur_stream->audio_pkt.pos >= 0) {
                             pos = cur_stream->audio_pkt.pos;
                         } else
a2704c97
                             pos = avio_tell(cur_stream->ic->pb);
94b594c6
                         if (cur_stream->ic->bit_rate)
566cd2cb
                             incr *= cur_stream->ic->bit_rate / 8.0;
94b594c6
                         else
                             incr *= 180000.0;
                         pos += incr;
2ef46053
                         stream_seek(cur_stream, pos, incr, 1);
94b594c6
                     } else {
                         pos = get_master_clock(cur_stream);
                         pos += incr;
2ef46053
                         stream_seek(cur_stream, (int64_t)(pos * AV_TIME_BASE), (int64_t)(incr * AV_TIME_BASE), 0);
94b594c6
                     }
72ea344b
                 break;
01310af2
             default:
                 break;
             }
             break;
dbe7170e
         case SDL_VIDEOEXPOSE:
             cur_stream->force_refresh = 1;
             break;
a11d11aa
         case SDL_MOUSEBUTTONDOWN:
066ce8c9
             if (exit_on_mousedown) {
84506ebd
                 do_exit(cur_stream);
066ce8c9
                 break;
             }
d52ec002
         case SDL_MOUSEMOTION:
da7c65f0
             if (event.type == SDL_MOUSEBUTTONDOWN) {
                 x = event.button.x;
             } else {
                 if (event.motion.state != SDL_PRESSED)
d52ec002
                     break;
da7c65f0
                 x = event.motion.x;
bb270c08
             }
da7c65f0
                 if (seek_by_bytes || cur_stream->ic->duration <= 0) {
                     uint64_t size =  avio_size(cur_stream->ic->pb);
d52ec002
                     stream_seek(cur_stream, size*x/cur_stream->width, 0, 1);
da7c65f0
                 } else {
6371c81a
                     int64_t ts;
                     int ns, hh, mm, ss;
                     int tns, thh, tmm, tss;
da7c65f0
                     tns  = cur_stream->ic->duration / 1000000LL;
                     thh  = tns / 3600;
                     tmm  = (tns % 3600) / 60;
                     tss  = (tns % 60);
                     frac = x / cur_stream->width;
                     ns   = frac * tns;
                     hh   = ns / 3600;
                     mm   = (ns % 3600) / 60;
                     ss   = (ns % 60);
6371c81a
                     fprintf(stderr, "Seek to %2.0f%% (%2d:%02d:%02d) of total duration (%2d:%02d:%02d)       \n", frac*100,
                             hh, mm, ss, thh, tmm, tss);
da7c65f0
                     ts = frac * cur_stream->ic->duration;
6371c81a
                     if (cur_stream->ic->start_time != AV_NOPTS_VALUE)
                         ts += cur_stream->ic->start_time;
                     stream_seek(cur_stream, ts, 0, 0);
2ef46053
                 }
bb270c08
             break;
01310af2
         case SDL_VIDEORESIZE:
115329f1
                 screen = SDL_SetVideoMode(event.resize.w, event.resize.h, 0,
01310af2
                                           SDL_HWSURFACE|SDL_RESIZABLE|SDL_ASYNCBLIT|SDL_HWACCEL);
da7c65f0
                 screen_width  = cur_stream->width  = event.resize.w;
                 screen_height = cur_stream->height = event.resize.h;
dbe7170e
                 cur_stream->force_refresh = 1;
01310af2
             break;
         case SDL_QUIT:
638c9d91
         case FF_QUIT_EVENT:
84506ebd
             do_exit(cur_stream);
01310af2
             break;
         case FF_ALLOC_EVENT:
             alloc_picture(event.user.data1);
             break;
         case FF_REFRESH_EVENT:
4a22ea4d
             video_refresh(event.user.data1);
da7c65f0
             cur_stream->refresh = 0;
01310af2
             break;
         default:
             break;
         }
     }
 }
 
eb8bc572
 static int opt_frame_size(const char *opt, const char *arg)
e4b89522
 {
940a116b
     av_log(NULL, AV_LOG_WARNING, "Option -s is deprecated, use -video_size.\n");
     return opt_default("video_size", arg);
e4b89522
 }
 
a5b3b5f6
 static int opt_width(const char *opt, const char *arg)
01310af2
 {
a5b3b5f6
     screen_width = parse_number_or_die(opt, arg, OPT_INT64, 1, INT_MAX);
     return 0;
01310af2
 }
 
a5b3b5f6
 static int opt_height(const char *opt, const char *arg)
01310af2
 {
a5b3b5f6
     screen_height = parse_number_or_die(opt, arg, OPT_INT64, 1, INT_MAX);
     return 0;
01310af2
 }
 
eb8bc572
 static int opt_format(const char *opt, const char *arg)
01310af2
 {
     file_iformat = av_find_input_format(arg);
     if (!file_iformat) {
         fprintf(stderr, "Unknown input format: %s\n", arg);
eb8bc572
         return AVERROR(EINVAL);
01310af2
     }
eb8bc572
     return 0;
01310af2
 }
61890b02
 
eb8bc572
 static int opt_frame_pix_fmt(const char *opt, const char *arg)
e4b89522
 {
940a116b
     av_log(NULL, AV_LOG_WARNING, "Option -pix_fmt is deprecated, use -pixel_format.\n");
     return opt_default("pixel_format", arg);
e4b89522
 }
 
b81d6235
 static int opt_sync(const char *opt, const char *arg)
638c9d91
 {
     if (!strcmp(arg, "audio"))
         av_sync_type = AV_SYNC_AUDIO_MASTER;
     else if (!strcmp(arg, "video"))
         av_sync_type = AV_SYNC_VIDEO_MASTER;
     else if (!strcmp(arg, "ext"))
         av_sync_type = AV_SYNC_EXTERNAL_CLOCK;
aab1b7e5
     else {
b81d6235
         fprintf(stderr, "Unknown value for %s: %s\n", opt, arg);
aab1b7e5
         exit(1);
     }
b81d6235
     return 0;
638c9d91
 }
 
e11bc2c6
 static int opt_seek(const char *opt, const char *arg)
72ea344b
 {
e11bc2c6
     start_time = parse_time_or_die(opt, arg, 1);
     return 0;
72ea344b
 }
 
d834d63b
 static int opt_duration(const char *opt, const char *arg)
 {
     duration = parse_time_or_die(opt, arg, 1);
     return 0;
 }
 
f521746b
 static int opt_show_mode(const char *opt, const char *arg)
 {
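     /* accept a symbolic mode name, otherwise a numeric mode index below SHOW_MODE_NB */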
     show_mode = !strcmp(arg, "video") ? SHOW_MODE_VIDEO :
                 !strcmp(arg, "waves") ? SHOW_MODE_WAVES :
                 !strcmp(arg, "rdft" ) ? SHOW_MODE_RDFT  :
                 parse_number_or_die(opt, arg, OPT_INT, 0, SHOW_MODE_NB-1);
     return 0;
 }
 
d2084402
 static void opt_input_file(void *optctx, const char *filename)
b4af3cf3
 {
     if (input_filename) {
         fprintf(stderr, "Argument '%s' provided as input filename, but '%s' was already specified.\n",
                 filename, input_filename);
d2084402
         exit_program(1);
b4af3cf3
     }
     if (!strcmp(filename, "-"))
         filename = "pipe:";
     input_filename = filename;
 }
 
5eda0967
 static int opt_codec(void *o, const char *opt, const char *arg)
 {
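     /* the last letter of the option as typed ('a', 's' or 'v') selects which codec name to override */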
     switch (opt[strlen(opt) - 1]) {
     case 'a':    audio_codec_name = arg; break;
     case 's': subtitle_codec_name = arg; break;
     case 'v':    video_codec_name = arg; break;
     }
     return 0;
 }
 
d2084402
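 /* dummy sink for the no-op "-i" option in the table below, kept so that
  * ffmpeg-style command lines ("-i INPUT") also work with ffplay */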
 static int dummy;
 
358061f6
 static const OptionDef options[] = {
992f8eae
 #include "cmdutils_common_opts.h"
da7c65f0
     { "x", HAS_ARG, { (void*)opt_width }, "force displayed width", "width" },
     { "y", HAS_ARG, { (void*)opt_height }, "force displayed height", "height" },
     { "s", HAS_ARG | OPT_VIDEO, { (void*)opt_frame_size }, "set frame size (WxH or abbreviation)", "size" },
     { "fs", OPT_BOOL, { (void*)&is_full_screen }, "force full screen" },
     { "an", OPT_BOOL, { (void*)&audio_disable }, "disable audio" },
     { "vn", OPT_BOOL, { (void*)&video_disable }, "disable video" },
     { "ast", OPT_INT | HAS_ARG | OPT_EXPERT, { (void*)&wanted_stream[AVMEDIA_TYPE_AUDIO] }, "select desired audio stream", "stream_number" },
     { "vst", OPT_INT | HAS_ARG | OPT_EXPERT, { (void*)&wanted_stream[AVMEDIA_TYPE_VIDEO] }, "select desired video stream", "stream_number" },
     { "sst", OPT_INT | HAS_ARG | OPT_EXPERT, { (void*)&wanted_stream[AVMEDIA_TYPE_SUBTITLE] }, "select desired subtitle stream", "stream_number" },
     { "ss", HAS_ARG, { (void*)&opt_seek }, "seek to a given position in seconds", "pos" },
     { "t", HAS_ARG, { (void*)&opt_duration }, "play  \"duration\" seconds of audio/video", "duration" },
     { "bytes", OPT_INT | HAS_ARG, { (void*)&seek_by_bytes }, "seek by bytes 0=off 1=on -1=auto", "val" },
     { "nodisp", OPT_BOOL, { (void*)&display_disable }, "disable graphical display" },
     { "f", HAS_ARG, { (void*)opt_format }, "force format", "fmt" },
     { "pix_fmt", HAS_ARG | OPT_EXPERT | OPT_VIDEO, { (void*)opt_frame_pix_fmt }, "set pixel format", "format" },
     { "stats", OPT_BOOL | OPT_EXPERT, { (void*)&show_status }, "show status", "" },
     { "bug", OPT_INT | HAS_ARG | OPT_EXPERT, { (void*)&workaround_bugs }, "workaround bugs", "" },
     { "fast", OPT_BOOL | OPT_EXPERT, { (void*)&fast }, "non spec compliant optimizations", "" },
     { "genpts", OPT_BOOL | OPT_EXPERT, { (void*)&genpts }, "generate pts", "" },
     { "drp", OPT_INT | HAS_ARG | OPT_EXPERT, { (void*)&decoder_reorder_pts }, "let decoder reorder pts 0=off 1=on -1=auto", ""},
70d54392
     { "lowres", OPT_INT | HAS_ARG | OPT_EXPERT, { (void*)&lowres }, "", "" },
da7c65f0
     { "skiploop", OPT_INT | HAS_ARG | OPT_EXPERT, { (void*)&skip_loop_filter }, "", "" },
     { "skipframe", OPT_INT | HAS_ARG | OPT_EXPERT, { (void*)&skip_frame }, "", "" },
     { "skipidct", OPT_INT | HAS_ARG | OPT_EXPERT, { (void*)&skip_idct }, "", "" },
     { "idct", OPT_INT | HAS_ARG | OPT_EXPERT, { (void*)&idct }, "set idct algo",  "algo" },
     { "ec", OPT_INT | HAS_ARG | OPT_EXPERT, { (void*)&error_concealment }, "set error concealment options",  "bit_mask" },
     { "sync", HAS_ARG | OPT_EXPERT, { (void*)opt_sync }, "set audio-video sync. type (type=audio/video/ext)", "type" },
     { "autoexit", OPT_BOOL | OPT_EXPERT, { (void*)&autoexit }, "exit at the end", "" },
     { "exitonkeydown", OPT_BOOL | OPT_EXPERT, { (void*)&exit_on_keydown }, "exit on key down", "" },
     { "exitonmousedown", OPT_BOOL | OPT_EXPERT, { (void*)&exit_on_mousedown }, "exit on mouse down", "" },
     { "loop", OPT_INT | HAS_ARG | OPT_EXPERT, { (void*)&loop }, "set number of times the playback shall be looped", "loop count" },
     { "framedrop", OPT_BOOL | OPT_EXPERT, { (void*)&framedrop }, "drop frames when cpu is too slow", "" },
     { "window_title", OPT_STRING | HAS_ARG, { (void*)&window_title }, "set window title", "window title" },
917d2bb3
 #if CONFIG_AVFILTER
da7c65f0
     { "vf", OPT_STRING | HAS_ARG, { (void*)&vfilters }, "video filters", "filter list" },
917d2bb3
 #endif
da7c65f0
     { "rdftspeed", OPT_INT | HAS_ARG| OPT_AUDIO | OPT_EXPERT, { (void*)&rdftspeed }, "rdft speed", "msecs" },
eb8bc572
     { "showmode", HAS_ARG, {(void*)opt_show_mode}, "select show mode (0 = video, 1 = waves, 2 = RDFT)", "mode" },
da7c65f0
     { "default", HAS_ARG | OPT_AUDIO | OPT_VIDEO | OPT_EXPERT, { (void*)opt_default }, "generic catch all option", "" },
d2084402
     { "i", OPT_BOOL, {(void *)&dummy}, "read specified file", "input_file"},
5eda0967
     { "codec", HAS_ARG | OPT_FUNC2, {(void*)opt_codec}, "force decoder", "decoder" },
01310af2
     { NULL, },
 };
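 /* Example invocations (the file name is a placeholder):
  *   ffplay -autoexit input.mkv            quit when playback finishes
  *   ffplay -ss 00:01:00 -t 30 input.mkv   start at 1 minute, play 30 seconds
  *   ffplay -sync ext -loop 2 input.mkv    sync to the external clock, play twice
  */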
 
0c2a18cb
 static void show_usage(void)
01310af2
 {
ceef1ee7
     av_log(NULL, AV_LOG_INFO, "Simple media player\n");
     av_log(NULL, AV_LOG_INFO, "usage: %s [options] input_file\n", program_name);
     av_log(NULL, AV_LOG_INFO, "\n");
0c2a18cb
 }
 
a0991833
 static int opt_help(const char *opt, const char *arg)
0c2a18cb
 {
f66eb58e
     av_log_set_callback(log_callback_help);
0c2a18cb
     show_usage();
02d504a7
     show_help_options(options, "Main options:\n",
                       OPT_EXPERT, 0);
     show_help_options(options, "\nAdvanced options:\n",
                       OPT_EXPERT, OPT_EXPERT);
f66eb58e
     printf("\n");
7a6cd995
     show_help_children(avcodec_get_class(), AV_OPT_FLAG_DECODING_PARAM);
     show_help_children(avformat_get_class(), AV_OPT_FLAG_DECODING_PARAM);
f66eb58e
 #if !CONFIG_AVFILTER
7a6cd995
     show_help_children(sws_get_class(), AV_OPT_FLAG_ENCODING_PARAM);
f66eb58e
 #endif
01310af2
     printf("\nWhile playing:\n"
            "q, ESC              quit\n"
            "f                   toggle full screen\n"
            "p, SPC              pause\n"
638c9d91
            "a                   cycle audio channel\n"
            "v                   cycle video channel\n"
72ce053b
            "t                   cycle subtitle channel\n"
638c9d91
            "w                   show audio waves\n"
79f8b328
            "s                   activate frame-step mode\n"
72ea344b
            "left/right          seek backward/forward 10 seconds\n"
            "down/up             seek backward/forward 1 minute\n"
91a3ea67
            "page down/page up   seek backward/forward 10 minutes\n"
a11d11aa
            "mouse click         seek to percentage in file corresponding to fraction of width\n"
01310af2
            );
a0991833
     return 0;
01310af2
 }
 
ee0ff051
 static int lockmgr(void **mtx, enum AVLockOp op)
 {
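     /* lock manager callback backed by SDL mutexes; must return 0 on success, nonzero on failure */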
     switch (op) {
     case AV_LOCK_CREATE:
         *mtx = SDL_CreateMutex();
         if (!*mtx)
             return 1;
         return 0;
     case AV_LOCK_OBTAIN:
         return !!SDL_LockMutex(*mtx);
     case AV_LOCK_RELEASE:
         return !!SDL_UnlockMutex(*mtx);
     case AV_LOCK_DESTROY:
         SDL_DestroyMutex(*mtx);
         return 0;
     }
     return 1;
 }
 
01310af2
 /* program entry point */
 int main(int argc, char **argv)
 {
a5c33faa
     int flags;
84506ebd
     VideoState *is;
115329f1
 
6b6bca64
     av_log_set_flags(AV_LOG_SKIP_REPEATED);
182cbe43
     parse_loglevel(argc, argv, options);
6b6bca64
 
01310af2
     /* register all codecs, demuxers and protocols */
c721d803
     avcodec_register_all();
9b157b0c
 #if CONFIG_AVDEVICE
c721d803
     avdevice_register_all();
9b157b0c
 #endif
917d2bb3
 #if CONFIG_AVFILTER
     avfilter_register_all();
 #endif
01310af2
     av_register_all();
776f2bb9
     avformat_network_init();
01310af2
 
a5c33faa
     init_opts();
e43d7a18
 
73f2cf4e
     signal(SIGINT , sigterm_handler); /* Interrupt (ANSI).    */
     signal(SIGTERM, sigterm_handler); /* Termination (ANSI).  */
 
452406bd
     show_banner(argc, argv, options);
4cfac5bc
 
7cc8d638
     parse_options(NULL, argc, argv, options, opt_input_file);
01310af2
 
aab1b7e5
     if (!input_filename) {
7f11e745
         show_usage();
7a7da6b4
         fprintf(stderr, "An input file must be specified\n");
266463da
         fprintf(stderr, "Use -h to get full help or, even better, run 'man %s'\n", program_name);
aab1b7e5
         exit(1);
     }
01310af2
 
     if (display_disable) {
         video_disable = 1;
     }
31319a8c
     flags = SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER;
86824c1d
     if (audio_disable)
         flags &= ~SDL_INIT_AUDIO;
c97f5402
 #if !defined(__MINGW32__) && !defined(__APPLE__)
     flags |= SDL_INIT_EVENTTHREAD; /* Not supported on Windows or Mac OS X */
31319a8c
 #endif
01310af2
     if (SDL_Init (flags)) {
05ab0b76
         fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
3743ea1f
         fprintf(stderr, "(Did you set the DISPLAY variable?)\n");
01310af2
         exit(1);
     }
 
     if (!display_disable) {
b250f9c6
 #if HAVE_SDL_VIDEO_SIZE
3ef17d62
         const SDL_VideoInfo *vi = SDL_GetVideoInfo();
         fs_screen_width = vi->current_w;
         fs_screen_height = vi->current_h;
29f3b38a
 #endif
01310af2
     }
 
     SDL_EventState(SDL_ACTIVEEVENT, SDL_IGNORE);
     SDL_EventState(SDL_SYSWMEVENT, SDL_IGNORE);
     SDL_EventState(SDL_USEREVENT, SDL_IGNORE);
 
ee0ff051
     if (av_lockmgr_register(lockmgr)) {
         fprintf(stderr, "Could not initialize lock manager!\n");
         do_exit(NULL);
     }
 
39c6a118
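     /* flush_pkt is a sentinel packet: it is queued on seeks so that the decoder
      * threads know to flush their codec buffers before resuming */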
     av_init_packet(&flush_pkt);
fe9818d0
     flush_pkt.data = (char *)(intptr_t)"FLUSH";
39c6a118
 
84506ebd
     is = stream_open(input_filename, file_iformat);
     if (!is) {
         fprintf(stderr, "Failed to initialize VideoState!\n");
         do_exit(NULL);
     }
01310af2
 
84506ebd
     event_loop(is);
01310af2
 
     /* never reached: event_loop() does not return (do_exit() calls exit()) */
 
     return 0;
 }