ffplay.c
01310af2
 /*
7b376b39
  * ffplay : Simple Media Player based on the FFmpeg libraries
01310af2
  * Copyright (c) 2003 Fabrice Bellard
  *
b78e7197
  * This file is part of FFmpeg.
  *
  * FFmpeg is free software; you can redistribute it and/or
01310af2
  * modify it under the terms of the GNU Lesser General Public
  * License as published by the Free Software Foundation; either
b78e7197
  * version 2.1 of the License, or (at your option) any later version.
01310af2
  *
b78e7197
  * FFmpeg is distributed in the hope that it will be useful,
01310af2
  * but WITHOUT ANY WARRANTY; without even the implied warranty of
  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
  * Lesser General Public License for more details.
  *
  * You should have received a copy of the GNU Lesser General Public
b78e7197
  * License along with FFmpeg; if not, write to the Free Software
5509bffa
  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
01310af2
  */
364a9607
 
ba11257e
 #include "config.h"
8a3ceaf4
 #include <inttypes.h>
0f4e8165
 #include <math.h>
 #include <limits.h>
245976da
 #include "libavutil/avstring.h"
2b4abbd6
 #include "libavutil/colorspace.h"
718c7b18
 #include "libavutil/pixdesc.h"
7ffe76e5
 #include "libavutil/imgutils.h"
e8454552
 #include "libavutil/dict.h"
7ffe76e5
 #include "libavutil/parseutils.h"
 #include "libavutil/samplefmt.h"
f6d71b39
 #include "libavutil/avassert.h"
245976da
 #include "libavformat/avformat.h"
 #include "libavdevice/avdevice.h"
 #include "libswscale/swscale.h"
5a4476e2
 #include "libavcodec/audioconvert.h"
e43d7a18
 #include "libavcodec/opt.h"
166621ab
 #include "libavcodec/avfft.h"
01310af2
 
917d2bb3
 #if CONFIG_AVFILTER
566666ca
 # include "libavfilter/avcodec.h"
917d2bb3
 # include "libavfilter/avfilter.h"
 # include "libavfilter/avfiltergraph.h"
44f669e7
 # include "libavfilter/vsink_buffer.h"
917d2bb3
 #endif
 
01310af2
 #include <SDL.h>
 #include <SDL_thread.h>
 
25c32d08
 #include "cmdutils.h"
31319a8c
 
d38c9e7a
 #include <unistd.h>
 #include <assert.h>
 
89b503b5
const char program_name[] = "ffplay";
const int program_birth_year = 2003;

/* demuxer packet-queue limits */
#define MAX_QUEUE_SIZE (15 * 1024 * 1024)
#define MIN_AUDIOQ_SIZE (20 * 16 * 1024)
#define MIN_FRAMES 5

/* SDL audio buffer size, in samples. Should be small to have precise
   A/V sync as SDL does not have hardware buffer fullness info. */
#define SDL_AUDIO_BUFFER_SIZE 1024

/* no AV sync correction is done if below the AV sync threshold */
#define AV_SYNC_THRESHOLD 0.01
/* no AV correction is done if too big error */
#define AV_NOSYNC_THRESHOLD 10.0

#define FRAME_SKIP_FACTOR 0.05

/* maximum audio speed change to get correct sync */
#define SAMPLE_CORRECTION_PERCENT_MAX 10

/* we use about AUDIO_DIFF_AVG_NB A-V differences to make the average */
#define AUDIO_DIFF_AVG_NB   20

/* NOTE: the size must be big enough to compensate the hardware audio buffersize size */
#define SAMPLE_ARRAY_SIZE (2*65536)

/* libswscale scaling quality used by the non-avfilter video path */
static int sws_flags = SWS_BICUBIC;
 
01310af2
/* Thread-safe FIFO of demuxed packets shared between the read thread and
   the decoder threads. */
typedef struct PacketQueue {
    AVPacketList *first_pkt, *last_pkt; /* singly linked list, head and tail */
    int nb_packets;                     /* number of packets currently queued */
    int size;                           /* payload bytes + node overhead (see packet_queue_put) */
    int abort_request;                  /* when set, blocked readers return -1 */
    SDL_mutex *mutex;                   /* guards all fields above */
    SDL_cond *cond;                     /* signaled on every put and on abort */
} PacketQueue;
 
562f382c
 #define VIDEO_PICTURE_QUEUE_SIZE 2
72ce053b
 #define SUBPICTURE_QUEUE_SIZE 4
01310af2
 
/* One entry of the decoded-picture queue, backed by an SDL YUV overlay. */
typedef struct VideoPicture {
    double pts;                                  ///<presentation time stamp for this picture
    double target_clock;                         ///<av_gettime() time at which this should be displayed ideally
    int64_t pos;                                 ///<byte position in file
    SDL_Overlay *bmp;                            ///<SDL overlay holding the pixel data
    int width, height; /* source height & width */
    int allocated;                               ///<nonzero once bmp has been allocated
    enum PixelFormat pix_fmt;

#if CONFIG_AVFILTER
    AVFilterBufferRef *picref;                   ///<filtered buffer this picture was copied from
#endif
} VideoPicture;
 
72ce053b
/* One entry of the decoded-subtitle queue. */
typedef struct SubPicture {
    double pts; /* presentation time stamp for this picture */
    AVSubtitle sub;
} SubPicture;
 
01310af2
/* which clock the other streams are slaved to */
enum {
    AV_SYNC_AUDIO_MASTER, /* default choice */
    AV_SYNC_VIDEO_MASTER, /* sync to the video clock */
    AV_SYNC_EXTERNAL_CLOCK, /* synchronize to an external clock */
};
 
/* Complete state of one open media file: demuxer, per-stream decoders,
   queues, clocks and display geometry. One instance exists per playing
   stream (cur_stream). */
typedef struct VideoState {
    SDL_Thread *read_tid;       ///<demuxer thread
    SDL_Thread *video_tid;      ///<video decoder thread
    SDL_Thread *refresh_tid;    ///<thread posting FF_REFRESH_EVENTs (see refresh_thread)
    AVInputFormat *iformat;     ///<input format (possibly forced by the user)
    int no_background;
    int abort_request;          ///<set to request all threads to exit
    int paused;
    int last_paused;            ///<previous pause state, to detect transitions
    int seek_req;               ///<a seek is pending
    int seek_flags;             ///<AVSEEK_FLAG_* for the pending seek
    int64_t seek_pos;           ///<target position of the pending seek
    int64_t seek_rel;           ///<relative offset of the pending seek
    int read_pause_return;
    AVFormatContext *ic;

    int audio_stream;           ///<index of the selected audio stream

    int av_sync_type;           ///<one of AV_SYNC_* above
    double external_clock; /* external clock base */
    int64_t external_clock_time;

    double audio_clock;         ///<pts of the last decoded audio data (see get_audio_clock)
    double audio_diff_cum; /* used for AV difference average computation */
    double audio_diff_avg_coef;
    double audio_diff_threshold;
    int audio_diff_avg_count;
    AVStream *audio_st;
    PacketQueue audioq;
    int audio_hw_buf_size;
    /* samples output by the codec. we reserve more space for avsync
       compensation */
    DECLARE_ALIGNED(16,uint8_t,audio_buf1)[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2];
    DECLARE_ALIGNED(16,uint8_t,audio_buf2)[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2];
    uint8_t *audio_buf;          ///<current output buffer handed to SDL
    unsigned int audio_buf_size; /* in bytes */
    int audio_buf_index; /* in bytes */
    AVPacket audio_pkt_temp;
    AVPacket audio_pkt;
    enum AVSampleFormat audio_src_fmt;
    AVAudioConvert *reformat_ctx;

    /* what video_display() draws when audio-only or forced by the user */
    enum ShowMode {
        SHOW_MODE_NONE = -1, SHOW_MODE_VIDEO = 0, SHOW_MODE_WAVES, SHOW_MODE_RDFT, SHOW_MODE_NB
    } show_mode;
    int16_t sample_array[SAMPLE_ARRAY_SIZE]; ///<ring buffer of recent samples for the visualizations
    int sample_array_index;
    int last_i_start;           ///<display start index reused while paused
    RDFTContext *rdft;          ///<FFT context for SHOW_MODE_RDFT
    int rdft_bits;              ///<log2 of the current RDFT size
    FFTSample *rdft_data;
    int xpos;                   ///<current column of the scrolling spectrogram

    SDL_Thread *subtitle_tid;
    int subtitle_stream;
    int subtitle_stream_changed;
    AVStream *subtitle_st;
    PacketQueue subtitleq;
    SubPicture subpq[SUBPICTURE_QUEUE_SIZE];
    int subpq_size, subpq_rindex, subpq_windex;
    SDL_mutex *subpq_mutex;
    SDL_cond *subpq_cond;

    double frame_timer;
    double frame_last_pts;
    double frame_last_delay;
    double video_clock;                          ///<pts of last decoded frame / predicted pts of next decoded frame
    int video_stream;
    AVStream *video_st;
    PacketQueue videoq;
    double video_current_pts;                    ///<current displayed pts (different from video_clock if frame fifos are used)
    double video_current_pts_drift;              ///<video_current_pts - time (av_gettime) at which we updated video_current_pts - used to have running video pts
    int64_t video_current_pos;                   ///<current displayed file pos
    VideoPicture pictq[VIDEO_PICTURE_QUEUE_SIZE];
    int pictq_size, pictq_rindex, pictq_windex;
    SDL_mutex *pictq_mutex;
    SDL_cond *pictq_cond;
#if !CONFIG_AVFILTER
    struct SwsContext *img_convert_ctx;
#endif

    char filename[1024];
    int width, height, xleft, ytop;   ///<display geometry within the SDL surface

#if CONFIG_AVFILTER
    AVFilterContext *out_video_filter;          ///<the last filter in the video chain
#endif

    float skip_frames;
    float skip_frames_index;
    int refresh;                ///<a refresh event is pending (set by refresh_thread)
} VideoState;
 
7b6b9be8
static int opt_help(const char *opt, const char *arg);

/* options specified by the user */
static AVInputFormat *file_iformat;
static const char *input_filename;
static const char *window_title;
static int fs_screen_width;
static int fs_screen_height;
static int screen_width = 0;
static int screen_height = 0;
static int frame_width = 0;
static int frame_height = 0;
static enum PixelFormat frame_pix_fmt = PIX_FMT_NONE;
static int audio_disable;
static int video_disable;
/* user-selected stream index per media type (-1: not specified) */
static int wanted_stream[AVMEDIA_TYPE_NB]={
    [AVMEDIA_TYPE_AUDIO]=-1,
    [AVMEDIA_TYPE_VIDEO]=-1,
    [AVMEDIA_TYPE_SUBTITLE]=-1,
};
static int seek_by_bytes=-1;
static int display_disable;
static int show_status = 1;
static int av_sync_type = AV_SYNC_AUDIO_MASTER;
static int64_t start_time = AV_NOPTS_VALUE;
static int64_t duration = AV_NOPTS_VALUE;
static int step = 0;
static int thread_count = 1;
static int workaround_bugs = 1;
static int fast = 0;
static int genpts = 0;
static int lowres = 0;
static int idct = FF_IDCT_AUTO;
static enum AVDiscard skip_frame= AVDISCARD_DEFAULT;
static enum AVDiscard skip_idct= AVDISCARD_DEFAULT;
static enum AVDiscard skip_loop_filter= AVDISCARD_DEFAULT;
static int error_recognition = FF_ER_CAREFUL;
static int error_concealment = 3;
static int decoder_reorder_pts= -1;
static int autoexit;
static int exit_on_keydown;
static int exit_on_mousedown;
static int loop=1;
static int framedrop=-1;
static enum ShowMode show_mode = SHOW_MODE_NONE;

static int rdftspeed=20;    /* refresh period of the RDFT display, in ms */
#if CONFIG_AVFILTER
static char *vfilters = NULL;
#endif

/* current context */
static int is_full_screen;
static VideoState *cur_stream;
static int64_t audio_callback_time;

/* special marker packet, queued by reference (never duplicated) */
static AVPacket flush_pkt;

/* custom SDL events used by the refresh and allocation machinery */
#define FF_ALLOC_EVENT   (SDL_USEREVENT)
#define FF_REFRESH_EVENT (SDL_USEREVENT + 1)
#define FF_QUIT_EVENT    (SDL_USEREVENT + 2)

static SDL_Surface *screen;
01310af2
 
eef16966
 static int packet_queue_put(PacketQueue *q, AVPacket *pkt)
 {
     AVPacketList *pkt1;
 
     /* duplicate the packet */
     if (pkt!=&flush_pkt && av_dup_packet(pkt) < 0)
         return -1;
 
     pkt1 = av_malloc(sizeof(AVPacketList));
     if (!pkt1)
         return -1;
     pkt1->pkt = *pkt;
     pkt1->next = NULL;
 
 
     SDL_LockMutex(q->mutex);
 
     if (!q->last_pkt)
 
         q->first_pkt = pkt1;
     else
         q->last_pkt->next = pkt1;
     q->last_pkt = pkt1;
     q->nb_packets++;
     q->size += pkt1->pkt.size + sizeof(*pkt1);
     /* XXX: should duplicate packet data in DV case */
     SDL_CondSignal(q->cond);
 
     SDL_UnlockMutex(q->mutex);
     return 0;
 }
515bd00e
 
01310af2
 /* packet queue handling */
 static void packet_queue_init(PacketQueue *q)
 {
     memset(q, 0, sizeof(PacketQueue));
     q->mutex = SDL_CreateMutex();
     q->cond = SDL_CreateCond();
515bd00e
     packet_queue_put(q, &flush_pkt);
01310af2
 }
 
72ea344b
 static void packet_queue_flush(PacketQueue *q)
01310af2
 {
     AVPacketList *pkt, *pkt1;
 
687fae2b
     SDL_LockMutex(q->mutex);
01310af2
     for(pkt = q->first_pkt; pkt != NULL; pkt = pkt1) {
         pkt1 = pkt->next;
         av_free_packet(&pkt->pkt);
da6c4573
         av_freep(&pkt);
01310af2
     }
72ea344b
     q->last_pkt = NULL;
     q->first_pkt = NULL;
     q->nb_packets = 0;
     q->size = 0;
687fae2b
     SDL_UnlockMutex(q->mutex);
72ea344b
 }
 
 static void packet_queue_end(PacketQueue *q)
 {
     packet_queue_flush(q);
01310af2
     SDL_DestroyMutex(q->mutex);
     SDL_DestroyCond(q->cond);
 }
 
 static void packet_queue_abort(PacketQueue *q)
 {
     SDL_LockMutex(q->mutex);
 
     q->abort_request = 1;
115329f1
 
01310af2
     SDL_CondSignal(q->cond);
 
     SDL_UnlockMutex(q->mutex);
 }
 
 /* return < 0 if aborted, 0 if no packet and > 0 if packet.  */
 static int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block)
 {
     AVPacketList *pkt1;
     int ret;
 
     SDL_LockMutex(q->mutex);
 
     for(;;) {
         if (q->abort_request) {
             ret = -1;
             break;
         }
115329f1
 
01310af2
         pkt1 = q->first_pkt;
         if (pkt1) {
             q->first_pkt = pkt1->next;
             if (!q->first_pkt)
                 q->last_pkt = NULL;
             q->nb_packets--;
7b776589
             q->size -= pkt1->pkt.size + sizeof(*pkt1);
01310af2
             *pkt = pkt1->pkt;
             av_free(pkt1);
             ret = 1;
             break;
         } else if (!block) {
             ret = 0;
             break;
         } else {
             SDL_CondWait(q->cond, q->mutex);
         }
     }
     SDL_UnlockMutex(q->mutex);
     return ret;
 }
 
115329f1
 static inline void fill_rectangle(SDL_Surface *screen,
01310af2
                                   int x, int y, int w, int h, int color)
 {
     SDL_Rect rect;
     rect.x = x;
     rect.y = y;
     rect.w = w;
     rect.h = h;
     SDL_FillRect(screen, &rect, color);
 }
 
72ce053b
/* Blend newp over oldp with alpha a; s is the fixed-point shift applied to
   oldp/newp (used when a component accumulates 2 or 4 pixels). */
#define ALPHA_BLEND(a, oldp, newp, s)\
((((oldp << s) * (255 - (a))) + (newp * (a))) / (255 << s))

/* Unpack a 32-bit ARGB pixel at s into its four components. */
#define RGBA_IN(r, g, b, a, s)\
{\
    unsigned int v = ((const uint32_t *)(s))[0];\
    a = (v >> 24) & 0xff;\
    r = (v >> 16) & 0xff;\
    g = (v >> 8) & 0xff;\
    b = v & 0xff;\
}

/* Look up the palette entry for the 8-bit pixel *s and unpack it as AYUV. */
#define YUVA_IN(y, u, v, a, s, pal)\
{\
    unsigned int val = ((const uint32_t *)(pal))[*(const uint8_t*)(s)];\
    a = (val >> 24) & 0xff;\
    y = (val >> 16) & 0xff;\
    u = (val >> 8) & 0xff;\
    v = val & 0xff;\
}

/* Pack AYUV components back into the 32-bit word at d. */
#define YUVA_OUT(d, y, u, v, a)\
{\
    ((uint32_t *)(d))[0] = (a << 24) | (y << 16) | (u << 8) | v;\
}

/* bytes per palettized source pixel */
#define BPP 1
 
0a8cd696
 static void blend_subrect(AVPicture *dst, const AVSubtitleRect *rect, int imgw, int imgh)
72ce053b
 {
     int wrap, wrap3, width2, skip2;
     int y, u, v, a, u1, v1, a1, w, h;
     uint8_t *lum, *cb, *cr;
     const uint8_t *p;
     const uint32_t *pal;
9cb5a11e
     int dstx, dsty, dstw, dsth;
 
7cf9c6ae
     dstw = av_clip(rect->w, 0, imgw);
     dsth = av_clip(rect->h, 0, imgh);
     dstx = av_clip(rect->x, 0, imgw - dstw);
     dsty = av_clip(rect->y, 0, imgh - dsth);
9cb5a11e
     lum = dst->data[0] + dsty * dst->linesize[0];
     cb = dst->data[1] + (dsty >> 1) * dst->linesize[1];
     cr = dst->data[2] + (dsty >> 1) * dst->linesize[2];
 
f54b31b9
     width2 = ((dstw + 1) >> 1) + (dstx & ~dstw & 1);
9cb5a11e
     skip2 = dstx >> 1;
72ce053b
     wrap = dst->linesize[0];
25b4c651
     wrap3 = rect->pict.linesize[0];
     p = rect->pict.data[0];
     pal = (const uint32_t *)rect->pict.data[1];  /* Now in YCrCb! */
115329f1
 
9cb5a11e
     if (dsty & 1) {
         lum += dstx;
72ce053b
         cb += skip2;
         cr += skip2;
115329f1
 
9cb5a11e
         if (dstx & 1) {
72ce053b
             YUVA_IN(y, u, v, a, p, pal);
             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
             cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
             cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
             cb++;
             cr++;
             lum++;
             p += BPP;
         }
9cb5a11e
         for(w = dstw - (dstx & 1); w >= 2; w -= 2) {
72ce053b
             YUVA_IN(y, u, v, a, p, pal);
             u1 = u;
             v1 = v;
             a1 = a;
             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
 
             YUVA_IN(y, u, v, a, p + BPP, pal);
             u1 += u;
             v1 += v;
             a1 += a;
             lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
             cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
             cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
             cb++;
             cr++;
             p += 2 * BPP;
             lum += 2;
         }
         if (w) {
             YUVA_IN(y, u, v, a, p, pal);
             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
             cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
             cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
676ef505
             p++;
             lum++;
72ce053b
         }
4606a059
         p += wrap3 - dstw * BPP;
         lum += wrap - dstw - dstx;
72ce053b
         cb += dst->linesize[1] - width2 - skip2;
         cr += dst->linesize[2] - width2 - skip2;
     }
9cb5a11e
     for(h = dsth - (dsty & 1); h >= 2; h -= 2) {
         lum += dstx;
72ce053b
         cb += skip2;
         cr += skip2;
115329f1
 
9cb5a11e
         if (dstx & 1) {
72ce053b
             YUVA_IN(y, u, v, a, p, pal);
             u1 = u;
             v1 = v;
             a1 = a;
             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
             p += wrap3;
             lum += wrap;
             YUVA_IN(y, u, v, a, p, pal);
             u1 += u;
             v1 += v;
             a1 += a;
             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
             cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
             cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
             cb++;
             cr++;
             p += -wrap3 + BPP;
             lum += -wrap + 1;
         }
9cb5a11e
         for(w = dstw - (dstx & 1); w >= 2; w -= 2) {
72ce053b
             YUVA_IN(y, u, v, a, p, pal);
             u1 = u;
             v1 = v;
             a1 = a;
             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
 
f8ca63e8
             YUVA_IN(y, u, v, a, p + BPP, pal);
72ce053b
             u1 += u;
             v1 += v;
             a1 += a;
             lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
             p += wrap3;
             lum += wrap;
 
             YUVA_IN(y, u, v, a, p, pal);
             u1 += u;
             v1 += v;
             a1 += a;
             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
 
f8ca63e8
             YUVA_IN(y, u, v, a, p + BPP, pal);
72ce053b
             u1 += u;
             v1 += v;
             a1 += a;
             lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
 
             cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 2);
             cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 2);
 
             cb++;
             cr++;
             p += -wrap3 + 2 * BPP;
             lum += -wrap + 2;
         }
         if (w) {
             YUVA_IN(y, u, v, a, p, pal);
             u1 = u;
             v1 = v;
             a1 = a;
             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
             p += wrap3;
             lum += wrap;
             YUVA_IN(y, u, v, a, p, pal);
             u1 += u;
             v1 += v;
             a1 += a;
             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
             cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
             cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
             cb++;
             cr++;
             p += -wrap3 + BPP;
             lum += -wrap + 1;
         }
9cb5a11e
         p += wrap3 + (wrap3 - dstw * BPP);
         lum += wrap + (wrap - dstw - dstx);
72ce053b
         cb += dst->linesize[1] - width2 - skip2;
         cr += dst->linesize[2] - width2 - skip2;
     }
     /* handle odd height */
     if (h) {
9cb5a11e
         lum += dstx;
72ce053b
         cb += skip2;
         cr += skip2;
115329f1
 
9cb5a11e
         if (dstx & 1) {
72ce053b
             YUVA_IN(y, u, v, a, p, pal);
             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
             cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
             cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
             cb++;
             cr++;
             lum++;
             p += BPP;
         }
9cb5a11e
         for(w = dstw - (dstx & 1); w >= 2; w -= 2) {
72ce053b
             YUVA_IN(y, u, v, a, p, pal);
             u1 = u;
             v1 = v;
             a1 = a;
             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
 
             YUVA_IN(y, u, v, a, p + BPP, pal);
             u1 += u;
             v1 += v;
             a1 += a;
             lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
             cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u, 1);
             cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v, 1);
             cb++;
             cr++;
             p += 2 * BPP;
             lum += 2;
         }
         if (w) {
             YUVA_IN(y, u, v, a, p, pal);
             lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
             cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
             cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
         }
     }
 }
 
/* Release the storage owned by a queued subtitle (its AVSubtitle rects). */
static void free_subpicture(SubPicture *sp)
{
    avsubtitle_free(&sp->sub);
}
 
01310af2
/* Blit the picture at the read index of the picture queue onto the screen,
   first blending any due subtitle into the overlay, then letterboxing the
   result to the picture's display aspect ratio. */
static void video_image_display(VideoState *is)
{
    VideoPicture *vp;
    SubPicture *sp;
    AVPicture pict;
    float aspect_ratio;
    int width, height, x, y;
    SDL_Rect rect;
    int i;

    vp = &is->pictq[is->pictq_rindex];
    if (vp->bmp) {
#if CONFIG_AVFILTER
         if (vp->picref->video->sample_aspect_ratio.num == 0)
             aspect_ratio = 0;
         else
             aspect_ratio = av_q2d(vp->picref->video->sample_aspect_ratio);
#else

        /* XXX: use variable in the frame */
        /* stream-level SAR takes precedence over the codec-level one */
        if (is->video_st->sample_aspect_ratio.num)
            aspect_ratio = av_q2d(is->video_st->sample_aspect_ratio);
        else if (is->video_st->codec->sample_aspect_ratio.num)
            aspect_ratio = av_q2d(is->video_st->codec->sample_aspect_ratio);
        else
            aspect_ratio = 0;
#endif
        if (aspect_ratio <= 0.0)
            aspect_ratio = 1.0;
        /* convert sample aspect ratio to display aspect ratio */
        aspect_ratio *= (float)vp->width / (float)vp->height;

        if (is->subtitle_st) {
            if (is->subpq_size > 0) {
                sp = &is->subpq[is->subpq_rindex];

                /* blend once the subtitle's start display time is reached */
                if (vp->pts >= sp->pts + ((float) sp->sub.start_display_time / 1000)) {
                    SDL_LockYUVOverlay (vp->bmp);

                    /* planes 1 and 2 are swapped to match the overlay's
                       chroma plane order */
                    pict.data[0] = vp->bmp->pixels[0];
                    pict.data[1] = vp->bmp->pixels[2];
                    pict.data[2] = vp->bmp->pixels[1];

                    pict.linesize[0] = vp->bmp->pitches[0];
                    pict.linesize[1] = vp->bmp->pitches[2];
                    pict.linesize[2] = vp->bmp->pitches[1];

                    for (i = 0; i < sp->sub.num_rects; i++)
                        blend_subrect(&pict, sp->sub.rects[i],
                                      vp->bmp->w, vp->bmp->h);

                    SDL_UnlockYUVOverlay (vp->bmp);
                }
            }
        }


        /* XXX: we suppose the screen has a 1.0 pixel ratio */
        /* fit the picture into the window, keeping aspect, even dimensions */
        height = is->height;
        width = ((int)rint(height * aspect_ratio)) & ~1;
        if (width > is->width) {
            width = is->width;
            height = ((int)rint(width / aspect_ratio)) & ~1;
        }
        x = (is->width - width) / 2;
        y = (is->height - height) / 2;
        is->no_background = 0;
        rect.x = is->xleft + x;
        rect.y = is->ytop  + y;
        rect.w = FFMAX(width,  1);
        rect.h = FFMAX(height, 1);
        SDL_DisplayYUVOverlay(vp->bmp, &rect);
    }
}
 
8776f3d2
/* get the current audio output buffer size, in samples. With SDL, we
   cannot have a precise information */
static int audio_write_get_buf_size(VideoState *is)
{
    /* note: audio_buf_size/audio_buf_index are byte counts, so this is the
       number of decoded bytes not yet consumed by the SDL audio callback */
    return is->audio_buf_size - is->audio_buf_index;
}
 
01310af2
 static inline int compute_mod(int a, int b)
 {
91b27e49
     return a < 0 ? a%b + b : a%b;
01310af2
 }
 
/* Draw the audio visualization — oscilloscope waveform (SHOW_MODE_WAVES)
   or scrolling RDFT spectrogram — from the recent-sample ring buffer.
   Called from the refresh path when show_mode != SHOW_MODE_VIDEO. */
static void video_audio_display(VideoState *s)
{
    int i, i_start, x, y1, y, ys, delay, n, nb_display_channels;
    int ch, channels, h, h2, bgcolor, fgcolor;
    int16_t time_diff;
    int rdft_bits, nb_freq;

    /* pick the smallest transform whose output spans the display height */
    for(rdft_bits=1; (1<<rdft_bits)<2*s->height; rdft_bits++)
        ;
    nb_freq= 1<<(rdft_bits-1);

    /* compute display index : center on currently output samples */
    channels = s->audio_st->codec->channels;
    nb_display_channels = channels;
    if (!s->paused) {
        int data_used= s->show_mode == SHOW_MODE_WAVES ? s->width : (2*nb_freq);
        n = 2 * channels;   /* bytes per sample frame (S16) */
        delay = audio_write_get_buf_size(s);
        delay /= n;

        /* to be more precise, we take into account the time spent since
           the last buffer computation */
        if (audio_callback_time) {
            time_diff = av_gettime() - audio_callback_time;
            delay -= (time_diff * s->audio_st->codec->sample_rate) / 1000000;
        }

        delay += 2*data_used;
        if (delay < data_used)
            delay = data_used;

        i_start= x = compute_mod(s->sample_array_index - delay * channels, SAMPLE_ARRAY_SIZE);
        if (s->show_mode == SHOW_MODE_WAVES) {
            /* search near the start point for a position that stabilizes
               the waveform between refreshes (best score with a sign
               change between two nearby samples) */
            h= INT_MIN;
            for(i=0; i<1000; i+=channels){
                int idx= (SAMPLE_ARRAY_SIZE + x - i) % SAMPLE_ARRAY_SIZE;
                int a= s->sample_array[idx];
                int b= s->sample_array[(idx + 4*channels)%SAMPLE_ARRAY_SIZE];
                int c= s->sample_array[(idx + 5*channels)%SAMPLE_ARRAY_SIZE];
                int d= s->sample_array[(idx + 9*channels)%SAMPLE_ARRAY_SIZE];
                int score= a-d;
                if(h<score && (b^c)<0){
                    h= score;
                    i_start= idx;
                }
            }
        }

        s->last_i_start = i_start;
    } else {
        /* while paused, keep drawing from the same position */
        i_start = s->last_i_start;
    }

    bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00);
    if (s->show_mode == SHOW_MODE_WAVES) {
        fill_rectangle(screen,
                       s->xleft, s->ytop, s->width, s->height,
                       bgcolor);

        fgcolor = SDL_MapRGB(screen->format, 0xff, 0xff, 0xff);

        /* total height for one channel */
        h = s->height / nb_display_channels;
        /* graph height / 2 */
        h2 = (h * 9) / 20;
        for(ch = 0;ch < nb_display_channels; ch++) {
            i = i_start + ch;
            y1 = s->ytop + ch * h + (h / 2); /* position of center line */
            for(x = 0; x < s->width; x++) {
                y = (s->sample_array[i] * h2) >> 15;
                if (y < 0) {
                    y = -y;
                    ys = y1 - y;
                } else {
                    ys = y1;
                }
                fill_rectangle(screen,
                               s->xleft + x, ys, 1, y,
                               fgcolor);
                i += channels;
                if (i >= SAMPLE_ARRAY_SIZE)
                    i -= SAMPLE_ARRAY_SIZE;
            }
        }

        fgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0xff);

        /* separator lines between channels */
        for(ch = 1;ch < nb_display_channels; ch++) {
            y = s->ytop + ch * h;
            fill_rectangle(screen,
                           s->xleft, y, s->width, 1,
                           fgcolor);
        }
        SDL_UpdateRect(screen, s->xleft, s->ytop, s->width, s->height);
    }else{
        /* RDFT spectrogram: draw one column per refresh, scrolling right */
        nb_display_channels= FFMIN(nb_display_channels, 2);
        if(rdft_bits != s->rdft_bits){
            /* window size changed: rebuild the FFT context and buffer */
            av_rdft_end(s->rdft);
            av_free(s->rdft_data);
            s->rdft = av_rdft_init(rdft_bits, DFT_R2C);
            s->rdft_bits= rdft_bits;
            s->rdft_data= av_malloc(4*nb_freq*sizeof(*s->rdft_data));
        }
        {
            FFTSample *data[2];
            for(ch = 0;ch < nb_display_channels; ch++) {
                data[ch] = s->rdft_data + 2*nb_freq*ch;
                i = i_start + ch;
                for(x = 0; x < 2*nb_freq; x++) {
                    /* apply a parabolic (1 - w^2) window over the block */
                    double w= (x-nb_freq)*(1.0/nb_freq);
                    data[ch][x]= s->sample_array[i]*(1.0-w*w);
                    i += channels;
                    if (i >= SAMPLE_ARRAY_SIZE)
                        i -= SAMPLE_ARRAY_SIZE;
                }
                av_rdft_calc(s->rdft, data[ch]);
            }
            //least efficient way to do this, we should of course directly access it but its more than fast enough
            for(y=0; y<s->height; y++){
                double w= 1/sqrt(nb_freq);
                int a= sqrt(w*sqrt(data[0][2*y+0]*data[0][2*y+0] + data[0][2*y+1]*data[0][2*y+1]));
                int b= (nb_display_channels == 2 ) ? sqrt(w*sqrt(data[1][2*y+0]*data[1][2*y+0]
                       + data[1][2*y+1]*data[1][2*y+1])) : a;
                a= FFMIN(a,255);
                b= FFMIN(b,255);
                /* channel 0 drives red, channel 1 drives green */
                fgcolor = SDL_MapRGB(screen->format, a, b, (a+b)/2);

                fill_rectangle(screen,
                            s->xpos, s->height-y, 1, 1,
                            fgcolor);
            }
        }
        SDL_UpdateRect(screen, s->xpos, s->ytop, 1, s->height);
        s->xpos++;
        if(s->xpos >= s->width)
            s->xpos= s->xleft;
    }
}
 
d5708923
/* Stop the threads belonging to a VideoState and free everything it owns,
   including the picture queue overlays and the VideoState itself. */
static void stream_close(VideoState *is)
{
    VideoPicture *vp;
    int i;
    /* XXX: use a special url_shutdown call to abort parse cleanly */
    is->abort_request = 1;
    SDL_WaitThread(is->read_tid, NULL);
    SDL_WaitThread(is->refresh_tid, NULL);

    /* free all pictures */
    for(i=0;i<VIDEO_PICTURE_QUEUE_SIZE; i++) {
        vp = &is->pictq[i];
#if CONFIG_AVFILTER
        if (vp->picref) {
            avfilter_unref_buffer(vp->picref);
            vp->picref = NULL;
        }
#endif
        if (vp->bmp) {
            SDL_FreeYUVOverlay(vp->bmp);
            vp->bmp = NULL;
        }
    }
    SDL_DestroyMutex(is->pictq_mutex);
    SDL_DestroyCond(is->pictq_cond);
    SDL_DestroyMutex(is->subpq_mutex);
    SDL_DestroyCond(is->subpq_cond);
#if !CONFIG_AVFILTER
    if (is->img_convert_ctx)
        sws_freeContext(is->img_convert_ctx);
#endif
    av_free(is);
}
 
/* Close the current stream (if any), release global resources and SDL,
   then terminate the process with status 0. */
static void do_exit(void)
{
    if (cur_stream) {
        stream_close(cur_stream);
        cur_stream = NULL;
    }
    uninit_opts();
#if CONFIG_AVFILTER
    avfilter_uninit();
#endif
    if (show_status)
        printf("\n");
    SDL_Quit();
    av_log(NULL, AV_LOG_QUIET, "%s", "");
    exit(0);
}
 
990c8438
/* (Re)create the SDL video surface. Size priority: fullscreen resolution,
   user-forced geometry, filter/stream dimensions, then a 640x480 fallback.
   No-op if the surface already has the requested size. Returns 0. */
static int video_open(VideoState *is){
    int flags = SDL_HWSURFACE|SDL_ASYNCBLIT|SDL_HWACCEL;
    int w,h;

    if(is_full_screen) flags |= SDL_FULLSCREEN;
    else               flags |= SDL_RESIZABLE;

    if (is_full_screen && fs_screen_width) {
        w = fs_screen_width;
        h = fs_screen_height;
    } else if(!is_full_screen && screen_width){
        w = screen_width;
        h = screen_height;
#if CONFIG_AVFILTER
    }else if (is->out_video_filter && is->out_video_filter->inputs[0]){
        w = is->out_video_filter->inputs[0]->w;
        h = is->out_video_filter->inputs[0]->h;
#else
    }else if (is->video_st && is->video_st->codec->width){
        w = is->video_st->codec->width;
        h = is->video_st->codec->height;
#endif
    } else {
        w = 640;
        h = 480;
    }
    /* nothing to do if the mode is unchanged */
    if(screen && is->width == screen->w && screen->w == w
       && is->height== screen->h && screen->h == h)
        return 0;

#ifndef __APPLE__
    screen = SDL_SetVideoMode(w, h, 0, flags);
#else
    /* setting bits_per_pixel = 0 or 32 causes blank video on OS X */
    screen = SDL_SetVideoMode(w, h, 24, flags);
#endif
    if (!screen) {
        fprintf(stderr, "SDL: could not set video mode - exiting\n");
        do_exit();
    }
    if (!window_title)
        window_title = input_filename;
    SDL_WM_SetCaption(window_title, window_title);

    is->width = screen->w;
    is->height = screen->h;

    return 0;
}
8c982c5d
 
01310af2
 /* display the current picture, if any */
 static void video_display(VideoState *is)
 {
8c982c5d
     /* open the SDL window lazily on first use */
     if(!screen)
         video_open(cur_stream);
f8b8c694
     /* audio-only visualization modes draw waves/spectrum instead of video */
     if (is->audio_st && is->show_mode != SHOW_MODE_VIDEO)
01310af2
         video_audio_display(is);
     else if (is->video_st)
         video_image_display(is);
 }
 
d38c9e7a
 static int refresh_thread(void *opaque)
01310af2
 {
d38c9e7a
     VideoState *is= opaque;
     while(!is->abort_request){
d881a0e8
         SDL_Event event;
         event.type = FF_REFRESH_EVENT;
         event.user.data1 = opaque;
d38c9e7a
         if(!is->refresh){
             is->refresh=1;
d881a0e8
             SDL_PushEvent(&event);
d38c9e7a
         }
f8b8c694
         //FIXME ideally we should wait the correct time but SDLs event passing is so slow it would be silly
         usleep(is->audio_st && is->show_mode != SHOW_MODE_VIDEO ? rdftspeed*1000 : 5000);
d38c9e7a
     }
     return 0;
01310af2
 }
 
638c9d91
 /* get the current audio clock value */
 static double get_audio_clock(VideoState *is)
 {
     /* Start from the clock stamped by the audio path and subtract the
      * playout latency of whatever still sits in the hardware buffer
      * (16-bit samples => 2 bytes per channel per sample). */
     double clock     = is->audio_clock;
     int    buffered  = audio_write_get_buf_size(is);
     int    byte_rate = 0;
 
     if (is->audio_st)
         byte_rate = is->audio_st->codec->sample_rate * 2 *
                     is->audio_st->codec->channels;
     if (byte_rate)
         clock -= (double)buffered / byte_rate;
     return clock;
 }
 
 /* get the current video clock value */
 static double get_video_clock(VideoState *is)
 {
     /* When paused the clock is frozen at the last displayed pts;
      * otherwise extrapolate it from the stored drift. */
     if (is->paused)
         return is->video_current_pts;
     return is->video_current_pts_drift + av_gettime() / 1000000.0;
 }
 
 /* get the current external clock value */
 static double get_external_clock(VideoState *is)
 {
     /* free-running reference clock: base value plus elapsed wall time */
     int64_t now = av_gettime();
 
     return is->external_clock + (now - is->external_clock_time) * 1e-6;
 }
 
 /* get the current master clock value */
 static double get_master_clock(VideoState *is)
 {
     /* Pick the configured master clock; fall back to the other stream's
      * clock when the preferred stream is absent. */
     switch (is->av_sync_type) {
     case AV_SYNC_VIDEO_MASTER:
         return is->video_st ? get_video_clock(is) : get_audio_clock(is);
     case AV_SYNC_AUDIO_MASTER:
         return is->audio_st ? get_audio_clock(is) : get_video_clock(is);
     default:
         return get_external_clock(is);
     }
 }
 
72ea344b
 /* seek in the stream */
2ef46053
 static void stream_seek(VideoState *is, int64_t pos, int64_t rel, int seek_by_bytes)
 {
     /* Record the request for the read thread to pick up; ignore it if
      * one is already pending so a queued seek is never overwritten. */
     if (is->seek_req)
         return;
     is->seek_pos = pos;
     is->seek_rel = rel;
     if (seek_by_bytes)
         is->seek_flags |= AVSEEK_FLAG_BYTE;
     else
         is->seek_flags &= ~AVSEEK_FLAG_BYTE;
     is->seek_req = 1;
 }
 
 /* pause or resume the video */
ab7fdbab
 static void stream_toggle_pause(VideoState *is)
 {
     if (is->paused) {
         /* Sample the wall clock once: the original code called
          * av_gettime() three times while computing mutually dependent
          * values, letting a few microseconds of skew creep into the
          * frame timer / pts / drift relationship on every unpause. */
         double time = av_gettime() / 1000000.0;
 
         is->frame_timer += time + is->video_current_pts_drift - is->video_current_pts;
         if(is->read_pause_return != AVERROR(ENOSYS)){
             /* the demuxer really paused: resync the pts from its drift */
             is->video_current_pts = is->video_current_pts_drift + time;
         }
         is->video_current_pts_drift = is->video_current_pts - time;
     }
     is->paused = !is->paused;
 }
 
d38c9e7a
 /* Compute the wall-clock time (seconds, same epoch as av_gettime()/1e6)
  * at which the picture with pts frame_current_pts should be shown,
  * advancing is->frame_timer and the last-frame bookkeeping on the way. */
 static double compute_target_time(double frame_current_pts, VideoState *is)
 {
     /* diff must be initialized: when video is the sync master the
      * slave-sync branch below never assigns it, yet av_dlog() reads it
      * unconditionally — reading an uninitialized double is UB. */
     double delay, sync_threshold, diff = 0;
 
     /* compute nominal delay */
     delay = frame_current_pts - is->frame_last_pts;
     if (delay <= 0 || delay >= 10.0) {
         /* if incorrect delay, use previous one */
         delay = is->frame_last_delay;
     } else {
         is->frame_last_delay = delay;
     }
     is->frame_last_pts = frame_current_pts;
 
     /* update delay to follow master synchronisation source */
     if (((is->av_sync_type == AV_SYNC_AUDIO_MASTER && is->audio_st) ||
          is->av_sync_type == AV_SYNC_EXTERNAL_CLOCK)) {
         /* if video is slave, we try to correct big delays by
            duplicating or deleting a frame */
         diff = get_video_clock(is) - get_master_clock(is);
 
         /* skip or repeat frame. We take into account the
            delay to compute the threshold. I still don't know
            if it is the best guess */
         sync_threshold = FFMAX(AV_SYNC_THRESHOLD, delay);
         if (fabs(diff) < AV_NOSYNC_THRESHOLD) {
             if (diff <= -sync_threshold)
                 delay = 0;
             else if (diff >= sync_threshold)
                 delay = 2 * delay;
         }
     }
     is->frame_timer += delay;
 
     av_dlog(NULL, "video: delay=%0.3f pts=%0.3f A-V=%f\n",
             delay, frame_current_pts, -diff);
 
     return is->frame_timer;
 }
 
01310af2
 /* called to display each frame */
4a22ea4d
 /* Handle one FF_REFRESH_EVENT: show the next queued picture whose
  * display time has arrived, drop late frames if -framedrop allows,
  * expire finished subtitles and print the periodic status line. */
 static void video_refresh(void *opaque)
01310af2
 {
     VideoState *is = opaque;
     VideoPicture *vp;
638c9d91
 
72ce053b
     SubPicture *sp, *sp2;
01310af2
 
     if (is->video_st) {
d38c9e7a
 retry:
01310af2
         if (is->pictq_size == 0) {
d38c9e7a
             //nothing to do, no picture to display in the que
01310af2
         } else {
d38c9e7a
             double time= av_gettime()/1000000.0;
             double next_target;
638c9d91
             /* dequeue the picture */
01310af2
             vp = &is->pictq[is->pictq_rindex];
638c9d91
 
d38c9e7a
             /* not yet due: keep the picture queued and return */
             if(time < vp->target_clock)
                 return;
638c9d91
             /* update current video pts */
             is->video_current_pts = vp->pts;
d38c9e7a
             is->video_current_pts_drift = is->video_current_pts - time;
a3cc2160
             is->video_current_pos = vp->pos;
d38c9e7a
             if(is->pictq_size > 1){
                 VideoPicture *nextvp= &is->pictq[(is->pictq_rindex+1)%VIDEO_PICTURE_QUEUE_SIZE];
                 assert(nextvp->target_clock >= vp->target_clock);
                 next_target= nextvp->target_clock;
             }else{
                 next_target= vp->target_clock + is->video_clock - vp->pts; //FIXME pass durations cleanly
             }
6c0f5172
             /* frame is already late for the next deadline: drop it to
              * catch up, when -framedrop permits */
             if((framedrop>0 || (framedrop && is->audio_st)) && time > next_target){
d38c9e7a
                 is->skip_frames *= 1.0 + FRAME_SKIP_FACTOR;
                 if(is->pictq_size > 1 || time > next_target + 0.5){
                     /* update queue size and signal for next picture */
                     if (++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE)
                         is->pictq_rindex = 0;
 
                     SDL_LockMutex(is->pictq_mutex);
                     is->pictq_size--;
                     SDL_CondSignal(is->pictq_cond);
                     SDL_UnlockMutex(is->pictq_mutex);
                     goto retry;
                 }
             }
638c9d91
 
72ce053b
             /* free subtitles whose display window has passed */
             if(is->subtitle_st) {
                 if (is->subtitle_stream_changed) {
                     SDL_LockMutex(is->subpq_mutex);
115329f1
 
72ce053b
                     while (is->subpq_size) {
                         free_subpicture(&is->subpq[is->subpq_rindex]);
115329f1
 
72ce053b
                         /* update queue size and signal for next picture */
                         if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE)
                             is->subpq_rindex = 0;
115329f1
 
72ce053b
                         is->subpq_size--;
                     }
                     is->subtitle_stream_changed = 0;
 
                     SDL_CondSignal(is->subpq_cond);
                     SDL_UnlockMutex(is->subpq_mutex);
                 } else {
                     if (is->subpq_size > 0) {
                         sp = &is->subpq[is->subpq_rindex];
 
                         if (is->subpq_size > 1)
                             sp2 = &is->subpq[(is->subpq_rindex + 1) % SUBPICTURE_QUEUE_SIZE];
                         else
                             sp2 = NULL;
 
                         if ((is->video_current_pts > (sp->pts + ((float) sp->sub.end_display_time / 1000)))
                                 || (sp2 && is->video_current_pts > (sp2->pts + ((float) sp2->sub.start_display_time / 1000))))
                         {
                             free_subpicture(sp);
 
                             /* update queue size and signal for next picture */
                             if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE)
                                 is->subpq_rindex = 0;
 
                             SDL_LockMutex(is->subpq_mutex);
                             is->subpq_size--;
                             SDL_CondSignal(is->subpq_cond);
                             SDL_UnlockMutex(is->subpq_mutex);
                         }
                     }
                 }
             }
 
01310af2
             /* display picture */
112c4b87
             if (!display_disable)
24d13ebc
                 video_display(is);
115329f1
 
01310af2
             /* update queue size and signal for next picture */
             if (++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE)
                 is->pictq_rindex = 0;
115329f1
 
01310af2
             SDL_LockMutex(is->pictq_mutex);
             is->pictq_size--;
             SDL_CondSignal(is->pictq_cond);
             SDL_UnlockMutex(is->pictq_mutex);
         }
     } else if (is->audio_st) {
         /* draw the next audio frame */
 
         /* if only audio stream, then display the audio bars (better
            than nothing, just to test the implementation */
115329f1
 
01310af2
         /* display picture */
112c4b87
         if (!display_disable)
24d13ebc
             video_display(is);
01310af2
     }
     /* status line on stdout, refreshed at most every 30ms */
     if (show_status) {
         static int64_t last_time;
         int64_t cur_time;
72ce053b
         int aqsize, vqsize, sqsize;
638c9d91
         double av_diff;
115329f1
 
01310af2
         cur_time = av_gettime();
1e1a0b18
         if (!last_time || (cur_time - last_time) >= 30000) {
01310af2
             aqsize = 0;
             vqsize = 0;
72ce053b
             sqsize = 0;
01310af2
             if (is->audio_st)
                 aqsize = is->audioq.size;
             if (is->video_st)
                 vqsize = is->videoq.size;
72ce053b
             if (is->subtitle_st)
                 sqsize = is->subtitleq.size;
638c9d91
             av_diff = 0;
             if (is->audio_st && is->video_st)
                 av_diff = get_audio_clock(is) - get_video_clock(is);
382f3a5b
             printf("%7.2f A-V:%7.3f s:%3.1f aq=%5dKB vq=%5dKB sq=%5dB f=%"PRId64"/%"PRId64"   \r",
d6705a27
                    get_master_clock(is),
                    av_diff,
                    FFMAX(is->skip_frames-1, 0),
                    aqsize / 1024,
                    vqsize / 1024,
                    sqsize,
                    is->video_st ? is->video_st->codec->pts_correction_num_faulty_dts : 0,
                    is->video_st ? is->video_st->codec->pts_correction_num_faulty_pts : 0);
01310af2
             fflush(stdout);
             last_time = cur_time;
         }
     }
 }
 
 /* allocate a picture (needs to do that in main thread to avoid
    potential locking problems */
 /* (Re)create the SDL YUV overlay for the next write slot of the
  * picture queue at the current output dimensions, then signal the
  * decoder thread waiting in queue_picture().  Runs in the main thread
  * (triggered via FF_ALLOC_EVENT). */
 static void alloc_picture(void *opaque)
 {
     VideoState *is = opaque;
     VideoPicture *vp;
 
     vp = &is->pictq[is->pictq_windex];
 
     if (vp->bmp)
         SDL_FreeYUVOverlay(vp->bmp);
 
917d2bb3
 #if CONFIG_AVFILTER
     if (vp->picref)
7fce481a
         avfilter_unref_buffer(vp->picref);
917d2bb3
     vp->picref = NULL;
 
     vp->width   = is->out_video_filter->inputs[0]->w;
     vp->height  = is->out_video_filter->inputs[0]->h;
     vp->pix_fmt = is->out_video_filter->inputs[0]->format;
 #else
     vp->width   = is->video_st->codec->width;
     vp->height  = is->video_st->codec->height;
     vp->pix_fmt = is->video_st->codec->pix_fmt;
 #endif
 
     vp->bmp = SDL_CreateYUVOverlay(vp->width, vp->height,
115329f1
                                    SDL_YV12_OVERLAY,
61890b02
                                    screen);
cb036f90
     if (!vp->bmp || vp->bmp->pitches[0] < vp->width) {
         /* SDL allocates a buffer smaller than requested if the video
          * overlay hardware is unable to support the requested size. */
         fprintf(stderr, "Error: the video system does not support an image\n"
aa78a6d6
                         "size of %dx%d pixels. Try using -lowres or -vf \"scale=w:h\"\n"
cb036f90
                         "to reduce the image size.\n", vp->width, vp->height );
         do_exit();
     }
01310af2
 
     SDL_LockMutex(is->pictq_mutex);
     vp->allocated = 1;
     SDL_CondSignal(is->pictq_cond);
     SDL_UnlockMutex(is->pictq_mutex);
 }
 
c2606259
 /* Derive the picture's pts, convert/copy the decoded frame into the
  * next free SDL overlay and append it to the picture queue.  Blocks
  * while the queue is full.  Returns -1 when the queue was aborted,
  * 0 otherwise. */
 static int queue_picture(VideoState *is, AVFrame *src_frame, double pts1, int64_t pos)
01310af2
 {
     VideoPicture *vp;
c2606259
     double frame_delay, pts = pts1;
 
     /* compute the exact PTS for the picture if it is omitted in the stream
      * pts1 is the dts of the pkt / pts of the frame */
     if (pts != 0) {
         /* update video clock with pts, if present */
         is->video_clock = pts;
     } else {
         pts = is->video_clock;
     }
     /* update video clock for next frame */
     frame_delay = av_q2d(is->video_st->codec->time_base);
     /* for MPEG2, the frame can be repeated, so we update the
        clock accordingly */
     frame_delay += src_frame->repeat_pict * (frame_delay * 0.5);
     is->video_clock += frame_delay;
 
 #if defined(DEBUG_SYNC) && 0
     printf("frame_type=%c clock=%0.3f pts=%0.3f\n",
301183d9
            av_get_picture_type_char(src_frame->pict_type), pts, pts1);
c2606259
 #endif
a6f395d6
 
01310af2
     /* wait until we have space to put a new picture */
     SDL_LockMutex(is->pictq_mutex);
d38c9e7a
 
     /* queue full and no refresh pending: raise the frame-skip rate */
     if(is->pictq_size>=VIDEO_PICTURE_QUEUE_SIZE && !is->refresh)
         is->skip_frames= FFMAX(1.0 - FRAME_SKIP_FACTOR, is->skip_frames * (1.0-FRAME_SKIP_FACTOR));
 
01310af2
     while (is->pictq_size >= VIDEO_PICTURE_QUEUE_SIZE &&
            !is->videoq.abort_request) {
         SDL_CondWait(is->pictq_cond, is->pictq_mutex);
     }
     SDL_UnlockMutex(is->pictq_mutex);
115329f1
 
01310af2
     if (is->videoq.abort_request)
         return -1;
 
     vp = &is->pictq[is->pictq_windex];
 
     /* alloc or resize hardware picture buffer */
115329f1
     if (!vp->bmp ||
917d2bb3
 #if CONFIG_AVFILTER
         vp->width  != is->out_video_filter->inputs[0]->w ||
         vp->height != is->out_video_filter->inputs[0]->h) {
 #else
01f4895c
         vp->width != is->video_st->codec->width ||
         vp->height != is->video_st->codec->height) {
917d2bb3
 #endif
01310af2
         SDL_Event event;
 
         vp->allocated = 0;
 
         /* the allocation must be done in the main thread to avoid
            locking problems */
         event.type = FF_ALLOC_EVENT;
         event.user.data1 = is;
         SDL_PushEvent(&event);
115329f1
 
01310af2
         /* wait until the picture is allocated */
         SDL_LockMutex(is->pictq_mutex);
         while (!vp->allocated && !is->videoq.abort_request) {
             SDL_CondWait(is->pictq_cond, is->pictq_mutex);
         }
         SDL_UnlockMutex(is->pictq_mutex);
 
         if (is->videoq.abort_request)
             return -1;
     }
 
638c9d91
     /* if the frame is not skipped, then display it */
01310af2
     if (vp->bmp) {
fbf1b885
         AVPicture pict;
917d2bb3
 #if CONFIG_AVFILTER
         if(vp->picref)
7fce481a
             avfilter_unref_buffer(vp->picref);
917d2bb3
         vp->picref = src_frame->opaque;
 #endif
fbf1b885
 
01310af2
         /* get a pointer on the bitmap */
         SDL_LockYUVOverlay (vp->bmp);
 
fbf1b885
         memset(&pict,0,sizeof(AVPicture));
         /* chroma plane indices are swapped: SDL's YV12 stores V before U */
01310af2
         pict.data[0] = vp->bmp->pixels[0];
         pict.data[1] = vp->bmp->pixels[2];
         pict.data[2] = vp->bmp->pixels[1];
 
         pict.linesize[0] = vp->bmp->pitches[0];
         pict.linesize[1] = vp->bmp->pitches[2];
         pict.linesize[2] = vp->bmp->pitches[1];
917d2bb3
 
 #if CONFIG_AVFILTER
         //FIXME use direct rendering
a6f395d6
         av_picture_copy(&pict, (AVPicture *)src_frame,
917d2bb3
                         vp->pix_fmt, vp->width, vp->height);
 #else
e43d7a18
         sws_flags = av_get_int(sws_opts, "sws_flags", NULL);
3ac56e28
         is->img_convert_ctx = sws_getCachedContext(is->img_convert_ctx,
917d2bb3
             vp->width, vp->height, vp->pix_fmt, vp->width, vp->height,
05d33d86
             PIX_FMT_YUV420P, sws_flags, NULL, NULL, NULL);
3ac56e28
         if (is->img_convert_ctx == NULL) {
26ba8235
             fprintf(stderr, "Cannot initialize the conversion context\n");
             exit(1);
         }
3ac56e28
         sws_scale(is->img_convert_ctx, src_frame->data, src_frame->linesize,
917d2bb3
                   0, vp->height, pict.data, pict.linesize);
 #endif
01310af2
         /* update the bitmap content */
         SDL_UnlockYUVOverlay(vp->bmp);
 
638c9d91
         vp->pts = pts;
1a620dd7
         vp->pos = pos;
01310af2
 
         /* now we can update the picture count */
         if (++is->pictq_windex == VIDEO_PICTURE_QUEUE_SIZE)
             is->pictq_windex = 0;
         SDL_LockMutex(is->pictq_mutex);
d38c9e7a
         vp->target_clock= compute_target_time(vp->pts, is);
 
01310af2
         is->pictq_size++;
         SDL_UnlockMutex(is->pictq_mutex);
     }
638c9d91
     return 0;
 }
 
3966a574
 /* Pull the next packet from the video queue and decode it.
  * Returns 1 when *frame holds a displayable picture, 0 when no
  * picture was produced (no output yet, flush packet, or the frame was
  * dropped for catch-up), and -1 when the queue was aborted. */
 static int get_video_frame(VideoState *is, AVFrame *frame, int64_t *pts, AVPacket *pkt)
01310af2
 {
adba9c63
     int len1 av_unused, got_picture, i;
01310af2
 
199c18a7
     if (packet_queue_get(&is->videoq, pkt, 1) < 0)
         return -1;
6c7d3ead
 
199c18a7
     /* flush packet (queued on seek): reset decoder and timing state */
     if (pkt->data == flush_pkt.data) {
         avcodec_flush_buffers(is->video_st->codec);
6c7d3ead
 
199c18a7
         SDL_LockMutex(is->pictq_mutex);
         //Make sure there are no long delay timers (ideally we should just flush the que but thats harder)
         for (i = 0; i < VIDEO_PICTURE_QUEUE_SIZE; i++) {
             is->pictq[i].target_clock= 0;
         }
         while (is->pictq_size && !is->videoq.abort_request) {
             SDL_CondWait(is->pictq_cond, is->pictq_mutex);
39c6a118
         }
199c18a7
         is->video_current_pos = -1;
         SDL_UnlockMutex(is->pictq_mutex);
39c6a118
 
199c18a7
         is->frame_last_pts = AV_NOPTS_VALUE;
         is->frame_last_delay = 0;
         is->frame_timer = (double)av_gettime() / 1000000.0;
         is->skip_frames = 1;
         is->skip_frames_index = 0;
         return 0;
     }
7a8bfa5d
 
199c18a7
     len1 = avcodec_decode_video2(is->video_st->codec,
                                  frame, &got_picture,
                                  pkt);
 
     if (got_picture) {
         /* pts source selected by -drp: -1 = best effort (default) */
         if (decoder_reorder_pts == -1) {
76ad67ca
             *pts = frame->best_effort_timestamp;
199c18a7
         } else if (decoder_reorder_pts) {
2fa1d7b3
             *pts = frame->pkt_pts;
199c18a7
         } else {
fd0ae17a
             *pts = frame->pkt_dts;
199c18a7
         }
 
         if (*pts == AV_NOPTS_VALUE) {
             *pts = 0;
99e0b12b
         }
41db429d
 
d38c9e7a
         /* honour the frame-skip ratio computed by the display side */
         is->skip_frames_index += 1;
         if(is->skip_frames_index >= is->skip_frames){
             is->skip_frames_index -= FFMAX(is->skip_frames, 1.0);
             return 1;
         }
 
     }
917d2bb3
     return 0;
 }
 
 #if CONFIG_AVFILTER
 /* Private context of ffplay's source filter (input_filter). */
 typedef struct {
     VideoState *is;     /* owning player state */
     AVFrame *frame;     /* scratch frame filled by get_video_frame() */
dd0c789b
     int use_dr1;        /* non-zero when direct rendering is active */
917d2bb3
 } FilterPriv;
 
dd0c789b
 /* get_buffer() callback installed by input_init(): hand the decoder a
  * buffer taken from the filter chain so frames reach the graph without
  * an extra copy (direct rendering).  Returns 0 on success, -1 on
  * invalid dimensions or buffer-allocation failure. */
 static int input_get_buffer(AVCodecContext *codec, AVFrame *pic)
 {
     AVFilterContext *ctx = codec->opaque;
ecc8dada
     AVFilterBufferRef  *ref;
dd0c789b
     int perms = AV_PERM_WRITE;
65929418
     int i, w, h, stride[4];
dd0c789b
     unsigned edge;
dc172ecc
     int pixel_size;
dd0c789b
 
f6d71b39
     av_assert0(codec->flags & CODEC_FLAG_EMU_EDGE);
 
0ccabeea
     if (codec->codec->capabilities & CODEC_CAP_NEG_LINESIZES)
         perms |= AV_PERM_NEG_LINESIZES;
 
dd0c789b
     /* translate the decoder's buffer hints into filter permissions */
     if(pic->buffer_hints & FF_BUFFER_HINTS_VALID) {
         if(pic->buffer_hints & FF_BUFFER_HINTS_READABLE) perms |= AV_PERM_READ;
         if(pic->buffer_hints & FF_BUFFER_HINTS_PRESERVE) perms |= AV_PERM_PRESERVE;
         if(pic->buffer_hints & FF_BUFFER_HINTS_REUSABLE) perms |= AV_PERM_REUSE2;
     }
     if(pic->reference) perms |= AV_PERM_READ | AV_PERM_PRESERVE;
 
     w = codec->width;
     h = codec->height;
9f8008a9
 
     if(av_image_check_size(w, h, 0, codec))
         return -1;
 
dd0c789b
     /* enlarge the buffer for codec alignment and edge emulation */
     avcodec_align_dimensions2(codec, &w, &h, stride);
     edge = codec->flags & CODEC_FLAG_EMU_EDGE ? 0 : avcodec_get_edge_width();
     w += edge << 1;
     h += edge << 1;
 
     if(!(ref = avfilter_get_video_buffer(ctx->outputs[0], perms, w, h)))
         return -1;
 
dc172ecc
     pixel_size = av_pix_fmt_descriptors[ref->format].comp[0].step_minus1+1;
cc80caff
     ref->video->w = codec->width;
     ref->video->h = codec->height;
cfb7e6e6
     for(i = 0; i < 4; i ++) {
d54e0948
         unsigned hshift = (i == 1 || i == 2) ? av_pix_fmt_descriptors[ref->format].log2_chroma_w : 0;
         unsigned vshift = (i == 1 || i == 2) ? av_pix_fmt_descriptors[ref->format].log2_chroma_h : 0;
dd0c789b
 
3635c07b
         if (ref->data[i]) {
dc172ecc
             ref->data[i]    += ((edge * pixel_size) >> hshift) + ((edge * ref->linesize[i]) >> vshift);
3635c07b
         }
dd0c789b
         pic->data[i]     = ref->data[i];
         pic->linesize[i] = ref->linesize[i];
     }
     pic->opaque = ref;
     pic->age    = INT_MAX;
     pic->type   = FF_BUFFER_TYPE_USER;
867ab7fb
     pic->reordered_opaque = codec->reordered_opaque;
393cbb96
     if(codec->pkt) pic->pkt_pts = codec->pkt->pts;
     else           pic->pkt_pts = AV_NOPTS_VALUE;
dd0c789b
     return 0;
 }
 
 /* release_buffer() callback: drop our reference on the filter buffer
  * backing this frame and clear the frame's plane pointers. */
 static void input_release_buffer(AVCodecContext *codec, AVFrame *pic)
 {
     AVFilterBufferRef *ref = pic->opaque;
 
     memset(pic->data, 0, sizeof(pic->data));
     avfilter_unref_buffer(ref);
 }
 
12bd3c1f
 /* reget_buffer() callback: reuse the existing filter buffer for the
  * frame, allocating a fresh readable one only when the frame has no
  * data yet.  Fails if the picture geometry/format changed. */
 static int input_reget_buffer(AVCodecContext *codec, AVFrame *pic)
 {
ecc8dada
     AVFilterBufferRef *ref = pic->opaque;
12bd3c1f
 
     if (pic->data[0] == NULL) {
         pic->buffer_hints |= FF_BUFFER_HINTS_READABLE;
         return codec->get_buffer(codec, pic);
     }
 
cc80caff
     if ((codec->width != ref->video->w) || (codec->height != ref->video->h) ||
d54e0948
         (codec->pix_fmt != ref->format)) {
12bd3c1f
         av_log(codec, AV_LOG_ERROR, "Picture properties changed.\n");
         return -1;
     }
 
     pic->reordered_opaque = codec->reordered_opaque;
393cbb96
     if(codec->pkt) pic->pkt_pts = codec->pkt->pts;
     else           pic->pkt_pts = AV_NOPTS_VALUE;
12bd3c1f
     return 0;
 }
 
917d2bb3
 /* init callback of ffplay's source filter: opaque is the VideoState.
  * Installs the direct-rendering buffer callbacks on the video decoder
  * when the codec supports DR1, and allocates the scratch frame. */
 static int input_init(AVFilterContext *ctx, const char *args, void *opaque)
 {
     FilterPriv *priv = ctx->priv;
dd0c789b
     AVCodecContext *codec;
917d2bb3
     if(!opaque) return -1;
 
     priv->is = opaque;
dd0c789b
     codec    = priv->is->video_st->codec;
     codec->opaque = ctx;
454fab72
     if((codec->codec->capabilities & CODEC_CAP_DR1)
     ) {
175714c0
         av_assert0(codec->flags & CODEC_FLAG_EMU_EDGE);
dd0c789b
         /* decode straight into filter-chain buffers */
         priv->use_dr1 = 1;
         codec->get_buffer     = input_get_buffer;
         codec->release_buffer = input_release_buffer;
12bd3c1f
         codec->reget_buffer   = input_reget_buffer;
b38f008e
         codec->thread_safe_callbacks = 1;
dd0c789b
     }
 
917d2bb3
     priv->frame = avcodec_alloc_frame();
 
     return 0;
 }
 
 /* uninit callback: free the scratch frame allocated in input_init(). */
 static void input_uninit(AVFilterContext *ctx)
 {
     av_free(((FilterPriv *)ctx->priv)->frame);
 }
 
 /* request_frame callback: decode until a picture comes out of
  * get_video_frame(), wrap it (DR1) or copy it into a filter buffer,
  * and push it down the output link.  Returns 0 or -1 on abort. */
 static int input_request_frame(AVFilterLink *link)
 {
     FilterPriv *priv = link->src->priv;
ecc8dada
     AVFilterBufferRef *picref;
3966a574
     int64_t pts = 0;
917d2bb3
     AVPacket pkt;
     int ret;
 
     while (!(ret = get_video_frame(priv->is, priv->frame, &pts, &pkt)))
         av_free_packet(&pkt);
     if (ret < 0)
         return -1;
 
3a1aaf7b
     /* with direct rendering the frame already lives in a filter buffer;
      * otherwise allocate one and copy the planes */
     if(priv->use_dr1 && priv->frame->opaque) {
7fce481a
         picref = avfilter_ref_buffer(priv->frame->opaque, ~0);
dd0c789b
     } else {
cf097cbc
         picref = avfilter_get_video_buffer(link, AV_PERM_WRITE, link->w, link->h);
34017fd9
         av_image_copy(picref->data, picref->linesize,
4afbcf46
                       priv->frame->data, priv->frame->linesize,
                       picref->format, link->w, link->h);
dd0c789b
     }
917d2bb3
     av_free_packet(&pkt);
 
566666ca
     avfilter_copy_frame_props(picref, priv->frame);
917d2bb3
     picref->pts = pts;
566666ca
 
c41c5b02
     avfilter_start_frame(link, picref);
917d2bb3
     avfilter_draw_slice(link, 0, link->h, 1);
     avfilter_end_frame(link);
 
     return 0;
 }
 
 /* Restrict the source filter's output to the decoder's native pixel
  * format so no conversion is forced at the graph input. */
 static int input_query_formats(AVFilterContext *ctx)
 {
     FilterPriv *priv = ctx->priv;
     enum PixelFormat fmts[2];
 
     fmts[0] = priv->is->video_st->codec->pix_fmt;
     fmts[1] = PIX_FMT_NONE;
     avfilter_set_common_pixel_formats(ctx, avfilter_make_format_list(fmts));
     return 0;
 }
 
 /* Propagate the decoder's frame size and the stream's time base to
  * the source filter's output link. */
 static int input_config_props(AVFilterLink *link)
 {
     FilterPriv *priv = link->src->priv;
     AVCodecContext *dec = priv->is->video_st->codec;
 
     link->w         = dec->width;
     link->h         = dec->height;
     link->time_base = priv->is->video_st->time_base;
 
     return 0;
 }
 
 /* Source filter feeding decoded frames from the video decoder into
  * the filter graph; it has no inputs and one video output pad. */
 static AVFilter input_filter =
 {
     .name      = "ffplay_input",
 
     .priv_size = sizeof(FilterPriv),
 
     .init      = input_init,
     .uninit    = input_uninit,
 
     .query_formats = input_query_formats,
 
     .inputs    = (AVFilterPad[]) {{ .name = NULL }},
     .outputs   = (AVFilterPad[]) {{ .name = "default",
72415b2a
                                     .type = AVMEDIA_TYPE_VIDEO,
917d2bb3
                                     .request_frame = input_request_frame,
                                     .config_props  = input_config_props, },
                                   { .name = NULL }},
 };
 
8904a0f1
 /* Build the video filter graph: ffplay's source filter -> optional
  * user chain (vfilters string) -> buffersink forced to YUV420P.
  * On success stores the sink in is->out_video_filter and returns 0;
  * otherwise returns a negative AVERROR code. */
 static int configure_video_filters(AVFilterGraph *graph, VideoState *is, const char *vfilters)
917d2bb3
 {
3f073fa2
     char sws_flags_str[128];
8904a0f1
     int ret;
44f669e7
     enum PixelFormat pix_fmts[] = { PIX_FMT_YUV420P, PIX_FMT_NONE };
917d2bb3
     AVFilterContext *filt_src = NULL, *filt_out = NULL;
3f073fa2
     /* pass the global sws flags to any auto-inserted scale filters */
     snprintf(sws_flags_str, sizeof(sws_flags_str), "flags=%d", sws_flags);
     graph->scale_sws_opts = av_strdup(sws_flags_str);
917d2bb3
 
8904a0f1
     if ((ret = avfilter_graph_create_filter(&filt_src, &input_filter, "src",
                                             NULL, is, graph)) < 0)
037be76e
         goto the_end;
44f669e7
     if ((ret = avfilter_graph_create_filter(&filt_out, avfilter_get_by_name("buffersink"), "out",
                                             NULL, pix_fmts, graph)) < 0)
037be76e
         goto the_end;
917d2bb3
 
     if(vfilters) {
c5354942
         AVFilterInOut *outputs = avfilter_inout_alloc();
         AVFilterInOut *inputs  = avfilter_inout_alloc();
917d2bb3
 
         outputs->name    = av_strdup("in");
7313132b
         outputs->filter_ctx = filt_src;
917d2bb3
         outputs->pad_idx = 0;
         outputs->next    = NULL;
 
         inputs->name    = av_strdup("out");
7313132b
         inputs->filter_ctx = filt_out;
917d2bb3
         inputs->pad_idx = 0;
         inputs->next    = NULL;
 
6119b23a
         if ((ret = avfilter_graph_parse(graph, vfilters, &inputs, &outputs, NULL)) < 0)
917d2bb3
             goto the_end;
         av_freep(&vfilters);
     } else {
8904a0f1
         /* no user filters: connect the source directly to the sink */
         if ((ret = avfilter_link(filt_src, 0, filt_out, 0)) < 0)
             goto the_end;
917d2bb3
     }
 
8904a0f1
     if ((ret = avfilter_graph_config(graph, NULL)) < 0)
2a24df93
         goto the_end;
917d2bb3
 
     is->out_video_filter = filt_out;
8904a0f1
 the_end:
     return ret;
 }
 
 #endif  /* CONFIG_AVFILTER */
 
 /* Video decoding thread: pull frames (directly or through the filter
  * graph when CONFIG_AVFILTER), rescale their pts to the stream time
  * base and hand them to queue_picture() until the queue aborts. */
 static int video_thread(void *arg)
 {
     VideoState *is = arg;
     AVFrame *frame= avcodec_alloc_frame();
b4434475
     int64_t pts_int = AV_NOPTS_VALUE, pos = -1;
8904a0f1
     double pts;
     int ret;
 
 #if CONFIG_AVFILTER
     AVFilterGraph *graph = avfilter_graph_alloc();
     AVFilterContext *filt_out = NULL;
 
     if ((ret = configure_video_filters(graph, is, vfilters)) < 0)
         goto the_end;
     filt_out = is->out_video_filter;
917d2bb3
 #endif
 
     for(;;) {
 #if !CONFIG_AVFILTER
         AVPacket pkt;
387b4ac9
 #else
ff0652e5
         AVFilterBufferRef *picref;
44f669e7
         AVRational tb = filt_out->inputs[0]->time_base;
917d2bb3
 #endif
         while (is->paused && !is->videoq.abort_request)
             SDL_Delay(10);
 #if CONFIG_AVFILTER
44f669e7
         ret = av_vsink_buffer_get_video_buffer_ref(filt_out, &picref, 0);
ff0652e5
         if (picref) {
44f669e7
             avfilter_fill_frame_from_video_buffer_ref(frame, picref);
ff0652e5
             pts_int = picref->pts;
             pos     = picref->pos;
             frame->opaque = picref;
         }
387b4ac9
 
         /* the graph may output a different time base than the stream */
         if (av_cmp_q(tb, is->video_st->time_base)) {
97b925ea
             av_unused int64_t pts1 = pts_int;
387b4ac9
             pts_int = av_rescale_q(pts_int, tb, is->video_st->time_base);
97b925ea
             av_dlog(NULL, "video_thread(): "
                     "tb:%d/%d pts:%"PRId64" -> tb:%d/%d pts:%"PRId64"\n",
                     tb.num, tb.den, pts1,
                     is->video_st->time_base.num, is->video_st->time_base.den, pts_int);
387b4ac9
         }
917d2bb3
 #else
         ret = get_video_frame(is, frame, &pts_int, &pkt);
539647c6
         pos = pkt.pos;
b93e12fd
         av_free_packet(&pkt);
917d2bb3
 #endif
 
         if (ret < 0) goto the_end;
 
87757508
 #if CONFIG_AVFILTER
44f669e7
         if (!picref)
917d2bb3
             continue;
87757508
 #endif
917d2bb3
 
3966a574
         pts = pts_int*av_q2d(is->video_st->time_base);
917d2bb3
 
c2606259
         ret = queue_picture(is, frame, pts, pos);
b93e12fd
 
917d2bb3
         if (ret < 0)
             goto the_end;
 
115329f1
         /* single-step mode: pause again after each queued frame */
         if (step)
bba04f1e
             if (cur_stream)
ab7fdbab
                 stream_toggle_pause(cur_stream);
01310af2
     }
  the_end:
917d2bb3
 #if CONFIG_AVFILTER
ab543afe
     avfilter_graph_free(&graph);
917d2bb3
 #endif
c6b1edc9
     av_free(frame);
01310af2
     return 0;
 }
 
72ce053b
 /* Subtitle decoding thread: decode bitmap subtitles, convert their
  * palettes from RGBA to CCIR YUVA in place and append them to the
  * subpicture queue (blocking while it is full). */
 static int subtitle_thread(void *arg)
 {
     VideoState *is = arg;
     SubPicture *sp;
     AVPacket pkt1, *pkt = &pkt1;
adba9c63
     int len1 av_unused, got_subtitle;
72ce053b
     double pts;
     int i, j;
     int r, g, b, y, u, v, a;
 
     for(;;) {
         while (is->paused && !is->subtitleq.abort_request) {
             SDL_Delay(10);
         }
         if (packet_queue_get(&is->subtitleq, pkt, 1) < 0)
             break;
115329f1
 
39c6a118
         /* flush packet (queued on seek): just reset the decoder */
         if(pkt->data == flush_pkt.data){
             avcodec_flush_buffers(is->subtitle_st->codec);
             continue;
         }
72ce053b
         SDL_LockMutex(is->subpq_mutex);
         while (is->subpq_size >= SUBPICTURE_QUEUE_SIZE &&
                !is->subtitleq.abort_request) {
             SDL_CondWait(is->subpq_cond, is->subpq_mutex);
         }
         SDL_UnlockMutex(is->subpq_mutex);
115329f1
 
72ce053b
         if (is->subtitleq.abort_request)
             goto the_end;
115329f1
 
72ce053b
         sp = &is->subpq[is->subpq_windex];
 
        /* NOTE: ipts is the PTS of the _first_ picture beginning in
            this packet, if any */
         pts = 0;
         if (pkt->pts != AV_NOPTS_VALUE)
             pts = av_q2d(is->subtitle_st->time_base)*pkt->pts;
 
bea18375
         len1 = avcodec_decode_subtitle2(is->subtitle_st->codec,
115329f1
                                     &sp->sub, &got_subtitle,
bea18375
                                     pkt);
72ce053b
         /* only bitmap subtitles (format == 0) are handled here */
         if (got_subtitle && sp->sub.format == 0) {
             sp->pts = pts;
115329f1
 
72ce053b
             /* convert each rect's palette from RGBA to YUVA in place */
             for (i = 0; i < sp->sub.num_rects; i++)
             {
db4fac64
                 for (j = 0; j < sp->sub.rects[i]->nb_colors; j++)
72ce053b
                 {
25b4c651
                     RGBA_IN(r, g, b, a, (uint32_t*)sp->sub.rects[i]->pict.data[1] + j);
72ce053b
                     y = RGB_TO_Y_CCIR(r, g, b);
                     u = RGB_TO_U_CCIR(r, g, b, 0);
                     v = RGB_TO_V_CCIR(r, g, b, 0);
25b4c651
                     YUVA_OUT((uint32_t*)sp->sub.rects[i]->pict.data[1] + j, y, u, v, a);
72ce053b
                 }
             }
 
             /* now we can update the picture count */
             if (++is->subpq_windex == SUBPICTURE_QUEUE_SIZE)
                 is->subpq_windex = 0;
             SDL_LockMutex(is->subpq_mutex);
             is->subpq_size++;
             SDL_UnlockMutex(is->subpq_mutex);
         }
         av_free_packet(pkt);
     }
  the_end:
     return 0;
 }
 
01310af2
 /* copy samples for viewing in editor window: append the decoded samples
    into the circular buffer is->sample_array, wrapping at its capacity */
 static void update_sample_display(VideoState *is, short *samples, int samples_size)
 {
     int remaining = samples_size / sizeof(short);
 
     while (remaining > 0) {
         /* copy at most up to the end of the circular buffer per iteration */
         int chunk = SAMPLE_ARRAY_SIZE - is->sample_array_index;
         if (chunk > remaining)
             chunk = remaining;
         memcpy(is->sample_array + is->sample_array_index, samples,
                chunk * sizeof(short));
         samples += chunk;
         is->sample_array_index += chunk;
         if (is->sample_array_index >= SAMPLE_ARRAY_SIZE)
             is->sample_array_index = 0;
         remaining -= chunk;
     }
 }
 
 /* return the new audio buffer size (samples can be added or deleted
    to get better sync if video or external master clock) */
115329f1
 static int synchronize_audio(VideoState *is, short *samples,
638c9d91
                              int samples_size1, double pts)
01310af2
 {
638c9d91
     int n, samples_size;
01310af2
     double ref_clock;
115329f1
 
01f4895c
     n = 2 * is->audio_st->codec->channels;
638c9d91
     samples_size = samples_size1;
01310af2
 
     /* if not master, then we try to remove or add samples to correct the clock */
     if (((is->av_sync_type == AV_SYNC_VIDEO_MASTER && is->video_st) ||
638c9d91
          is->av_sync_type == AV_SYNC_EXTERNAL_CLOCK)) {
         double diff, avg_diff;
01310af2
         int wanted_size, min_size, max_size, nb_samples;
115329f1
 
638c9d91
         ref_clock = get_master_clock(is);
         diff = get_audio_clock(is) - ref_clock;
115329f1
 
638c9d91
         if (diff < AV_NOSYNC_THRESHOLD) {
             is->audio_diff_cum = diff + is->audio_diff_avg_coef * is->audio_diff_cum;
             if (is->audio_diff_avg_count < AUDIO_DIFF_AVG_NB) {
                 /* not enough measures to have a correct estimate */
                 is->audio_diff_avg_count++;
             } else {
                 /* estimate the A-V difference */
                 avg_diff = is->audio_diff_cum * (1.0 - is->audio_diff_avg_coef);
 
                 if (fabs(avg_diff) >= is->audio_diff_threshold) {
01f4895c
                     wanted_size = samples_size + ((int)(diff * is->audio_st->codec->sample_rate) * n);
638c9d91
                     nb_samples = samples_size / n;
115329f1
 
638c9d91
                     min_size = ((nb_samples * (100 - SAMPLE_CORRECTION_PERCENT_MAX)) / 100) * n;
                     max_size = ((nb_samples * (100 + SAMPLE_CORRECTION_PERCENT_MAX)) / 100) * n;
                     if (wanted_size < min_size)
                         wanted_size = min_size;
                     else if (wanted_size > max_size)
                         wanted_size = max_size;
115329f1
 
638c9d91
                     /* add or remove samples to correction the synchro */
                     if (wanted_size < samples_size) {
                         /* remove samples */
                         samples_size = wanted_size;
                     } else if (wanted_size > samples_size) {
                         uint8_t *samples_end, *q;
                         int nb;
115329f1
 
638c9d91
                         /* add samples */
                         nb = (samples_size - wanted_size);
                         samples_end = (uint8_t *)samples + samples_size - n;
                         q = samples_end + n;
                         while (nb > 0) {
                             memcpy(q, samples_end, n);
                             q += n;
                             nb -= n;
                         }
                         samples_size = wanted_size;
                     }
                 }
 #if 0
115329f1
                 printf("diff=%f adiff=%f sample_diff=%d apts=%0.3f vpts=%0.3f %f\n",
                        diff, avg_diff, samples_size - samples_size1,
638c9d91
                        is->audio_clock, is->video_clock, is->audio_diff_threshold);
 #endif
01310af2
             }
638c9d91
         } else {
             /* too big difference : may be initial PTS errors, so
                reset A-V filter */
             is->audio_diff_avg_count = 0;
             is->audio_diff_cum = 0;
01310af2
         }
     }
 
     return samples_size;
 }
 
 /* decode one audio frame and returns its uncompressed size */
5a4476e2
 /* Decoded data is left in is->audio_buf (pointing at audio_buf1, or
    audio_buf2 after sample-format conversion); *pts_ptr receives the PTS of
    the decoded data. Returns -1 when paused or the queue is aborted. */
 static int audio_decode_frame(VideoState *is, double *pts_ptr)
01310af2
 {
bea18375
     AVPacket *pkt_temp = &is->audio_pkt_temp;
01310af2
     AVPacket *pkt = &is->audio_pkt;
abdff646
     AVCodecContext *dec= is->audio_st->codec;
72ea344b
     int n, len1, data_size;
01310af2
     double pts;
 
     for(;;) {
72ea344b
         /* NOTE: the audio packet can contain several frames */
bea18375
         while (pkt_temp->size > 0) {
5a4476e2
             data_size = sizeof(is->audio_buf1);
bea18375
             len1 = avcodec_decode_audio3(dec,
5a4476e2
                                         (int16_t *)is->audio_buf1, &data_size,
bea18375
                                         pkt_temp);
72ea344b
             if (len1 < 0) {
                 /* if error, we skip the frame */
bea18375
                 pkt_temp->size = 0;
01310af2
                 break;
72ea344b
             }
115329f1
 
bea18375
             /* advance past the consumed bytes within the packet */
             pkt_temp->data += len1;
             pkt_temp->size -= len1;
72ea344b
             if (data_size <= 0)
                 continue;
5a4476e2
 
             /* lazily (re)create the S16 converter whenever the decoder's
                output sample format changes */
             if (dec->sample_fmt != is->audio_src_fmt) {
                 if (is->reformat_ctx)
                     av_audio_convert_free(is->reformat_ctx);
5d6e4c16
                 is->reformat_ctx= av_audio_convert_alloc(AV_SAMPLE_FMT_S16, 1,
5a4476e2
                                                          dec->sample_fmt, 1, NULL, 0);
                 if (!is->reformat_ctx) {
                     fprintf(stderr, "Cannot convert %s sample format to %s sample format\n",
ba7d6e79
                         av_get_sample_fmt_name(dec->sample_fmt),
5d6e4c16
                         av_get_sample_fmt_name(AV_SAMPLE_FMT_S16));
5a4476e2
                         break;
                 }
                 is->audio_src_fmt= dec->sample_fmt;
             }
 
             if (is->reformat_ctx) {
                 const void *ibuf[6]= {is->audio_buf1};
                 void *obuf[6]= {is->audio_buf2};
e6c52cee
                 int istride[6]= {av_get_bytes_per_sample(dec->sample_fmt)};
5a4476e2
                 int ostride[6]= {2};
                 int len= data_size/istride[0];
                 if (av_audio_convert(is->reformat_ctx, obuf, ostride, ibuf, istride, len)<0) {
                     printf("av_audio_convert() failed\n");
                     break;
                 }
                 is->audio_buf= is->audio_buf2;
                 /* FIXME: existing code assume that data_size equals framesize*channels*2
                           remove this legacy cruft */
                 data_size= len*2;
             }else{
                 is->audio_buf= is->audio_buf1;
             }
 
72ea344b
             /* if no pts, then compute it */
             pts = is->audio_clock;
             *pts_ptr = pts;
abdff646
             n = 2 * dec->channels;
115329f1
             is->audio_clock += (double)data_size /
abdff646
                 (double)(n * dec->sample_rate);
1f6b9cc3
 #ifdef DEBUG
72ea344b
             {
                 static double last_clock;
                 printf("audio: delay=%0.3f clock=%0.3f pts=%0.3f\n",
                        is->audio_clock - last_clock,
                        is->audio_clock, pts);
                 last_clock = is->audio_clock;
01310af2
             }
72ea344b
 #endif
             return data_size;
01310af2
         }
 
72ea344b
         /* free the current packet */
         if (pkt->data)
01310af2
             av_free_packet(pkt);
115329f1
 
72ea344b
         if (is->paused || is->audioq.abort_request) {
             return -1;
         }
115329f1
 
01310af2
         /* read next packet */
         if (packet_queue_get(&is->audioq, pkt, 1) < 0)
             return -1;
39c6a118
         /* a flush packet is queued after a seek: reset the decoder */
         if(pkt->data == flush_pkt.data){
abdff646
             avcodec_flush_buffers(dec);
39c6a118
             continue;
         }
 
bea18375
         pkt_temp->data = pkt->data;
         pkt_temp->size = pkt->size;
115329f1
 
72ea344b
         /* if update the audio clock with the pts */
         if (pkt->pts != AV_NOPTS_VALUE) {
c0df9d75
             is->audio_clock = av_q2d(is->audio_st->time_base)*pkt->pts;
72ea344b
         }
01310af2
     }
 }
 
 /* prepare a new audio buffer */
358061f6
 static void sdl_audio_callback(void *opaque, Uint8 *stream, int len)
01310af2
 {
     VideoState *is = opaque;
     int audio_size, len1;
     double pts;
 
     audio_callback_time = av_gettime();
115329f1
 
01310af2
     while (len > 0) {
         if (is->audio_buf_index >= is->audio_buf_size) {
5a4476e2
            audio_size = audio_decode_frame(is, &pts);
01310af2
            if (audio_size < 0) {
                 /* if error, just output silence */
1a1078fa
                is->audio_buf = is->audio_buf1;
01310af2
                is->audio_buf_size = 1024;
                memset(is->audio_buf, 0, is->audio_buf_size);
            } else {
f8b8c694
                if (is->show_mode != SHOW_MODE_VIDEO)
01310af2
                    update_sample_display(is, (int16_t *)is->audio_buf, audio_size);
115329f1
                audio_size = synchronize_audio(is, (int16_t *)is->audio_buf, audio_size,
01310af2
                                               pts);
                is->audio_buf_size = audio_size;
            }
            is->audio_buf_index = 0;
         }
         len1 = is->audio_buf_size - is->audio_buf_index;
         if (len1 > len)
             len1 = len;
         memcpy(stream, (uint8_t *)is->audio_buf + is->audio_buf_index, len1);
         len -= len1;
         stream += len1;
         is->audio_buf_index += len1;
     }
 }
 
 /* open a given stream. Return 0 if OK */
 /* Finds a decoder for stream stream_index of is->ic, configures it from the
    file-scope option variables, opens it, and (for audio) opens the SDL
    audio device; finally starts the matching decoding thread/queue. */
 static int stream_component_open(VideoState *is, int stream_index)
 {
     AVFormatContext *ic = is->ic;
fe74099a
     AVCodecContext *avctx;
01310af2
     AVCodec *codec;
     SDL_AudioSpec wanted_spec, spec;
 
     if (stream_index < 0 || stream_index >= ic->nb_streams)
         return -1;
fe74099a
     avctx = ic->streams[stream_index]->codec;
115329f1
 
01310af2
     /* prepare audio output */
72415b2a
     if (avctx->codec_type == AVMEDIA_TYPE_AUDIO) {
fe74099a
         /* ask the decoder for at most 2 output channels */
         if (avctx->channels > 0) {
             avctx->request_channels = FFMIN(2, avctx->channels);
94eadc8b
         } else {
fe74099a
             avctx->request_channels = 2;
638c9d91
         }
01310af2
     }
 
fe74099a
     codec = avcodec_find_decoder(avctx->codec_id);
99119bdf
     if (!codec)
         return -1;
 
fe74099a
     /* apply user-configurable decoding options (file-scope settings) */
     avctx->workaround_bugs = workaround_bugs;
     avctx->lowres = lowres;
abaf8c38
     if(avctx->lowres > codec->max_lowres){
         av_log(avctx, AV_LOG_WARNING, "The maximum value for lowres supported by the decoder is %d\n",
                 codec->max_lowres);
         avctx->lowres= codec->max_lowres;
     }
     if(avctx->lowres) avctx->flags |= CODEC_FLAG_EMU_EDGE;
fe74099a
     avctx->idct_algo= idct;
     if(fast) avctx->flags2 |= CODEC_FLAG2_FAST;
     avctx->skip_frame= skip_frame;
     avctx->skip_idct= skip_idct;
     avctx->skip_loop_filter= skip_loop_filter;
     avctx->error_recognition= error_recognition;
     avctx->error_concealment= error_concealment;
043d2ff2
     avctx->thread_count= thread_count;
fe74099a
 
0093ebc2
     set_context_opts(avctx, avcodec_opts[avctx->codec_type], 0, codec);
e43d7a18
 
175714c0
     if(codec->capabilities & CODEC_CAP_DR1)
         avctx->flags |= CODEC_FLAG_EMU_EDGE;
 
99119bdf
     if (avcodec_open(avctx, codec) < 0)
01310af2
         return -1;
51b73087
 
     /* prepare audio output */
72415b2a
     if (avctx->codec_type == AVMEDIA_TYPE_AUDIO) {
33af5335
         if(avctx->sample_rate <= 0 || avctx->channels <= 0){
             fprintf(stderr, "Invalid sample rate or channel count\n");
             return -1;
         }
fe74099a
         /* open the SDL audio device with the decoder's parameters */
         wanted_spec.freq = avctx->sample_rate;
51b73087
         wanted_spec.format = AUDIO_S16SYS;
fe74099a
         wanted_spec.channels = avctx->channels;
51b73087
         wanted_spec.silence = 0;
         wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE;
         wanted_spec.callback = sdl_audio_callback;
         wanted_spec.userdata = is;
         if (SDL_OpenAudio(&wanted_spec, &spec) < 0) {
             fprintf(stderr, "SDL_OpenAudio: %s\n", SDL_GetError());
             return -1;
         }
         is->audio_hw_buf_size = spec.size;
5d6e4c16
         is->audio_src_fmt= AV_SAMPLE_FMT_S16;
51b73087
     }
 
3f3fe38d
     /* bind the stream to the player state and start its decoding machinery */
     ic->streams[stream_index]->discard = AVDISCARD_DEFAULT;
fe74099a
     switch(avctx->codec_type) {
72415b2a
     case AVMEDIA_TYPE_AUDIO:
01310af2
         is->audio_stream = stream_index;
         is->audio_st = ic->streams[stream_index];
         is->audio_buf_size = 0;
         is->audio_buf_index = 0;
638c9d91
 
         /* init averaging filter */
         is->audio_diff_avg_coef = exp(log(0.01) / AUDIO_DIFF_AVG_NB);
         is->audio_diff_avg_count = 0;
         /* since we do not have a precise anough audio fifo fullness,
            we correct audio sync only if larger than this threshold */
fe74099a
         is->audio_diff_threshold = 2.0 * SDL_AUDIO_BUFFER_SIZE / avctx->sample_rate;
638c9d91
 
01310af2
         memset(&is->audio_pkt, 0, sizeof(is->audio_pkt));
         packet_queue_init(&is->audioq);
bb270c08
         SDL_PauseAudio(0);
01310af2
         break;
72415b2a
     case AVMEDIA_TYPE_VIDEO:
01310af2
         is->video_stream = stream_index;
         is->video_st = ic->streams[stream_index];
 
         packet_queue_init(&is->videoq);
         is->video_tid = SDL_CreateThread(video_thread, is);
         break;
72415b2a
     case AVMEDIA_TYPE_SUBTITLE:
72ce053b
         is->subtitle_stream = stream_index;
         is->subtitle_st = ic->streams[stream_index];
         packet_queue_init(&is->subtitleq);
115329f1
 
72ce053b
         is->subtitle_tid = SDL_CreateThread(subtitle_thread, is);
         break;
01310af2
     default:
         break;
     }
     return 0;
 }
 
 /* close one open stream: abort its packet queue, stop its decoding
    thread / audio device, close the codec and detach it from the player
    state. Safe to call with an out-of-range index (no-op). */
 static void stream_component_close(VideoState *is, int stream_index)
 {
     AVFormatContext *ic = is->ic;
fe74099a
     AVCodecContext *avctx;
115329f1
 
72ce053b
     if (stream_index < 0 || stream_index >= ic->nb_streams)
         return;
fe74099a
     avctx = ic->streams[stream_index]->codec;
01310af2
 
fe74099a
     switch(avctx->codec_type) {
72415b2a
     case AVMEDIA_TYPE_AUDIO:
01310af2
         packet_queue_abort(&is->audioq);
 
         SDL_CloseAudio();
 
         packet_queue_end(&is->audioq);
5a4476e2
         if (is->reformat_ctx)
             av_audio_convert_free(is->reformat_ctx);
bc77fce6
         is->reformat_ctx = NULL;
01310af2
         break;
72415b2a
     case AVMEDIA_TYPE_VIDEO:
01310af2
         packet_queue_abort(&is->videoq);
 
         /* note: we also signal this mutex to make sure we deblock the
            video thread in all cases */
         SDL_LockMutex(is->pictq_mutex);
         SDL_CondSignal(is->pictq_cond);
         SDL_UnlockMutex(is->pictq_mutex);
 
         SDL_WaitThread(is->video_tid, NULL);
 
         packet_queue_end(&is->videoq);
         break;
72415b2a
     case AVMEDIA_TYPE_SUBTITLE:
72ce053b
         packet_queue_abort(&is->subtitleq);
115329f1
 
72ce053b
         /* note: we also signal this mutex to make sure we deblock the
            video thread in all cases */
         SDL_LockMutex(is->subpq_mutex);
         is->subtitle_stream_changed = 1;
115329f1
 
72ce053b
         SDL_CondSignal(is->subpq_cond);
         SDL_UnlockMutex(is->subpq_mutex);
 
         SDL_WaitThread(is->subtitle_tid, NULL);
 
         packet_queue_end(&is->subtitleq);
         break;
01310af2
     default:
         break;
     }
 
3f3fe38d
     /* stop the demuxer from queuing packets for this stream */
     ic->streams[stream_index]->discard = AVDISCARD_ALL;
fe74099a
     avcodec_close(avctx);
     switch(avctx->codec_type) {
72415b2a
     case AVMEDIA_TYPE_AUDIO:
01310af2
         is->audio_st = NULL;
         is->audio_stream = -1;
         break;
72415b2a
     case AVMEDIA_TYPE_VIDEO:
01310af2
         is->video_st = NULL;
         is->video_stream = -1;
         break;
72415b2a
     case AVMEDIA_TYPE_SUBTITLE:
72ce053b
         is->subtitle_st = NULL;
         is->subtitle_stream = -1;
         break;
01310af2
     default:
         break;
     }
 }
 
416e3508
 /* since we have only one decoding thread, we can use a global
    variable instead of a thread local variable */
 static VideoState *global_video_state;
 
 /* avio interrupt callback: returns non-zero when the active stream has
    requested an abort, making blocking I/O calls bail out early */
 static int decode_interrupt_cb(void)
 {
     VideoState *vs = global_video_state;
 
     if (!vs)
         return 0;
     return vs->abort_request != 0;
 }
01310af2
 
 /* this thread gets the stream from the disk or the network */
8adf9bb2
 /* Opens the input, selects and opens the best audio/video/subtitle streams,
    then loops: handle pause, handle seek requests, and demux packets into the
    per-stream queues until abort or fatal read error. */
 static int read_thread(void *arg)
01310af2
 {
     VideoState *is = arg;
e8454552
     AVFormatContext *ic = NULL;
6625a3de
     int err, i, ret;
72415b2a
     int st_index[AVMEDIA_TYPE_NB];
01310af2
     AVPacket pkt1, *pkt = &pkt1;
75bb7b0a
     int eof=0;
d834d63b
     int pkt_in_play_range = 0;
e8454552
     AVDictionaryEntry *t;
6299a229
 
6625a3de
     memset(st_index, -1, sizeof(st_index));
01310af2
     is->video_stream = -1;
     is->audio_stream = -1;
72ce053b
     is->subtitle_stream = -1;
01310af2
 
416e3508
     /* let decode_interrupt_cb() abort blocking I/O for this stream */
     global_video_state = is;
80c6e238
     avio_set_interrupt_cb(decode_interrupt_cb);
416e3508
 
e8454552
     err = avformat_open_input(&ic, is->filename, is->iformat, &format_opts);
638c9d91
     if (err < 0) {
         print_error(is->filename, err);
         ret = -1;
         goto fail;
     }
e8454552
     /* any option left in format_opts was not consumed -> user typo */
     if ((t = av_dict_get(format_opts, "", NULL, AV_DICT_IGNORE_SUFFIX))) {
         av_log(NULL, AV_LOG_ERROR, "Option %s not found.\n", t->key);
         ret = AVERROR_OPTION_NOT_FOUND;
         goto fail;
     }
01310af2
     is->ic = ic;
30bc6613
 
     if(genpts)
         ic->flags |= AVFMT_FLAG_GENPTS;
 
24c07998
     err = av_find_stream_info(ic);
     if (err < 0) {
         fprintf(stderr, "%s: could not find codec parameters\n", is->filename);
         ret = -1;
         goto fail;
     }
899681cd
     if(ic->pb)
         ic->pb->eof_reached= 0; //FIXME hack, ffplay maybe should not use url_feof() to test for the end
72ea344b
 
70a4764d
     if(seek_by_bytes<0)
         seek_by_bytes= !!(ic->iformat->flags & AVFMT_TS_DISCONT);
 
72ea344b
     /* if seeking requested, we execute it */
     if (start_time != AV_NOPTS_VALUE) {
         int64_t timestamp;
 
         timestamp = start_time;
         /* add the stream start time */
         if (ic->start_time != AV_NOPTS_VALUE)
             timestamp += ic->start_time;
4ed29207
         ret = avformat_seek_file(ic, -1, INT64_MIN, timestamp, INT64_MAX, 0);
72ea344b
         if (ret < 0) {
115329f1
             fprintf(stderr, "%s: could not seek to position %0.3f\n",
72ea344b
                     is->filename, (double)timestamp / AV_TIME_BASE);
         }
     }
 
406f0f1b
     /* discard everything by default; selected streams are re-enabled in
        stream_component_open() */
     for (i = 0; i < ic->nb_streams; i++)
3f3fe38d
         ic->streams[i]->discard = AVDISCARD_ALL;
be732b70
     if (!video_disable)
406f0f1b
         st_index[AVMEDIA_TYPE_VIDEO] =
             av_find_best_stream(ic, AVMEDIA_TYPE_VIDEO,
                                 wanted_stream[AVMEDIA_TYPE_VIDEO], -1, NULL, 0);
be732b70
     if (!audio_disable)
406f0f1b
         st_index[AVMEDIA_TYPE_AUDIO] =
             av_find_best_stream(ic, AVMEDIA_TYPE_AUDIO,
                                 wanted_stream[AVMEDIA_TYPE_AUDIO],
                                 st_index[AVMEDIA_TYPE_VIDEO],
                                 NULL, 0);
be732b70
     if (!video_disable)
406f0f1b
         st_index[AVMEDIA_TYPE_SUBTITLE] =
             av_find_best_stream(ic, AVMEDIA_TYPE_SUBTITLE,
                                 wanted_stream[AVMEDIA_TYPE_SUBTITLE],
                                 (st_index[AVMEDIA_TYPE_AUDIO] >= 0 ?
                                  st_index[AVMEDIA_TYPE_AUDIO] :
                                  st_index[AVMEDIA_TYPE_VIDEO]),
                                 NULL, 0);
01310af2
     if (show_status) {
0ebf4754
         av_dump_format(ic, 0, is->filename, 0);
01310af2
     }
 
f521746b
     is->show_mode = show_mode;
 
01310af2
     /* open the streams */
72415b2a
     if (st_index[AVMEDIA_TYPE_AUDIO] >= 0) {
         stream_component_open(is, st_index[AVMEDIA_TYPE_AUDIO]);
01310af2
     }
 
077a8d61
     ret=-1;
72415b2a
     if (st_index[AVMEDIA_TYPE_VIDEO] >= 0) {
         ret= stream_component_open(is, st_index[AVMEDIA_TYPE_VIDEO]);
077a8d61
     }
d38c9e7a
     is->refresh_tid = SDL_CreateThread(refresh_thread, is);
1d6c82d4
     /* fall back to the RDFT visualization when no video stream opened */
     if (is->show_mode == SHOW_MODE_NONE)
         is->show_mode = ret >= 0 ? SHOW_MODE_VIDEO : SHOW_MODE_RDFT;
01310af2
 
72415b2a
     if (st_index[AVMEDIA_TYPE_SUBTITLE] >= 0) {
         stream_component_open(is, st_index[AVMEDIA_TYPE_SUBTITLE]);
16a59a7b
     }
 
01310af2
     if (is->video_stream < 0 && is->audio_stream < 0) {
638c9d91
         fprintf(stderr, "%s: could not open codecs\n", is->filename);
         ret = -1;
01310af2
         goto fail;
     }
 
     for(;;) {
         if (is->abort_request)
             break;
416e3508
         if (is->paused != is->last_paused) {
             is->last_paused = is->paused;
72ea344b
             if (is->paused)
f5668147
                 is->read_pause_return= av_read_pause(ic);
72ea344b
             else
                 av_read_play(ic);
416e3508
         }
2f642393
 #if CONFIG_RTSP_DEMUXER
         if (is->paused && !strcmp(ic->iformat->name, "rtsp")) {
416e3508
             /* wait 10 ms to avoid trying to get another packet */
             /* XXX: horrible */
             SDL_Delay(10);
             continue;
         }
400738b1
 #endif
72ea344b
         if (is->seek_req) {
8e606cc8
             int64_t seek_target= is->seek_pos;
4ed29207
             int64_t seek_min= is->seek_rel > 0 ? seek_target - is->seek_rel + 2: INT64_MIN;
             int64_t seek_max= is->seek_rel < 0 ? seek_target - is->seek_rel - 2: INT64_MAX;
 //FIXME the +-2 is due to rounding being not done in the correct direction in generation
 //      of the seek_pos/seek_rel variables
8e606cc8
 
4ed29207
             ret = avformat_seek_file(is->ic, -1, seek_min, seek_target, seek_max, is->seek_flags);
72ea344b
             if (ret < 0) {
                 fprintf(stderr, "%s: error while seeking\n", is->ic->filename);
e6c0297f
             }else{
                 /* flush every queue and inject a flush packet so the
                    decoding threads reset their codec state */
                 if (is->audio_stream >= 0) {
                     packet_queue_flush(&is->audioq);
39c6a118
                     packet_queue_put(&is->audioq, &flush_pkt);
e6c0297f
                 }
72ce053b
                 if (is->subtitle_stream >= 0) {
                     packet_queue_flush(&is->subtitleq);
39c6a118
                     packet_queue_put(&is->subtitleq, &flush_pkt);
72ce053b
                 }
e6c0297f
                 if (is->video_stream >= 0) {
                     packet_queue_flush(&is->videoq);
39c6a118
                     packet_queue_put(&is->videoq, &flush_pkt);
e6c0297f
                 }
72ea344b
             }
             is->seek_req = 0;
e45aeb38
             eof= 0;
72ea344b
         }
416e3508
 
01310af2
         /* if the queue are full, no need to read more */
79ee4683
         if (   is->audioq.size + is->videoq.size + is->subtitleq.size > MAX_QUEUE_SIZE
             || (   (is->audioq   .size  > MIN_AUDIOQ_SIZE || is->audio_stream<0)
                 && (is->videoq   .nb_packets > MIN_FRAMES || is->video_stream<0)
                 && (is->subtitleq.nb_packets > MIN_FRAMES || is->subtitle_stream<0))) {
01310af2
             /* wait 10 ms */
             SDL_Delay(10);
             continue;
         }
27d97fde
         /* at EOF: queue a null packet to drain delayed video frames, then
            either loop the stream, auto-exit, or idle until new input */
         if(eof) {
9dc41767
             if(is->video_stream >= 0){
26534fe8
                 av_init_packet(pkt);
                 pkt->data=NULL;
                 pkt->size=0;
                 pkt->stream_index= is->video_stream;
                 packet_queue_put(&is->videoq, pkt);
9dc41767
             }
b4083171
             SDL_Delay(10);
1922c0a7
             if(is->audioq.size + is->videoq.size + is->subtitleq.size ==0){
                 if(loop!=1 && (!loop || --loop)){
                     stream_seek(cur_stream, start_time != AV_NOPTS_VALUE ? start_time : 0, 0, 0);
                 }else if(autoexit){
                     ret=AVERROR_EOF;
                     goto fail;
                 }
2d1653b0
             }
0b82612d
             eof=0;
600a331c
             continue;
         }
72ea344b
         ret = av_read_frame(ic, pkt);
01310af2
         if (ret < 0) {
27d97fde
             if (ret == AVERROR_EOF || url_feof(ic->pb))
75bb7b0a
                 eof=1;
eb4d1cb9
             if (ic->pb && ic->pb->error)
bb270c08
                 break;
75bb7b0a
             SDL_Delay(100); /* wait for user event */
             continue;
01310af2
         }
d834d63b
         /* check if packet is in play range specified by user, then queue, otherwise discard */
         pkt_in_play_range = duration == AV_NOPTS_VALUE ||
                 (pkt->pts - ic->streams[pkt->stream_index]->start_time) *
                 av_q2d(ic->streams[pkt->stream_index]->time_base) -
                 (double)(start_time != AV_NOPTS_VALUE ? start_time : 0)/1000000
                 <= ((double)duration/1000000);
         if (pkt->stream_index == is->audio_stream && pkt_in_play_range) {
01310af2
             packet_queue_put(&is->audioq, pkt);
d834d63b
         } else if (pkt->stream_index == is->video_stream && pkt_in_play_range) {
01310af2
             packet_queue_put(&is->videoq, pkt);
d834d63b
         } else if (pkt->stream_index == is->subtitle_stream && pkt_in_play_range) {
72ce053b
             packet_queue_put(&is->subtitleq, pkt);
01310af2
         } else {
             av_free_packet(pkt);
         }
     }
     /* wait until the end */
     while (!is->abort_request) {
         SDL_Delay(100);
     }
 
638c9d91
     ret = 0;
01310af2
  fail:
416e3508
     /* disable interrupting */
     global_video_state = NULL;
 
01310af2
     /* close each stream */
     if (is->audio_stream >= 0)
         stream_component_close(is, is->audio_stream);
     if (is->video_stream >= 0)
         stream_component_close(is, is->video_stream);
72ce053b
     if (is->subtitle_stream >= 0)
         stream_component_close(is, is->subtitle_stream);
638c9d91
     if (is->ic) {
         av_close_input_file(is->ic);
         is->ic = NULL; /* safety */
     }
80c6e238
     avio_set_interrupt_cb(NULL);
416e3508
 
638c9d91
     /* on error, tell the event loop to shut the player down */
     if (ret != 0) {
         SDL_Event event;
115329f1
 
638c9d91
         event.type = FF_QUIT_EVENT;
         event.user.data1 = is;
         SDL_PushEvent(&event);
     }
01310af2
     return 0;
 }
 
638c9d91
 /* allocate a VideoState for `filename`, create its synchronization
    primitives and start the demuxing thread. Returns NULL on failure.
    `iformat` may force a specific input format (NULL = autodetect). */
 static VideoState *stream_open(const char *filename, AVInputFormat *iformat)
 {
     VideoState *is;
 
     is = av_mallocz(sizeof(VideoState));
     if (!is)
         return NULL;
     av_strlcpy(is->filename, filename, sizeof(is->filename));
     is->iformat = iformat;
     is->ytop = 0;
     is->xleft = 0;
 
     /* start video display */
     is->pictq_mutex = SDL_CreateMutex();
     is->pictq_cond = SDL_CreateCond();
 
     is->subpq_mutex = SDL_CreateMutex();
     is->subpq_cond = SDL_CreateCond();
 
     is->av_sync_type = av_sync_type;
     is->read_tid = SDL_CreateThread(read_thread, is);
     if (!is->read_tid) {
         /* BUGFIX: release the SDL primitives created above; the previous
            code leaked them on this error path */
         SDL_DestroyCond(is->subpq_cond);
         SDL_DestroyMutex(is->subpq_mutex);
         SDL_DestroyCond(is->pictq_cond);
         SDL_DestroyMutex(is->pictq_mutex);
         av_free(is);
         return NULL;
     }
     return is;
 }
 
7b49ce2e
 /* switch to the next usable stream of the given codec_type, wrapping
    around; for subtitles, cycling past the last stream selects no stream
    (index -1). The old stream is closed and the new one opened. */
 static void stream_cycle_channel(VideoState *is, int codec_type)
638c9d91
 {
     AVFormatContext *ic = is->ic;
     int start_index, stream_index;
     AVStream *st;
 
72415b2a
     if (codec_type == AVMEDIA_TYPE_VIDEO)
638c9d91
         start_index = is->video_stream;
72415b2a
     else if (codec_type == AVMEDIA_TYPE_AUDIO)
638c9d91
         start_index = is->audio_stream;
72ce053b
     else
         start_index = is->subtitle_stream;
72415b2a
     /* nothing to cycle from (subtitles may legitimately start at -1) */
     if (start_index < (codec_type == AVMEDIA_TYPE_SUBTITLE ? -1 : 0))
638c9d91
         return;
     stream_index = start_index;
     for(;;) {
         if (++stream_index >= is->ic->nb_streams)
72ce053b
         {
72415b2a
             if (codec_type == AVMEDIA_TYPE_SUBTITLE)
72ce053b
             {
                 stream_index = -1;
                 goto the_end;
             } else
                 stream_index = 0;
         }
638c9d91
         /* wrapped all the way around without finding another stream */
         if (stream_index == start_index)
             return;
         st = ic->streams[stream_index];
01f4895c
         if (st->codec->codec_type == codec_type) {
638c9d91
             /* check that parameters are OK */
             switch(codec_type) {
72415b2a
             case AVMEDIA_TYPE_AUDIO:
01f4895c
                 if (st->codec->sample_rate != 0 &&
                     st->codec->channels != 0)
638c9d91
                     goto the_end;
                 break;
72415b2a
             case AVMEDIA_TYPE_VIDEO:
             case AVMEDIA_TYPE_SUBTITLE:
638c9d91
                 goto the_end;
             default:
                 break;
             }
         }
     }
  the_end:
     stream_component_close(is, start_index);
     stream_component_open(is, stream_index);
 }
 
 
7b49ce2e
 static void toggle_full_screen(void)
01310af2
 {
     is_full_screen = !is_full_screen;
fb84155b
     video_open(cur_stream);
01310af2
 }
 
7b49ce2e
 static void toggle_pause(void)
01310af2
 {
     if (cur_stream)
ab7fdbab
         stream_toggle_pause(cur_stream);
bba04f1e
     step = 0;
 }
 
7b49ce2e
 static void step_to_next_frame(void)
bba04f1e
 {
     if (cur_stream) {
19cc524a
         /* if the stream is paused unpause it, then step */
bba04f1e
         if (cur_stream->paused)
ab7fdbab
             stream_toggle_pause(cur_stream);
bba04f1e
     }
     step = 1;
01310af2
 }
 
7b49ce2e
 static void toggle_audio_display(void)
01310af2
 {
     if (cur_stream) {
f5968788
         int bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00);
f8b8c694
         cur_stream->show_mode = (cur_stream->show_mode + 1) % SHOW_MODE_NB;
f5968788
         fill_rectangle(screen,
                     cur_stream->xleft, cur_stream->ytop, cur_stream->width, cur_stream->height,
                     bgcolor);
         SDL_UpdateRect(screen, cur_stream->xleft, cur_stream->ytop, cur_stream->width, cur_stream->height);
01310af2
     }
 }
 
 /* handle an event sent by the GUI */
7b49ce2e
/* Main SDL event pump: dispatches keyboard, mouse, resize and the
 * application-defined FF_* events forever; only do_exit() ends it. */
static void event_loop(void)
{
    SDL_Event event;
    double incr, pos, frac;

    for(;;) {
        double x;
        SDL_WaitEvent(&event);
        switch(event.type) {
        case SDL_KEYDOWN:
            if (exit_on_keydown) {
                do_exit();
                break;
            }
            switch(event.key.keysym.sym) {
            case SDLK_ESCAPE:
            case SDLK_q:
                do_exit();
                break;
            case SDLK_f:
                toggle_full_screen();
                break;
            case SDLK_p:
            case SDLK_SPACE:
                toggle_pause();
                break;
            case SDLK_s: //S: Step to next frame
                step_to_next_frame();
                break;
            case SDLK_a:
                if (cur_stream)
                    stream_cycle_channel(cur_stream, AVMEDIA_TYPE_AUDIO);
                break;
            case SDLK_v:
                if (cur_stream)
                    stream_cycle_channel(cur_stream, AVMEDIA_TYPE_VIDEO);
                break;
            case SDLK_t:
                if (cur_stream)
                    stream_cycle_channel(cur_stream, AVMEDIA_TYPE_SUBTITLE);
                break;
            case SDLK_w:
                toggle_audio_display();
                break;
            /* arrow keys seek relative to the current position; the
             * increment is in seconds and is shared via the do_seek label */
            case SDLK_LEFT:
                incr = -10.0;
                goto do_seek;
            case SDLK_RIGHT:
                incr = 10.0;
                goto do_seek;
            case SDLK_UP:
                incr = 60.0;
                goto do_seek;
            case SDLK_DOWN:
                incr = -60.0;
            do_seek:
                if (cur_stream) {
                    if (seek_by_bytes) {
                        /* byte seeking: start from the most recently decoded
                         * packet position, falling back to the current file
                         * offset */
                        if (cur_stream->video_stream >= 0 && cur_stream->video_current_pos>=0){
                            pos= cur_stream->video_current_pos;
                        }else if(cur_stream->audio_stream >= 0 && cur_stream->audio_pkt.pos>=0){
                            pos= cur_stream->audio_pkt.pos;
                        }else
                            pos = avio_tell(cur_stream->ic->pb);
                        /* convert the increment from seconds to bytes using
                         * the container bit rate, or 180 kB/s if unknown */
                        if (cur_stream->ic->bit_rate)
                            incr *= cur_stream->ic->bit_rate / 8.0;
                        else
                            incr *= 180000.0;
                        pos += incr;
                        stream_seek(cur_stream, pos, incr, 1);
                    } else {
                        /* time-based seeking relative to the master clock */
                        pos = get_master_clock(cur_stream);
                        pos += incr;
                        stream_seek(cur_stream, (int64_t)(pos * AV_TIME_BASE), (int64_t)(incr * AV_TIME_BASE), 0);
                    }
                }
                break;
            default:
                break;
            }
            break;
        case SDL_MOUSEBUTTONDOWN:
            if (exit_on_mousedown) {
                do_exit();
                break;
            }
            /* intentional fallthrough: a click seeks like a drag */
        case SDL_MOUSEMOTION:
            if(event.type ==SDL_MOUSEBUTTONDOWN){
                x= event.button.x;
            }else{
                /* only react to motion while a button is held (drag) */
                if(event.motion.state != SDL_PRESSED)
                    break;
                x= event.motion.x;
            }
            if (cur_stream) {
                if(seek_by_bytes || cur_stream->ic->duration<=0){
                    /* no usable duration: map window x to a byte offset */
                    uint64_t size=  avio_size(cur_stream->ic->pb);
                    stream_seek(cur_stream, size*x/cur_stream->width, 0, 1);
                }else{
                    int64_t ts;
                    int ns, hh, mm, ss;
                    int tns, thh, tmm, tss;
                    tns = cur_stream->ic->duration/1000000LL;
                    thh = tns/3600;
                    tmm = (tns%3600)/60;
                    tss = (tns%60);
                    /* fraction of the window width = fraction of duration */
                    frac = x/cur_stream->width;
                    ns = frac*tns;
                    hh = ns/3600;
                    mm = (ns%3600)/60;
                    ss = (ns%60);
                    fprintf(stderr, "Seek to %2.0f%% (%2d:%02d:%02d) of total duration (%2d:%02d:%02d)       \n", frac*100,
                            hh, mm, ss, thh, tmm, tss);
                    ts = frac*cur_stream->ic->duration;
                    if (cur_stream->ic->start_time != AV_NOPTS_VALUE)
                        ts += cur_stream->ic->start_time;
                    stream_seek(cur_stream, ts, 0, 0);
                }
            }
            break;
        case SDL_VIDEORESIZE:
            if (cur_stream) {
                screen = SDL_SetVideoMode(event.resize.w, event.resize.h, 0,
                                          SDL_HWSURFACE|SDL_RESIZABLE|SDL_ASYNCBLIT|SDL_HWACCEL);
                screen_width = cur_stream->width = event.resize.w;
                screen_height= cur_stream->height= event.resize.h;
            }
            break;
        case SDL_QUIT:
        case FF_QUIT_EVENT:
            do_exit();
            break;
        case FF_ALLOC_EVENT:
            /* posted by another thread; window/picture (re)allocation is
             * handled here in the event loop */
            video_open(event.user.data1);
            alloc_picture(event.user.data1);
            break;
        case FF_REFRESH_EVENT:
            video_refresh(event.user.data1);
            /* clear the pending-refresh flag so a new event can be posted */
            cur_stream->refresh=0;
            break;
        default:
            break;
        }
    }
}
 
eb8bc572
 static int opt_frame_size(const char *opt, const char *arg)
e4b89522
 {
126b638e
     if (av_parse_video_size(&frame_width, &frame_height, arg) < 0) {
e4b89522
         fprintf(stderr, "Incorrect frame size\n");
eb8bc572
         return AVERROR(EINVAL);
e4b89522
     }
     if ((frame_width % 2) != 0 || (frame_height % 2) != 0) {
         fprintf(stderr, "Frame size must be a multiple of 2\n");
eb8bc572
         return AVERROR(EINVAL);
e4b89522
     }
eb8bc572
     return 0;
e4b89522
 }
 
a5b3b5f6
/* "-x width": force the displayed width (1..INT_MAX). */
static int opt_width(const char *opt, const char *arg)
{
    screen_width = parse_number_or_die(opt, arg, OPT_INT64, 1, INT_MAX);
    return 0;
}
 
a5b3b5f6
/* "-y height": force the displayed height (1..INT_MAX). */
static int opt_height(const char *opt, const char *arg)
{
    screen_height = parse_number_or_die(opt, arg, OPT_INT64, 1, INT_MAX);
    return 0;
}
 
eb8bc572
 static int opt_format(const char *opt, const char *arg)
01310af2
 {
     file_iformat = av_find_input_format(arg);
     if (!file_iformat) {
         fprintf(stderr, "Unknown input format: %s\n", arg);
eb8bc572
         return AVERROR(EINVAL);
01310af2
     }
eb8bc572
     return 0;
01310af2
 }
61890b02
 
eb8bc572
 static int opt_frame_pix_fmt(const char *opt, const char *arg)
e4b89522
 {
718c7b18
     frame_pix_fmt = av_get_pix_fmt(arg);
eb8bc572
     return 0;
e4b89522
 }
 
b81d6235
 static int opt_sync(const char *opt, const char *arg)
638c9d91
 {
     if (!strcmp(arg, "audio"))
         av_sync_type = AV_SYNC_AUDIO_MASTER;
     else if (!strcmp(arg, "video"))
         av_sync_type = AV_SYNC_VIDEO_MASTER;
     else if (!strcmp(arg, "ext"))
         av_sync_type = AV_SYNC_EXTERNAL_CLOCK;
aab1b7e5
     else {
b81d6235
         fprintf(stderr, "Unknown value for %s: %s\n", opt, arg);
aab1b7e5
         exit(1);
     }
b81d6235
     return 0;
638c9d91
 }
 
e11bc2c6
/* "-ss pos": set the playback start time (parsed as a time string). */
static int opt_seek(const char *opt, const char *arg)
{
    start_time = parse_time_or_die(opt, arg, 1);
    return 0;
}
 
d834d63b
/* "-t duration": limit playback to the given duration (time string). */
static int opt_duration(const char *opt, const char *arg)
{
    duration = parse_time_or_die(opt, arg, 1);
    return 0;
}
 
a5b3b5f6
/* "-threads count": set the decoder thread count.
 * Warns when the binary was built without thread support. */
static int opt_thread_count(const char *opt, const char *arg)
{
    thread_count= parse_number_or_die(opt, arg, OPT_INT64, 0, INT_MAX);
#if !HAVE_THREADS
    fprintf(stderr, "Warning: not compiled with thread support, using thread emulation\n");
#endif
    return 0;
}
115329f1
 
f521746b
 static int opt_show_mode(const char *opt, const char *arg)
 {
     show_mode = !strcmp(arg, "video") ? SHOW_MODE_VIDEO :
                 !strcmp(arg, "waves") ? SHOW_MODE_WAVES :
                 !strcmp(arg, "rdft" ) ? SHOW_MODE_RDFT  :
                 parse_number_or_die(opt, arg, OPT_INT, 0, SHOW_MODE_NB-1);
     return 0;
 }
 
b4af3cf3
 static int opt_input_file(const char *opt, const char *filename)
 {
     if (input_filename) {
         fprintf(stderr, "Argument '%s' provided as input filename, but '%s' was already specified.\n",
                 filename, input_filename);
         exit(1);
     }
     if (!strcmp(filename, "-"))
         filename = "pipe:";
     input_filename = filename;
     return 0;
 }
 
358061f6
/* Command-line option table; common options (e.g. -h, -version) are
 * pulled in from cmdutils via the #include below. Entries either point
 * at a handler function or directly at the global variable they set. */
static const OptionDef options[] = {
#include "cmdutils_common_opts.h"
    { "x", HAS_ARG, {(void*)opt_width}, "force displayed width", "width" },
    { "y", HAS_ARG, {(void*)opt_height}, "force displayed height", "height" },
    { "s", HAS_ARG | OPT_VIDEO, {(void*)opt_frame_size}, "set frame size (WxH or abbreviation)", "size" },
    { "fs", OPT_BOOL, {(void*)&is_full_screen}, "force full screen" },
    { "an", OPT_BOOL, {(void*)&audio_disable}, "disable audio" },
    { "vn", OPT_BOOL, {(void*)&video_disable}, "disable video" },
    { "ast", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_stream[AVMEDIA_TYPE_AUDIO]}, "select desired audio stream", "stream_number" },
    { "vst", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_stream[AVMEDIA_TYPE_VIDEO]}, "select desired video stream", "stream_number" },
    { "sst", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_stream[AVMEDIA_TYPE_SUBTITLE]}, "select desired subtitle stream", "stream_number" },
    { "ss", HAS_ARG, {(void*)&opt_seek}, "seek to a given position in seconds", "pos" },
    { "t", HAS_ARG, {(void*)&opt_duration}, "play  \"duration\" seconds of audio/video", "duration" },
    { "bytes", OPT_INT | HAS_ARG, {(void*)&seek_by_bytes}, "seek by bytes 0=off 1=on -1=auto", "val" },
    { "nodisp", OPT_BOOL, {(void*)&display_disable}, "disable graphical display" },
    { "f", HAS_ARG, {(void*)opt_format}, "force format", "fmt" },
    { "pix_fmt", HAS_ARG | OPT_EXPERT | OPT_VIDEO, {(void*)opt_frame_pix_fmt}, "set pixel format", "format" },
    { "stats", OPT_BOOL | OPT_EXPERT, {(void*)&show_status}, "show status", "" },
    { "bug", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&workaround_bugs}, "workaround bugs", "" },
    { "fast", OPT_BOOL | OPT_EXPERT, {(void*)&fast}, "non spec compliant optimizations", "" },
    { "genpts", OPT_BOOL | OPT_EXPERT, {(void*)&genpts}, "generate pts", "" },
    { "drp", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&decoder_reorder_pts}, "let decoder reorder pts 0=off 1=on -1=auto", ""},
    { "lowres", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&lowres}, "", "" },
    { "skiploop", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_loop_filter}, "", "" },
    { "skipframe", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_frame}, "", "" },
    { "skipidct", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_idct}, "", "" },
    { "idct", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&idct}, "set idct algo",  "algo" },
    { "er", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&error_recognition}, "set error detection threshold (0-4)",  "threshold" },
    { "ec", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&error_concealment}, "set error concealment options",  "bit_mask" },
    { "sync", HAS_ARG | OPT_EXPERT, {(void*)opt_sync}, "set audio-video sync. type (type=audio/video/ext)", "type" },
    { "threads", HAS_ARG | OPT_EXPERT, {(void*)opt_thread_count}, "thread count", "count" },
    { "autoexit", OPT_BOOL | OPT_EXPERT, {(void*)&autoexit}, "exit at the end", "" },
    { "exitonkeydown", OPT_BOOL | OPT_EXPERT, {(void*)&exit_on_keydown}, "exit on key down", "" },
    { "exitonmousedown", OPT_BOOL | OPT_EXPERT, {(void*)&exit_on_mousedown}, "exit on mouse down", "" },
    { "loop", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&loop}, "set number of times the playback shall be looped", "loop count" },
    { "framedrop", OPT_BOOL | OPT_EXPERT, {(void*)&framedrop}, "drop frames when cpu is too slow", "" },
    { "window_title", OPT_STRING | HAS_ARG, {(void*)&window_title}, "set window title", "window title" },
#if CONFIG_AVFILTER
    { "vf", OPT_STRING | HAS_ARG, {(void*)&vfilters}, "video filters", "filter list" },
#endif
    { "rdftspeed", OPT_INT | HAS_ARG| OPT_AUDIO | OPT_EXPERT, {(void*)&rdftspeed}, "rdft speed", "msecs" },
    { "showmode", HAS_ARG, {(void*)opt_show_mode}, "select show mode (0 = video, 1 = waves, 2 = RDFT)", "mode" },
    { "default", HAS_ARG | OPT_AUDIO | OPT_VIDEO | OPT_EXPERT, {(void*)opt_default}, "generic catch all option", "" },
    { "i", HAS_ARG, {(void *)opt_input_file}, "read specified file", "input_file"},
    { NULL, },
};
 
0c2a18cb
 static void show_usage(void)
01310af2
 {
27daa420
     printf("Simple media player\n");
     printf("usage: ffplay [options] input_file\n");
01310af2
     printf("\n");
0c2a18cb
 }
 
7b6b9be8
/* "-h": print usage, the option table summaries, the per-library
 * AVOption lists, and finally the interactive key bindings. */
static int opt_help(const char *opt, const char *arg)
{
    av_log_set_callback(log_callback_help);
    show_usage();
    show_help_options(options, "Main options:\n",
                      OPT_EXPERT, 0);
    show_help_options(options, "\nAdvanced options:\n",
                      OPT_EXPERT, OPT_EXPERT);
    printf("\n");
    av_opt_show2(avcodec_opts[0], NULL,
                 AV_OPT_FLAG_DECODING_PARAM, 0);
    printf("\n");
    av_opt_show2(avformat_opts, NULL,
                 AV_OPT_FLAG_DECODING_PARAM, 0);
#if !CONFIG_AVFILTER
    /* swscale options only apply when scaling is not done by avfilter */
    printf("\n");
    av_opt_show2(sws_opts, NULL,
                 AV_OPT_FLAG_ENCODING_PARAM, 0);
#endif
    printf("\nWhile playing:\n"
           "q, ESC              quit\n"
           "f                   toggle full screen\n"
           "p, SPC              pause\n"
           "a                   cycle audio channel\n"
           "v                   cycle video channel\n"
           "t                   cycle subtitle channel\n"
           "w                   show audio waves\n"
           "s                   activate frame-step mode\n"
           "left/right          seek backward/forward 10 seconds\n"
           "down/up             seek backward/forward 1 minute\n"
           "mouse click         seek to percentage in file corresponding to fraction of width\n"
           );
    return 0;
}
 
/* Program entry point: register libraries, parse the command line,
 * initialize SDL, open the input stream and run the event loop. */
int main(int argc, char **argv)
{
    int flags;

    av_log_set_flags(AV_LOG_SKIP_REPEATED);

    /* register all codecs, demux and protocols */
    avcodec_register_all();
#if CONFIG_AVDEVICE
    avdevice_register_all();
#endif
#if CONFIG_AVFILTER
    avfilter_register_all();
#endif
    av_register_all();

    init_opts();

    show_banner();

    parse_options(argc, argv, options, opt_input_file);

    if (!input_filename) {
        show_usage();
        fprintf(stderr, "An input file must be specified\n");
        fprintf(stderr, "Use -h to get full help or, even better, run 'man ffplay'\n");
        exit(1);
    }

    /* with display disabled, video decoding is pointless too */
    if (display_disable) {
        video_disable = 1;
    }
    flags = SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER;
    if (audio_disable)
        flags &= ~SDL_INIT_AUDIO;
#if !defined(__MINGW32__) && !defined(__APPLE__)
    flags |= SDL_INIT_EVENTTHREAD; /* Not supported on Windows or Mac OS X */
#endif
    if (SDL_Init (flags)) {
        fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
        exit(1);
    }

    if (!display_disable) {
#if HAVE_SDL_VIDEO_SIZE
        /* remember the desktop resolution for full-screen mode */
        const SDL_VideoInfo *vi = SDL_GetVideoInfo();
        fs_screen_width = vi->current_w;
        fs_screen_height = vi->current_h;
#endif
    }

    SDL_EventState(SDL_ACTIVEEVENT, SDL_IGNORE);
    SDL_EventState(SDL_SYSWMEVENT, SDL_IGNORE);
    SDL_EventState(SDL_USEREVENT, SDL_IGNORE);

    /* sentinel packet pushed into the queues to signal a flush on seek */
    av_init_packet(&flush_pkt);
    flush_pkt.data= "FLUSH";

    cur_stream = stream_open(input_filename, file_iformat);

    event_loop();

    /* never returns */

    return 0;
}