libavfilter/vf_codecview.c
 /*
  * Copyright (c) 2002-2004 Michael Niedermayer <michaelni@gmx.at>
  * Copyright (c) 2014 Clément Bœsch <u pkh me>
  *
  * This file is part of FFmpeg.
  *
  * FFmpeg is free software; you can redistribute it and/or
  * modify it under the terms of the GNU Lesser General Public
  * License as published by the Free Software Foundation; either
  * version 2.1 of the License, or (at your option) any later version.
  *
  * FFmpeg is distributed in the hope that it will be useful,
  * but WITHOUT ANY WARRANTY; without even the implied warranty of
  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
  * Lesser General Public License for more details.
  *
  * You should have received a copy of the GNU Lesser General Public
  * License along with FFmpeg; if not, write to the Free Software
  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  */
 
 /**
  * @file
  * Codec debug viewer filter.
  *
  * All the MV drawing code from Michael Niedermayer is extracted from
  * libavcodec/mpegvideo.c.
  *
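  * Example (from doc/filters.texi): overlay the motion vectors exported by
  * the decoder on top of the video:
  *   ffplay -flags2 +export_mvs input.mp4 -vf codecview=mv=pf+bf+bb
  *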
  * TODO: segmentation
  */
 
 #include "libavutil/imgutils.h"
 #include "libavutil/motion_vector.h"
 #include "libavutil/opt.h"
 #include "avfilter.h"
 #include "internal.h"
 
 #define MV_P_FOR  (1<<0)
 #define MV_B_FOR  (1<<1)
 #define MV_B_BACK (1<<2)
 #define MV_TYPE_FOR  (1<<0)
 #define MV_TYPE_BACK (1<<1)
 #define FRAME_TYPE_I (1<<0)
 #define FRAME_TYPE_P (1<<1)
 #define FRAME_TYPE_B (1<<2)
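
 /* The older "mv" option selects (picture type, direction) pairs directly,
  * while the newer "mv_type" and "frame_type" options select the prediction
  * direction and the picture types to visualize independently. */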
 
 typedef struct CodecViewContext {
     const AVClass *class;
     unsigned mv;         ///< bitmask of MV_* flags: (picture type, direction) pairs to draw
     unsigned frame_type; ///< bitmask of FRAME_TYPE_* flags
     unsigned mv_type;    ///< bitmask of MV_TYPE_* flags
     int hsub, vsub;      ///< chroma subsampling shifts of the input format
     int qp;              ///< if set, visualize quantization parameters on the chroma planes
 } CodecViewContext;
 
 #define OFFSET(x) offsetof(CodecViewContext, x)
 #define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
 #define CONST(name, help, val, unit) { name, help, 0, AV_OPT_TYPE_CONST, {.i64=val}, 0, 0, FLAGS, unit }
 
 static const AVOption codecview_options[] = {
     { "mv", "set motion vectors to visualize", OFFSET(mv), AV_OPT_TYPE_FLAGS, {.i64=0}, 0, INT_MAX, FLAGS, "mv" },
         CONST("pf", "forward predicted MVs of P-frames",  MV_P_FOR,  "mv"),
         CONST("bf", "forward predicted MVs of B-frames",  MV_B_FOR,  "mv"),
         CONST("bb", "backward predicted MVs of B-frames", MV_B_BACK, "mv"),
     { "qp", NULL, OFFSET(qp), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, .flags = FLAGS },
     { "mv_type", "set motion vectors type", OFFSET(mv_type), AV_OPT_TYPE_FLAGS, {.i64=0}, 0, INT_MAX, FLAGS, "mv_type" },
     { "mvt",     "set motion vectors type", OFFSET(mv_type), AV_OPT_TYPE_FLAGS, {.i64=0}, 0, INT_MAX, FLAGS, "mv_type" },
         CONST("fp", "forward predicted MVs",  MV_TYPE_FOR,  "mv_type"),
         CONST("bp", "backward predicted MVs", MV_TYPE_BACK, "mv_type"),
     { "frame_type", "set frame types to visualize motion vectors of", OFFSET(frame_type), AV_OPT_TYPE_FLAGS, {.i64=0}, 0, INT_MAX, FLAGS, "frame_type" },
     { "ft",         "set frame types to visualize motion vectors of", OFFSET(frame_type), AV_OPT_TYPE_FLAGS, {.i64=0}, 0, INT_MAX, FLAGS, "frame_type" },
         CONST("if", "I-frames", FRAME_TYPE_I, "frame_type"),
         CONST("pf", "P-frames", FRAME_TYPE_P, "frame_type"),
         CONST("bf", "B-frames", FRAME_TYPE_B, "frame_type"),
     { NULL }
 };
 
 AVFILTER_DEFINE_CLASS(codecview);
 
 static int query_formats(AVFilterContext *ctx)
 {
     // TODO: we can probably add many more pixel formats without any other
     // changes; anything with 8-bit luma in the first plane should work
     static const enum AVPixelFormat pix_fmts[] = {AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE};
     AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
     if (!fmts_list)
         return AVERROR(ENOMEM);
     return ff_set_common_formats(ctx, fmts_list);
 }
 
 /**
  * Clip a line segment so that both x coordinates fall within [0, maxx],
  * adjusting the y coordinates proportionally along the line.
  * @return 1 if the segment lies entirely outside the range, 0 otherwise
  */
 static int clip_line(int *sx, int *sy, int *ex, int *ey, int maxx)
 {
     if (*sx > *ex)
         return clip_line(ex, ey, sx, sy, maxx);
 
     if (*sx < 0) {
         if (*ex < 0)
             return 1;
         *sy = *ey + (*sy - *ey) * (int64_t)*ex / (*ex - *sx);
         *sx = 0;
     }
 
     if (*ex > maxx) {
         if (*sx > maxx)
             return 1;
         *ey = *sy + (*ey - *sy) * (int64_t)(maxx - *sx) / (*ex - *sx);
         *ex = maxx;
     }
     return 0;
 }
 
 /**
  * Draw a line from (ex, ey) -> (sx, sy).
  * The color is blended additively into the buffer; the position along the
  * minor axis is kept in 16.16 fixed point and the color is split between
  * the two nearest pixels as a cheap form of anti-aliasing.
  * @param w width of the image
  * @param h height of the image
  * @param stride stride/linesize of the image
  * @param color color of the line
  */
 static void draw_line(uint8_t *buf, int sx, int sy, int ex, int ey,
                       int w, int h, int stride, int color)
 {
     int x, y, fr, f;
 
     if (clip_line(&sx, &sy, &ex, &ey, w - 1))
         return;
     if (clip_line(&sy, &sx, &ey, &ex, h - 1))
         return;
 
     sx = av_clip(sx, 0, w - 1);
     sy = av_clip(sy, 0, h - 1);
     ex = av_clip(ex, 0, w - 1);
     ey = av_clip(ey, 0, h - 1);
 
     buf[sy * stride + sx] += color;
 
     if (FFABS(ex - sx) > FFABS(ey - sy)) {
         if (sx > ex) {
             FFSWAP(int, sx, ex);
             FFSWAP(int, sy, ey);
         }
         buf += sx + sy * stride;
         ex  -= sx;
         f    = ((ey - sy) << 16) / ex;
         for (x = 0; x <= ex; x++) {
             y  = (x * f) >> 16;
             fr = (x * f) & 0xFFFF;
                    buf[ y      * stride + x] += (color * (0x10000 - fr)) >> 16;
             if(fr) buf[(y + 1) * stride + x] += (color *            fr ) >> 16;
         }
     } else {
         if (sy > ey) {
             FFSWAP(int, sx, ex);
             FFSWAP(int, sy, ey);
         }
         buf += sx + sy * stride;
         ey  -= sy;
         if (ey)
             f = ((ex - sx) << 16) / ey;
         else
             f = 0;
         for (y = 0; y <= ey; y++) {
             x  = (y * f) >> 16;
             fr = (y * f) & 0xFFFF;
                    buf[y * stride + x    ] += (color * (0x10000 - fr)) >> 16;
             if(fr) buf[y * stride + x + 1] += (color *            fr ) >> 16;
         }
     }
 }
 
 /**
  * Draw an arrow from (ex, ey) -> (sx, sy).
  * @param w width of the image
  * @param h height of the image
  * @param stride stride/linesize of the image
  * @param color color of the arrow
  * @param tail if set, flip the orientation of the arrow-head wings
  * @param direction if set, swap the start and end points before drawing
  */
 static void draw_arrow(uint8_t *buf, int sx, int sy, int ex,
                        int ey, int w, int h, int stride, int color, int tail, int direction)
 {
     int dx,dy;
 
     if (direction) {
         FFSWAP(int, sx, ex);
         FFSWAP(int, sy, ey);
     }
 
     sx = av_clip(sx, -100, w + 100);
     sy = av_clip(sy, -100, h + 100);
     ex = av_clip(ex, -100, w + 100);
     ey = av_clip(ey, -100, h + 100);
 
     dx = ex - sx;
     dy = ey - sy;
 
     if (dx * dx + dy * dy > 3 * 3) {
         int rx =  dx + dy;
         int ry = -dx + dy;
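         /* (rx, ry) is (dx, dy) rotated by 45 degrees; it is normalized to a
          * length of about 3 pixels below to form the arrow-head wings. */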
         int length = sqrt((rx * rx + ry * ry) << 8);
 
         // FIXME subpixel accuracy
         rx = ROUNDED_DIV(rx * 3 << 4, length);
         ry = ROUNDED_DIV(ry * 3 << 4, length);
 
         if (tail) {
             rx = -rx;
             ry = -ry;
         }
 
         draw_line(buf, sx, sy, sx + rx, sy + ry, w, h, stride, color);
         draw_line(buf, sx, sy, sx - ry, sy + rx, w, h, stride, color);
     }
     draw_line(buf, sx, sy, ex, ey, w, h, stride, color);
 }
 
 static int filter_frame(AVFilterLink *inlink, AVFrame *frame)
 {
     AVFilterContext *ctx = inlink->dst;
     CodecViewContext *s = ctx->priv;
     AVFilterLink *outlink = ctx->outputs[0];
 
     /* Paint the exported per-macroblock quantizers onto the chroma planes:
      * qp_table has one entry per 16x16 macroblock, so with 4:2:0 chroma a
      * >> 3 of the chroma coordinates selects the right entry. */
     if (s->qp) {
         int qstride, qp_type;
         int8_t *qp_table = av_frame_get_qp_table(frame, &qstride, &qp_type);
 
         if (qp_table) {
             int x, y;
             const int w = AV_CEIL_RSHIFT(frame->width,  s->hsub);
             const int h = AV_CEIL_RSHIFT(frame->height, s->vsub);
             uint8_t *pu = frame->data[1];
             uint8_t *pv = frame->data[2];
             const int lzu = frame->linesize[1];
             const int lzv = frame->linesize[2];
 
             for (y = 0; y < h; y++) {
                 for (x = 0; x < w; x++) {
                     const int qp = ff_norm_qscale(qp_table[(y >> 3) * qstride + (x >> 3)], qp_type) * 128/31;
                     pu[x] = pv[x] = qp;
                 }
                 pu += lzu;
                 pv += lzv;
             }
         }
     }
 
     if (s->mv || s->mv_type) {
         AVFrameSideData *sd = av_frame_get_side_data(frame, AV_FRAME_DATA_MOTION_VECTORS);
         if (sd) {
             int i;
             const AVMotionVector *mvs = (const AVMotionVector *)sd->data;
             const int is_iframe = (s->frame_type & FRAME_TYPE_I) && frame->pict_type == AV_PICTURE_TYPE_I;
             const int is_pframe = (s->frame_type & FRAME_TYPE_P) && frame->pict_type == AV_PICTURE_TYPE_P;
             const int is_bframe = (s->frame_type & FRAME_TYPE_B) && frame->pict_type == AV_PICTURE_TYPE_B;
 
             /* mv->source is negative when the block is predicted from a past
              * frame (forward prediction) and positive when it is predicted
              * from a future frame (backward prediction). */
             for (i = 0; i < sd->size / sizeof(*mvs); i++) {
                 const AVMotionVector *mv = &mvs[i];
                 const int direction = mv->source > 0;
 
                 if (s->mv_type) {
                     const int is_fp = direction == 0 && (s->mv_type & MV_TYPE_FOR);
                     const int is_bp = direction == 1 && (s->mv_type & MV_TYPE_BACK);
 
                     if ((!s->frame_type && (is_fp || is_bp)) ||
                         (is_iframe && is_fp) || (is_iframe && is_bp) ||
                         (is_pframe && is_fp) ||
                         (is_bframe && is_fp) || (is_bframe && is_bp))
                         draw_arrow(frame->data[0], mv->dst_x, mv->dst_y, mv->src_x, mv->src_y,
                                    frame->width, frame->height, frame->linesize[0],
                                    100, 0, direction);
                 } else if (s->mv)
                     if ((direction == 0 && (s->mv & MV_P_FOR)  && frame->pict_type == AV_PICTURE_TYPE_P) ||
                         (direction == 0 && (s->mv & MV_B_FOR)  && frame->pict_type == AV_PICTURE_TYPE_B) ||
                         (direction == 1 && (s->mv & MV_B_BACK) && frame->pict_type == AV_PICTURE_TYPE_B))
                         draw_arrow(frame->data[0], mv->dst_x, mv->dst_y, mv->src_x, mv->src_y,
                                    frame->width, frame->height, frame->linesize[0],
                                    100, 0, direction);
             }
         }
     }
 
     return ff_filter_frame(outlink, frame);
 }
 
 static int config_input(AVFilterLink *inlink)
 {
     AVFilterContext *ctx = inlink->dst;
     CodecViewContext *s = ctx->priv;
     const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
 
     s->hsub = desc->log2_chroma_w;
     s->vsub = desc->log2_chroma_h;
     return 0;
 }
 
 static const AVFilterPad codecview_inputs[] = {
     {
         .name           = "default",
         .type           = AVMEDIA_TYPE_VIDEO,
         .filter_frame   = filter_frame,
         .config_props   = config_input,
         .needs_writable = 1,
     },
     { NULL }
 };
 
 static const AVFilterPad codecview_outputs[] = {
     {
         .name = "default",
         .type = AVMEDIA_TYPE_VIDEO,
     },
     { NULL }
 };
 
 AVFilter ff_vf_codecview = {
     .name          = "codecview",
     .description   = NULL_IF_CONFIG_SMALL("Visualize information about some codecs."),
     .priv_size     = sizeof(CodecViewContext),
     .query_formats = query_formats,
     .inputs        = codecview_inputs,
     .outputs       = codecview_outputs,
     .priv_class    = &codecview_class,
     .flags         = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC,
 };