/*
 * utils for libavcodec
 * Copyright (c) 2001 Fabrice Bellard
 * Copyright (c) 2002-2004 Michael Niedermayer <michaelni@gmx.at>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * utils.
 */
115329f1 |
|
c6507946 |
#include "config.h" |
8738d942 |
#include "libavutil/atomic.h" |
6fee1b90 |
#include "libavutil/attributes.h" |
b2c75b6e |
#include "libavutil/avassert.h" |
7fb94406 |
#include "libavutil/avstring.h" |
36e61e24 |
#include "libavutil/bprint.h" |
a903f8f0 |
#include "libavutil/channel_layout.h" |
245976da |
#include "libavutil/crc.h" |
759001c5 |
#include "libavutil/frame.h" |
7950e519 |
#include "libavutil/internal.h" |
0ebcdf5c |
#include "libavutil/mathematics.h" |
eb285cfe |
#include "libavutil/pixdesc.h" |
7ffe76e5 |
#include "libavutil/imgutils.h"
#include "libavutil/samplefmt.h" |
0b950fe2 |
#include "libavutil/dict.h" |
de6d9b64 |
#include "avcodec.h" |
3123dd79 |
#include "dsputil.h" |
6ed04040 |
#include "libavutil/opt.h" |
b38f008e |
#include "thread.h" |
fde1bc64 |
#include "frame_thread_encoder.h" |
0ba39dd1 |
#include "internal.h" |
f13db94d |
#include "bytestream.h" |
f099d3d1 |
#include "version.h" |
7246177d |
#include <stdlib.h> |
9b879566 |
#include <stdarg.h> |
4c263142 |
#include <limits.h> |
860a40c8 |
#include <float.h> |
7b2d50f8 |
#if CONFIG_ICONV |
f7963993 |
# include <iconv.h>
#endif |
de6d9b64 |
|
094c40ca |
#if HAVE_PTHREADS
#include <pthread.h>
#elif HAVE_W32THREADS
#include "compat/w32pthreads.h"
#elif HAVE_OS2THREADS
#include "compat/os2threads.h"
#endif
#if HAVE_PTHREADS || HAVE_W32THREADS || HAVE_OS2THREADS
/* Default lock manager installed when threading support is available and the
 * client has not registered its own via av_lockmgr_register().
 * Mutexes are created lazily on the first AV_LOCK_OBTAIN; publication is done
 * with a pointer CAS so concurrent first-obtainers do not leak or double-init.
 * Returns 0 on success, AVERROR(err) on a pthread failure, 1 on unknown op. */
static int default_lockmgr_cb(void **arg, enum AVLockOp op)
{
void * volatile * mutex = arg;
int err;
switch (op) {
case AV_LOCK_CREATE:
/* Actual creation is deferred to the first AV_LOCK_OBTAIN. */
return 0;
case AV_LOCK_OBTAIN:
if (!*mutex) {
pthread_mutex_t *tmp = av_malloc(sizeof(pthread_mutex_t));
if (!tmp)
return AVERROR(ENOMEM);
if ((err = pthread_mutex_init(tmp, NULL))) {
av_free(tmp);
return AVERROR(err);
}
/* Try to publish our mutex; if the CAS reports an existing value,
 * another thread won the race, so destroy and free our copy. */
if (avpriv_atomic_ptr_cas(mutex, NULL, tmp)) {
pthread_mutex_destroy(tmp);
av_free(tmp);
}
}
if ((err = pthread_mutex_lock(*mutex)))
return AVERROR(err);
return 0;
case AV_LOCK_RELEASE:
if ((err = pthread_mutex_unlock(*mutex)))
return AVERROR(err);
return 0;
case AV_LOCK_DESTROY:
if (*mutex)
pthread_mutex_destroy(*mutex);
av_free(*mutex);
/* NOTE(review): clears the slot via CAS; assumes no concurrent users
 * remain at destroy time — confirm against av_lockmgr_register() docs. */
avpriv_atomic_ptr_cas(mutex, *mutex, NULL);
return 0;
}
return 1;
}
static int (*lockmgr_cb)(void **mutex, enum AVLockOp op) = default_lockmgr_cb;
#else
static int (*lockmgr_cb)(void **mutex, enum AVLockOp op) = NULL;
#endif
|
25fec859 |
volatile int ff_avcodec_locked; |
419ffb23 |
static int volatile entangled_thread_counter = 0; |
f988ce6c |
static void *codec_mutex; |
2d1b6fb7 |
static void *avformat_mutex; |
ddebfb15 |
|
102b794e |
#if CONFIG_RAISE_MAJOR
# define LIBNAME "LIBAVCODEC_155"
#else
# define LIBNAME "LIBAVCODEC_55"
#endif
|
cce3e0a4 |
/* Compatibility shims: av_fast_realloc()/av_fast_malloc() moved to libavutil.
 * In shared builds with symbol versioning, re-export versioned aliases under
 * the libavcodec version node so binaries linked against an older libavcodec
 * keep resolving these symbols. They simply forward to the libavutil copies. */
#if FF_API_FAST_MALLOC && CONFIG_SHARED && HAVE_SYMVER |
102b794e |
FF_SYMVER(void*, av_fast_realloc, (void *ptr, unsigned int *size, size_t min_size), LIBNAME) |
8e1e6f31 |
{ |
cce3e0a4 |
return av_fast_realloc(ptr, size, min_size); |
8e1e6f31 |
} |
978805b2 |
|
102b794e |
FF_SYMVER(void, av_fast_malloc, (void *ptr, unsigned int *size, size_t min_size), LIBNAME) |
238ef6da |
{ |
cce3e0a4 |
av_fast_malloc(ptr, size, min_size); |
8e1e6f31 |
} |
cce3e0a4 |
#endif |
8e1e6f31 |
|
3b55429d |
/**
 * Ensure the buffer pointed to by *ptr is at least min_size bytes,
 * growing it geometrically to amortize repeated enlargements.
 *
 * @param ptr          pointer to the buffer pointer (updated on realloc)
 * @param size         in/out: currently allocated / newly allocated size
 * @param min_size     minimum required size in bytes
 * @param zero_realloc if nonzero a newly allocated buffer is zeroed
 * @return 0 if the existing buffer was kept, 1 if it was (re)allocated;
 *         on allocation failure *ptr is NULL and *size is 0
 */
static inline int ff_fast_malloc(void *ptr, unsigned int *size, size_t min_size, int zero_realloc)
{
    void **p = ptr;

    /* Fix: use <= and verify *p so that an exactly-fitting, valid buffer is
     * kept instead of being pointlessly freed and reallocated (the previous
     * "min_size < *size" forced a realloc when min_size == *size), and so a
     * stale nonzero *size with a NULL pointer still triggers allocation. */
    if (min_size <= *size && *p)
        return 0;

    /* Over-allocate by ~1/16 plus 32 bytes to amortize future growth. */
    min_size = FFMAX(17 * min_size / 16 + 32, min_size);
    av_free(*p);
    *p = zero_realloc ? av_mallocz(min_size) : av_malloc(min_size);
    if (!*p)
        min_size = 0;
    /* NOTE(review): min_size is truncated to unsigned int here; callers cap
     * requests well below UINT_MAX in practice — confirm before relying on
     * larger sizes. */
    *size = min_size;
    return 1;
}
/* Like av_fast_malloc() but guarantees FF_INPUT_BUFFER_PADDING_SIZE zeroed
 * bytes after the requested region, as required by the bitstream readers. */
void av_fast_padded_malloc(void *ptr, unsigned int *size, size_t min_size)
{
    uint8_t **bufp = ptr;

    /* Refuse requests whose padded size would overflow size_t. */
    if (min_size > SIZE_MAX - FF_INPUT_BUFFER_PADDING_SIZE) {
        av_freep(bufp);
        *size = 0;
        return;
    }

    /* If the existing buffer was reused (no reallocation happened), only the
     * padding area must be cleared; a fresh buffer is already zeroed. */
    if (ff_fast_malloc(bufp, size, min_size + FF_INPUT_BUFFER_PADDING_SIZE, 1) == 0)
        memset(*bufp + min_size, 0, FF_INPUT_BUFFER_PADDING_SIZE);
}
|
0e003d8c |
/* Like av_fast_padded_malloc() but the whole requested region (including the
 * padding) is guaranteed to be zeroed on return. */
void av_fast_padded_mallocz(void *ptr, unsigned int *size, size_t min_size)
{
    uint8_t **bufp = ptr;
    size_t padded;

    if (min_size > SIZE_MAX - FF_INPUT_BUFFER_PADDING_SIZE) {
        av_freep(bufp);
        *size = 0;
        return;
    }

    padded = min_size + FF_INPUT_BUFFER_PADDING_SIZE;
    /* A reused buffer may hold stale data — wipe it; a new one is zeroed. */
    if (ff_fast_malloc(bufp, size, padded, 1) == 0)
        memset(*bufp, 0, padded);
}
|
de6d9b64 |
/* encoder management */ |
e6df765e |
static AVCodec *first_avcodec = NULL; |
ec464c96 |
static AVCodec **last_avcodec = &first_avcodec; |
de6d9b64 |
|
0a0f19b5 |
AVCodec *av_codec_next(const AVCodec *c)
{ |
419ffb23 |
if (c)
return c->next;
else
return first_avcodec; |
55b9e69a |
}
|
6fee1b90 |
static av_cold void avcodec_init(void) |
9ecfbb3e |
{
static int initialized = 0;
if (initialized != 0)
return;
initialized = 1;
|
6a701306 |
if (CONFIG_DSPUTIL)
ff_dsputil_static_init(); |
9ecfbb3e |
}
|
0a0f19b5 |
int av_codec_is_encoder(const AVCodec *codec) |
b2c75b6e |
{ |
466b39ef |
return codec && (codec->encode_sub || codec->encode2); |
b2c75b6e |
}
|
0a0f19b5 |
int av_codec_is_decoder(const AVCodec *codec) |
b2c75b6e |
{
return codec && codec->decode;
}
|
6fee1b90 |
/* Append a codec to the global registry.
 * Lock-free: walks to the list tail and installs the codec with a pointer
 * CAS, retrying further down the list on contention. */
av_cold void avcodec_register(AVCodec *codec)
de6d9b64 |
{
AVCodec **p; |
7a961a46 |
avcodec_init(); |
ec464c96 |
/* last_avcodec caches the tail so registration stays O(1) when
 * uncontended. */
p = last_avcodec; |
335a761a |
codec->next = NULL; |
133fbfc7 |
while(*p || avpriv_atomic_ptr_cas((void * volatile *)p, NULL, codec)) |
8738d942 |
p = &(*p)->next; |
ec464c96 |
last_avcodec = &codec->next; |
d97efd7f |
/* Let the codec build its static tables once, at registration time. */
if (codec->init_static_data)
codec->init_static_data(codec); |
de6d9b64 |
}
|
93c553c7 |
#if FF_API_EMU_EDGE |
0fb49b59 |
/**
 * @return the pixel padding get_buffer() adds around images.
 * Kept only for API compatibility (guarded by FF_API_EMU_EDGE).
 */
unsigned avcodec_get_edge_width(void)
{
return EDGE_WIDTH;
}
93c553c7 |
#endif |
0fb49b59 |
|
7644f5a8 |
#if FF_API_SET_DIMENSIONS |
419ffb23 |
/* Deprecated public wrapper (FF_API_SET_DIMENSIONS); forwards to
 * ff_set_dimensions() and only logs when that fails. */
void avcodec_set_dimensions(AVCodecContext *s, int width, int height)
{
    if (ff_set_dimensions(s, width, height) < 0)
        av_log(s, AV_LOG_WARNING, "Failed to set dimensions %d %d\n", width, height);
}
#endif
/* Validate and store coded and display dimensions on the context.
 * Invalid sizes are clamped to 0x0 but still stored; the (negative)
 * validation result is returned so the caller can report the error. */
int ff_set_dimensions(AVCodecContext *s, int width, int height)
{
    int err = av_image_check_size(width, height, 0, s);

    if (err < 0) {
        width  = 0;
        height = 0;
    }

    s->coded_width  = width;
    s->coded_height = height;
    /* Display size is the coded size scaled down by the lowres factor. */
    s->width        = FF_CEIL_RSHIFT(width,  s->lowres);
    s->height       = FF_CEIL_RSHIFT(height, s->lowres);

    return err;
}
|
5b4797a2 |
/* Store the matrix encoding in the frame's MATRIXENCODING side data,
 * creating the side-data entry on demand.
 * Returns 0 on success, AVERROR(ENOMEM) if the entry cannot be created. */
int ff_side_data_update_matrix_encoding(AVFrame *frame,
                                        enum AVMatrixEncoding matrix_encoding)
{
    AVFrameSideData *sd;

    sd = av_frame_get_side_data(frame, AV_FRAME_DATA_MATRIXENCODING);
    if (!sd)
        sd = av_frame_new_side_data(frame, AV_FRAME_DATA_MATRIXENCODING,
                                    sizeof(enum AVMatrixEncoding));
    if (!sd)
        return AVERROR(ENOMEM);

    *(enum AVMatrixEncoding *)sd->data = matrix_encoding;
    return 0;
}
|
e95930ed |
#if HAVE_NEON || ARCH_PPC || HAVE_MMX |
c6507946 |
# define STRIDE_ALIGN 16
#else
# define STRIDE_ALIGN 8
#endif
|
560f773c |
/**
 * Round width/height up to codec- and pixel-format-dependent alignment and
 * report the per-plane linesize alignment requirements.
 *
 * Fix: the final loop previously initialized only linesize_align[0..3],
 * leaving entries 4..AV_NUM_DATA_POINTERS-1 of the caller-supplied array
 * uninitialized even though the public signature promises an
 * AV_NUM_DATA_POINTERS-sized result; fill the whole array.
 */
void avcodec_align_dimensions2(AVCodecContext *s, int *width, int *height,
                               int linesize_align[AV_NUM_DATA_POINTERS])
{
    int i;
    int w_align = 1;
    int h_align = 1;

    switch (s->pix_fmt) {
    case AV_PIX_FMT_YUV420P:
    case AV_PIX_FMT_YUYV422:
    case AV_PIX_FMT_UYVY422:
    case AV_PIX_FMT_YUV422P:
    case AV_PIX_FMT_YUV440P:
    case AV_PIX_FMT_YUV444P:
    case AV_PIX_FMT_GBRAP:
    case AV_PIX_FMT_GBRP:
    case AV_PIX_FMT_GRAY8:
    case AV_PIX_FMT_GRAY16BE:
    case AV_PIX_FMT_GRAY16LE:
    case AV_PIX_FMT_YUVJ420P:
    case AV_PIX_FMT_YUVJ422P:
    case AV_PIX_FMT_YUVJ440P:
    case AV_PIX_FMT_YUVJ444P:
    case AV_PIX_FMT_YUVA420P:
    case AV_PIX_FMT_YUVA422P:
    case AV_PIX_FMT_YUVA444P:
    case AV_PIX_FMT_YUV420P9LE:
    case AV_PIX_FMT_YUV420P9BE:
    case AV_PIX_FMT_YUV420P10LE:
    case AV_PIX_FMT_YUV420P10BE:
    case AV_PIX_FMT_YUV420P12LE:
    case AV_PIX_FMT_YUV420P12BE:
    case AV_PIX_FMT_YUV420P14LE:
    case AV_PIX_FMT_YUV420P14BE:
    case AV_PIX_FMT_YUV420P16LE:
    case AV_PIX_FMT_YUV420P16BE:
    case AV_PIX_FMT_YUVA420P9LE:
    case AV_PIX_FMT_YUVA420P9BE:
    case AV_PIX_FMT_YUVA420P10LE:
    case AV_PIX_FMT_YUVA420P10BE:
    case AV_PIX_FMT_YUVA420P16LE:
    case AV_PIX_FMT_YUVA420P16BE:
    case AV_PIX_FMT_YUV422P9LE:
    case AV_PIX_FMT_YUV422P9BE:
    case AV_PIX_FMT_YUV422P10LE:
    case AV_PIX_FMT_YUV422P10BE:
    case AV_PIX_FMT_YUV422P12LE:
    case AV_PIX_FMT_YUV422P12BE:
    case AV_PIX_FMT_YUV422P14LE:
    case AV_PIX_FMT_YUV422P14BE:
    case AV_PIX_FMT_YUV422P16LE:
    case AV_PIX_FMT_YUV422P16BE:
    case AV_PIX_FMT_YUVA422P9LE:
    case AV_PIX_FMT_YUVA422P9BE:
    case AV_PIX_FMT_YUVA422P10LE:
    case AV_PIX_FMT_YUVA422P10BE:
    case AV_PIX_FMT_YUVA422P16LE:
    case AV_PIX_FMT_YUVA422P16BE:
    case AV_PIX_FMT_YUV444P9LE:
    case AV_PIX_FMT_YUV444P9BE:
    case AV_PIX_FMT_YUV444P10LE:
    case AV_PIX_FMT_YUV444P10BE:
    case AV_PIX_FMT_YUV444P12LE:
    case AV_PIX_FMT_YUV444P12BE:
    case AV_PIX_FMT_YUV444P14LE:
    case AV_PIX_FMT_YUV444P14BE:
    case AV_PIX_FMT_YUV444P16LE:
    case AV_PIX_FMT_YUV444P16BE:
    case AV_PIX_FMT_YUVA444P9LE:
    case AV_PIX_FMT_YUVA444P9BE:
    case AV_PIX_FMT_YUVA444P10LE:
    case AV_PIX_FMT_YUVA444P10BE:
    case AV_PIX_FMT_YUVA444P16LE:
    case AV_PIX_FMT_YUVA444P16BE:
    case AV_PIX_FMT_GBRP9LE:
    case AV_PIX_FMT_GBRP9BE:
    case AV_PIX_FMT_GBRP10LE:
    case AV_PIX_FMT_GBRP10BE:
    case AV_PIX_FMT_GBRP12LE:
    case AV_PIX_FMT_GBRP12BE:
    case AV_PIX_FMT_GBRP14LE:
    case AV_PIX_FMT_GBRP14BE:
        w_align = 16; //FIXME assume 16 pixel per macroblock
        h_align = 16 * 2; // interlaced needs 2 macroblocks height
        break;
    case AV_PIX_FMT_YUV411P:
    case AV_PIX_FMT_YUVJ411P:
    case AV_PIX_FMT_UYYVYY411:
        w_align = 32;
        h_align = 8;
        break;
    case AV_PIX_FMT_YUV410P:
        if (s->codec_id == AV_CODEC_ID_SVQ1) {
            w_align = 64;
            h_align = 64;
        }
        break;
    case AV_PIX_FMT_RGB555:
        if (s->codec_id == AV_CODEC_ID_RPZA) {
            w_align = 4;
            h_align = 4;
        }
        break;
    case AV_PIX_FMT_PAL8:
    case AV_PIX_FMT_BGR8:
    case AV_PIX_FMT_RGB8:
        if (s->codec_id == AV_CODEC_ID_SMC ||
            s->codec_id == AV_CODEC_ID_CINEPAK) {
            w_align = 4;
            h_align = 4;
        }
        break;
    case AV_PIX_FMT_BGR24:
        if ((s->codec_id == AV_CODEC_ID_MSZH) ||
            (s->codec_id == AV_CODEC_ID_ZLIB)) {
            w_align = 4;
            h_align = 4;
        }
        break;
    case AV_PIX_FMT_RGB24:
        if (s->codec_id == AV_CODEC_ID_CINEPAK) {
            w_align = 4;
            h_align = 4;
        }
        break;
    default:
        w_align = 1;
        h_align = 1;
        break;
    }

    if (s->codec_id == AV_CODEC_ID_IFF_ILBM || s->codec_id == AV_CODEC_ID_IFF_BYTERUN1) {
        w_align = FFMAX(w_align, 8);
    }

    *width  = FFALIGN(*width, w_align);
    *height = FFALIGN(*height, h_align);
    if (s->codec_id == AV_CODEC_ID_H264 || s->lowres)
        // some of the optimized chroma MC reads one line too much
        // which is also done in mpeg decoders with lowres > 0
        *height += 2;

    /* Fill the ENTIRE advertised array, not just the first 4 entries. */
    for (i = 0; i < AV_NUM_DATA_POINTERS; i++)
        linesize_align[i] = STRIDE_ALIGN;
}
|
419ffb23 |
/* Convenience wrapper: align width/height so that all planes of the current
 * pixel format satisfy their linesize alignment with a single width value. */
void avcodec_align_dimensions(AVCodecContext *s, int *width, int *height)
{
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(s->pix_fmt);
    int chroma_shift = desc->log2_chroma_w;
    int linesize_align[AV_NUM_DATA_POINTERS];
    int align;

    avcodec_align_dimensions2(s, width, height, linesize_align);
    /* Chroma planes are horizontally subsampled, so their alignment must be
     * scaled up before merging all plane requirements into one value. */
    linesize_align[1] <<= chroma_shift;
    linesize_align[2] <<= chroma_shift;
    align  = FFMAX(linesize_align[0], linesize_align[3]);
    align  = FFMAX3(align, linesize_align[1], linesize_align[2]);
    *width = FFALIGN(*width, align);
}
|
b7397857 |
/* Convert an AVChromaLocation enum to x/y positions in 1/256 pixel units.
 * Returns 0 on success, AVERROR(EINVAL) for unspecified/out-of-range input. */
int avcodec_enum_to_chroma_pos(int *xpos, int *ypos, enum AVChromaLocation pos)
{
    int idx;

    if (pos <= AVCHROMA_LOC_UNSPECIFIED || pos >= AVCHROMA_LOC_NB)
        return AVERROR(EINVAL);

    idx   = pos - 1;
    *xpos = (idx & 1) * 128;
    *ypos = ((idx >> 1) ^ (idx < 4)) * 128;
    return 0;
}
|
abc8110f |
/* Inverse of avcodec_enum_to_chroma_pos(): brute-force search over all enum
 * values; returns AVCHROMA_LOC_UNSPECIFIED when no value matches. */
enum AVChromaLocation avcodec_chroma_pos_to_enum(int xpos, int ypos)
{
    int pos;

    for (pos = AVCHROMA_LOC_UNSPECIFIED + 1; pos < AVCHROMA_LOC_NB; pos++) {
        int x, y;
        if (!avcodec_enum_to_chroma_pos(&x, &y, pos) && x == xpos && y == ypos)
            return pos;
    }
    return AVCHROMA_LOC_UNSPECIFIED;
}
|
5ee5fa02 |
/* Set up an AVFrame's data pointers/linesize to point into a caller-provided
 * audio buffer. Allocates frame->extended_data when the planar channel count
 * exceeds AV_NUM_DATA_POINTERS; otherwise extended_data aliases data. */
int avcodec_fill_audio_frame(AVFrame *frame, int nb_channels,
                             enum AVSampleFormat sample_fmt, const uint8_t *buf,
                             int buf_size, int align)
{
    int planar, needed, ret, ch;

    needed = av_samples_get_buffer_size(NULL, nb_channels, frame->nb_samples,
                                        sample_fmt, align);
    if (buf_size < needed)
        return AVERROR(EINVAL);

    planar = av_sample_fmt_is_planar(sample_fmt);
    if (planar && nb_channels > AV_NUM_DATA_POINTERS) {
        frame->extended_data = av_mallocz(nb_channels *
                                          sizeof(*frame->extended_data));
        if (!frame->extended_data)
            return AVERROR(ENOMEM);
    } else {
        frame->extended_data = frame->data;
    }

    /* buf is only read from; the const is cast away to satisfy the API. */
    ret = av_samples_fill_arrays(frame->extended_data, &frame->linesize[0],
                                 (uint8_t *)(intptr_t)buf, nb_channels,
                                 frame->nb_samples, sample_fmt, align);
    if (ret < 0) {
        if (frame->extended_data != frame->data)
            av_freep(&frame->extended_data);
        return ret;
    }

    /* Mirror the first pointers back into the fixed-size data[] array. */
    if (frame->extended_data != frame->data) {
        for (ch = 0; ch < AV_NUM_DATA_POINTERS; ch++)
            frame->data[ch] = frame->extended_data[ch];
    }

    return ret;
}
|
759001c5 |
/* (Re)configure the context's internal buffer pools so they can serve
 * buffers matching the given frame's geometry/format. Cheap no-op when the
 * pools already match. On failure, all pools are torn down and an AVERROR
 * code is returned. */
static int update_frame_pool(AVCodecContext *avctx, AVFrame *frame)
{
    FramePool *pool = avctx->internal->pool;
    int i, ret;

    switch (avctx->codec_type) {
    case AVMEDIA_TYPE_VIDEO: {
        AVPicture picture;
        int size[4] = { 0 };
        int w = frame->width;
        int h = frame->height;
        int tmpsize, unaligned;

        if (pool->format == frame->format &&
            pool->width == frame->width && pool->height == frame->height)
            return 0;

        avcodec_align_dimensions2(avctx, &w, &h, pool->stride_align);

        if (!(avctx->flags & CODEC_FLAG_EMU_EDGE)) {
            w += EDGE_WIDTH * 2;
            h += EDGE_WIDTH * 2;
        }

        do {
            // NOTE: do not align linesizes individually, this breaks e.g. assumptions
            // that linesize[0] == 2*linesize[1] in the MPEG-encoder for 4:2:2
            av_image_fill_linesizes(picture.linesize, avctx->pix_fmt, w);
            // increase alignment of w for next try (rhs gives the lowest bit set in w)
            w += w & ~(w - 1);

            unaligned = 0;
            for (i = 0; i < 4; i++)
                unaligned |= picture.linesize[i] % pool->stride_align[i];
        } while (unaligned);

        tmpsize = av_image_fill_pointers(picture.data, avctx->pix_fmt, h,
                                         NULL, picture.linesize);
        if (tmpsize < 0)
            return -1;

        /* Derive per-plane sizes from consecutive plane pointer offsets. */
        for (i = 0; i < 3 && picture.data[i + 1]; i++)
            size[i] = picture.data[i + 1] - picture.data[i];
        size[i] = tmpsize - (picture.data[i] - picture.data[0]);

        for (i = 0; i < 4; i++) {
            av_buffer_pool_uninit(&pool->pools[i]);
            pool->linesize[i] = picture.linesize[i];
            if (size[i]) {
                pool->pools[i] = av_buffer_pool_init(size[i] + 16 + STRIDE_ALIGN - 1,
                                                     CONFIG_MEMORY_POISONING ?
                                                         NULL :
                                                         av_buffer_allocz);
                if (!pool->pools[i]) {
                    ret = AVERROR(ENOMEM);
                    goto fail;
                }
            }
        }

        pool->format = frame->format;
        pool->width  = frame->width;
        pool->height = frame->height;

        break;
    }
    case AVMEDIA_TYPE_AUDIO: {
        int ch     = av_frame_get_channels(frame); //av_get_channel_layout_nb_channels(frame->channel_layout);
        int planar = av_sample_fmt_is_planar(frame->format);
        int planes = planar ? ch : 1;

        if (pool->format == frame->format && pool->planes == planes &&
            pool->channels == ch && frame->nb_samples == pool->samples)
            return 0;

        av_buffer_pool_uninit(&pool->pools[0]);
        ret = av_samples_get_buffer_size(&pool->linesize[0], ch,
                                         frame->nb_samples, frame->format, 0);
        if (ret < 0)
            goto fail;

        pool->pools[0] = av_buffer_pool_init(pool->linesize[0], NULL);
        if (!pool->pools[0]) {
            ret = AVERROR(ENOMEM);
            goto fail;
        }

        pool->format   = frame->format;
        pool->planes   = planes;
        pool->channels = ch;
        pool->samples  = frame->nb_samples;
        break;
    }
    default: av_assert0(0);
    }
    return 0;
fail:
    for (i = 0; i < 4; i++)
        av_buffer_pool_uninit(&pool->pools[i]);
    pool->format = -1;
    pool->planes = pool->channels = pool->samples = 0;
    pool->width  = pool->height = 0;
    return ret;
}
1e491e29 |
|
759001c5 |
/* Allocate audio buffers for a frame out of the context's buffer pool.
 * When the plane count exceeds AV_NUM_DATA_POINTERS, the surplus pointers
 * and buffer refs are spilled into extended_data/extended_buf. */
static int audio_get_buffer(AVCodecContext *avctx, AVFrame *frame)
{
    FramePool *pool = avctx->internal->pool;
    int planes      = pool->planes;
    int p;

    frame->linesize[0] = pool->linesize[0];

    if (planes > AV_NUM_DATA_POINTERS) {
        frame->extended_data   = av_mallocz(planes * sizeof(*frame->extended_data));
        frame->nb_extended_buf = planes - AV_NUM_DATA_POINTERS;
        frame->extended_buf    = av_mallocz(frame->nb_extended_buf *
                                            sizeof(*frame->extended_buf));
        if (!frame->extended_data || !frame->extended_buf) {
            av_freep(&frame->extended_data);
            av_freep(&frame->extended_buf);
            return AVERROR(ENOMEM);
        }
    } else {
        frame->extended_data = frame->data;
        av_assert0(frame->nb_extended_buf == 0);
    }

    /* All planes share the same pool since every plane has equal size. */
    for (p = 0; p < FFMIN(planes, AV_NUM_DATA_POINTERS); p++) {
        frame->buf[p] = av_buffer_pool_get(pool->pools[0]);
        if (!frame->buf[p])
            goto fail;
        frame->extended_data[p] = frame->data[p] = frame->buf[p]->data;
    }
    for (p = 0; p < frame->nb_extended_buf; p++) {
        frame->extended_buf[p] = av_buffer_pool_get(pool->pools[0]);
        if (!frame->extended_buf[p])
            goto fail;
        frame->extended_data[p + AV_NUM_DATA_POINTERS] = frame->extended_buf[p]->data;
    }

    if (avctx->debug & FF_DEBUG_BUFFERS)
        av_log(avctx, AV_LOG_DEBUG, "default_get_buffer called on frame %p", frame);
    return 0;
fail:
    av_frame_unref(frame);
    return AVERROR(ENOMEM);
}
1e491e29 |
|
759001c5 |
/* Allocate video plane buffers for a picture out of the context's pools.
 * Unless edge emulation is requested (or the format is not planar YUV-like),
 * data[] pointers are offset past the allocated edge so decoders can write
 * beyond the visible area. */
static int video_get_buffer(AVCodecContext *s, AVFrame *pic)
{
    FramePool *pool                = s->internal->pool;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(pic->format);
    int pixel_size                 = desc->comp[0].step_minus1 + 1;
    int h_chroma_shift, v_chroma_shift;
    int i;

    if (pic->data[0] != NULL) {
        av_log(s, AV_LOG_ERROR, "pic->data[0]!=NULL in avcodec_default_get_buffer\n");
        return -1;
    }

    memset(pic->data, 0, sizeof(pic->data));
    pic->extended_data = pic->data;

    av_pix_fmt_get_chroma_sub_sample(s->pix_fmt, &h_chroma_shift, &v_chroma_shift);

    for (i = 0; i < 4 && pool->pools[i]; i++) {
        const int h_shift = i == 0 ? 0 : h_chroma_shift;
        const int v_shift = i == 0 ? 0 : v_chroma_shift;
        int is_planar = pool->pools[2] || (i == 0 && s->pix_fmt == AV_PIX_FMT_GRAY8);

        pic->linesize[i] = pool->linesize[i];
        pic->buf[i]      = av_buffer_pool_get(pool->pools[i]);
        if (!pic->buf[i])
            goto fail;

        // no edge if EDGE EMU or not planar YUV
        if ((s->flags & CODEC_FLAG_EMU_EDGE) || !is_planar) {
            pic->data[i] = pic->buf[i]->data;
        } else {
            pic->data[i] = pic->buf[i]->data +
                           FFALIGN((pic->linesize[i] * EDGE_WIDTH >> v_shift) +
                                   (pixel_size * EDGE_WIDTH >> h_shift),
                                   pool->stride_align[i]);
        }
    }
    for (; i < AV_NUM_DATA_POINTERS; i++) {
        pic->data[i]     = NULL;
        pic->linesize[i] = 0;
    }

    /* Pseudo-paletted formats get a systematic palette in the second plane. */
    if (pic->data[1] && !pic->data[2])
        avpriv_set_systematic_pal2((uint32_t *)pic->data[1], s->pix_fmt);

    if (s->debug & FF_DEBUG_BUFFERS)
        av_log(s, AV_LOG_DEBUG, "default_get_buffer called on pic %p\n", pic);

    return 0;
fail:
    av_frame_unref(pic);
    return AVERROR(ENOMEM);
}
|
a53b144e |
/* Fill every component plane of a planar frame with the constant c[plane].
 * Chroma planes are dimensioned according to the format's subsampling. */
void avpriv_color_frame(AVFrame *frame, const int c[4])
{
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
    int p, y, x;

    av_assert0(desc->flags & AV_PIX_FMT_FLAG_PLANAR);

    for (p = 0; p < desc->nb_components; p++) {
        uint8_t *dst  = frame->data[p];
        int is_chroma = p == 1 || p == 2;
        int bytes     = is_chroma ? FF_CEIL_RSHIFT(frame->width,  desc->log2_chroma_w) : frame->width;
        int height    = is_chroma ? FF_CEIL_RSHIFT(frame->height, desc->log2_chroma_h) : frame->height;

        for (y = 0; y < height; y++) {
            if (desc->comp[0].depth_minus1 >= 8) {
                /* Formats deeper than 8 bits store one uint16_t per sample. */
                for (x = 0; x < bytes; x++)
                    ((uint16_t *)dst)[x] = c[p];
            } else {
                memset(dst, c[p], bytes);
            }
            dst += frame->linesize[p];
        }
    }
}
|
759001c5 |
/* Default AVCodecContext.get_buffer2(): refresh the internal pools for the
 * frame's geometry, then dispatch to the media-type-specific allocator. */
int avcodec_default_get_buffer2(AVCodecContext *avctx, AVFrame *frame, int flags)
{
    int ret = update_frame_pool(avctx, frame);
    if (ret < 0)
        return ret;

#if FF_API_GET_BUFFER
FF_DISABLE_DEPRECATION_WARNINGS
    frame->type = FF_BUFFER_TYPE_INTERNAL;
FF_ENABLE_DEPRECATION_WARNINGS
#endif

    switch (avctx->codec_type) {
    case AVMEDIA_TYPE_VIDEO:
        return video_get_buffer(avctx, frame);
    case AVMEDIA_TYPE_AUDIO:
        return audio_get_buffer(avctx, frame);
    default:
        return -1;
    }
}
|
f9fd6f98 |
/* Propagate per-packet and per-context metadata (timestamps, format,
 * aspect ratio, color properties, channel layout, ...) onto a frame before
 * its buffers are allocated.
 * Returns 0 on success or a negative AVERROR on inconsistent audio
 * channel configuration. */
int ff_init_buffer_info(AVCodecContext *avctx, AVFrame *frame)
{
    if (avctx->internal->pkt) {
        frame->pkt_pts = avctx->internal->pkt->pts;
        av_frame_set_pkt_pos     (frame, avctx->internal->pkt->pos);
        av_frame_set_pkt_duration(frame, avctx->internal->pkt->duration);
        av_frame_set_pkt_size    (frame, avctx->internal->pkt->size);
    } else {
        frame->pkt_pts = AV_NOPTS_VALUE;
        av_frame_set_pkt_pos     (frame, -1);
        av_frame_set_pkt_duration(frame, 0);
        av_frame_set_pkt_size    (frame, -1);
    }
    frame->reordered_opaque = avctx->reordered_opaque;

    switch (avctx->codec->type) {
    case AVMEDIA_TYPE_VIDEO:
        frame->format = avctx->pix_fmt;
        if (!frame->sample_aspect_ratio.num)
            frame->sample_aspect_ratio = avctx->sample_aspect_ratio;
        if (av_frame_get_colorspace(frame) == AVCOL_SPC_UNSPECIFIED)
            av_frame_set_colorspace(frame, avctx->colorspace);
        if (av_frame_get_color_range(frame) == AVCOL_RANGE_UNSPECIFIED)
            av_frame_set_color_range(frame, avctx->color_range);
        break;
    case AVMEDIA_TYPE_AUDIO:
        if (!frame->sample_rate)
            frame->sample_rate = avctx->sample_rate;
        if (frame->format < 0)
            frame->format = avctx->sample_fmt;
        if (!frame->channel_layout) {
            if (avctx->channel_layout) {
                /* A layout that disagrees with the channel count is a hard
                 * error — downstream code trusts their consistency. */
                if (av_get_channel_layout_nb_channels(avctx->channel_layout) !=
                    avctx->channels) {
                    av_log(avctx, AV_LOG_ERROR, "Inconsistent channel "
                                                "configuration.\n");
                    return AVERROR(EINVAL);
                }
                frame->channel_layout = avctx->channel_layout;
            } else {
                if (avctx->channels > FF_SANE_NB_CHANNELS) {
                    av_log(avctx, AV_LOG_ERROR, "Too many channels: %d.\n",
                           avctx->channels);
                    return AVERROR(ENOSYS);
                }
            }
        }
        av_frame_set_channels(frame, avctx->channels);
        break;
    }
    return 0;
}
ff953fec |
|
759001c5 |
#if FF_API_GET_BUFFER |
20be5e0a |
FF_DISABLE_DEPRECATION_WARNINGS |
759001c5 |
/* Deprecated (FF_API_GET_BUFFER): forward to the get_buffer2 default with
 * no flags. */
int avcodec_default_get_buffer(AVCodecContext *avctx, AVFrame *frame) |
d2a3f08d |
{ |
759001c5 |
return avcodec_default_get_buffer2(avctx, frame, 0);
}
2f980cf3 |
|
759001c5 |
/* Glue for wrapping buffers allocated through the deprecated get_buffer()
 * API in refcounted AVBuffers: a snapshot of the context and frame is kept
 * so the old release_buffer() callback can be invoked once every wrapped
 * plane has been freed. */
typedef struct CompatReleaseBufPriv {
AVCodecContext avctx;
AVFrame frame;
} CompatReleaseBufPriv;
/* Free callback of the dummy AVBuffer: runs the user's release_buffer()
 * on the saved snapshots, then frees the snapshot itself. */
static void compat_free_buffer(void *opaque, uint8_t *data)
{
CompatReleaseBufPriv *priv = opaque; |
bb3823d4 |
if (priv->avctx.release_buffer)
priv->avctx.release_buffer(&priv->avctx, &priv->frame); |
759001c5 |
av_freep(&priv); |
594d4d5d |
}
|
759001c5 |
/* Per-plane free callback: drops the plane's reference on the dummy buffer. */
static void compat_release_buffer(void *opaque, uint8_t *data) |
419ffb23 |
{ |
759001c5 |
AVBufferRef *buf = opaque;
av_buffer_unref(&buf);
}
20be5e0a |
FF_ENABLE_DEPRECATION_WARNINGS |
759001c5 |
#endif |
d90cf87b |
|
1ec94b0f |
/* Core of ff_get_buffer(): validate parameters, fill default frame
 * dimensions and metadata, then either route a deprecated get_buffer()
 * allocation through the AVBuffer compat wrapper or call get_buffer2(). */
static int get_buffer_internal(AVCodecContext *avctx, AVFrame *frame, int flags) |
759001c5 |
{ |
f7e85ee9 |
int override_dimensions = 1; |
759001c5 |
int ret;
|
80e9e63c |
if (avctx->codec_type == AVMEDIA_TYPE_VIDEO) {
if ((ret = av_image_check_size(avctx->width, avctx->height, 0, avctx)) < 0 || avctx->pix_fmt<0) {
av_log(avctx, AV_LOG_ERROR, "video_get_buffer: image parameters invalid\n");
return AVERROR(EINVAL);
} |
ff953fec |
} |
ea4b477a |
if (avctx->codec_type == AVMEDIA_TYPE_VIDEO) { |
f7e85ee9 |
/* If the caller did not pre-set dimensions, derive them from the
 * context (coded size scaled by lowres, at least the display size). */
if (frame->width <= 0 || frame->height <= 0) { |
ea4b477a |
frame->width = FFMAX(avctx->width, FF_CEIL_RSHIFT(avctx->coded_width, avctx->lowres));
frame->height = FFMAX(avctx->height, FF_CEIL_RSHIFT(avctx->coded_height, avctx->lowres)); |
f7e85ee9 |
override_dimensions = 0;
} |
ff953fec |
} |
f9fd6f98 |
if ((ret = ff_init_buffer_info(avctx, frame)) < 0)
return ret; |
0eea2129 |
|
759001c5 |
#if FF_API_GET_BUFFER |
7950e519 |
FF_DISABLE_DEPRECATION_WARNINGS |
759001c5 |
/* |
f18ccb52 |
* Wrap an old get_buffer()-allocated buffer in a bunch of AVBuffers. |
759001c5 |
* We wrap each plane in its own AVBuffer. Each of those has a reference to
* a dummy AVBuffer as its private data, unreffing it on free.
* When all the planes are freed, the dummy buffer's free callback calls
* release_buffer().
*/
if (avctx->get_buffer) {
CompatReleaseBufPriv *priv = NULL;
AVBufferRef *dummy_buf = NULL;
int planes, i, ret; |
f3a29b75 |
|
759001c5 |
if (flags & AV_GET_BUFFER_FLAG_REF)
frame->reference = 1; |
d90cf87b |
|
759001c5 |
ret = avctx->get_buffer(avctx, frame);
if (ret < 0)
return ret; |
0eea2129 |
|
759001c5 |
/* return if the buffers are already set up
* this would happen e.g. when a custom get_buffer() calls
* avcodec_default_get_buffer
*/
if (frame->buf[0]) |
cdfa1c89 |
goto end; |
759001c5 |
priv = av_mallocz(sizeof(*priv));
if (!priv) {
ret = AVERROR(ENOMEM);
goto fail; |
f3a29b75 |
} |
759001c5 |
priv->avctx = *avctx;
priv->frame = *frame; |
d90cf87b |
|
759001c5 |
dummy_buf = av_buffer_create(NULL, 0, compat_free_buffer, priv, 0);
if (!dummy_buf) {
ret = AVERROR(ENOMEM);
goto fail; |
f3a29b75 |
} |
d90cf87b |
|
759001c5 |
#define WRAP_PLANE(ref_out, data, data_size) \
do { \
AVBufferRef *dummy_ref = av_buffer_ref(dummy_buf); \
if (!dummy_ref) { \
ret = AVERROR(ENOMEM); \
goto fail; \
} \
ref_out = av_buffer_create(data, data_size, compat_release_buffer, \
dummy_ref, 0); \
if (!ref_out) { \
av_frame_unref(frame); \
ret = AVERROR(ENOMEM); \
goto fail; \
} \
} while (0)
if (avctx->codec_type == AVMEDIA_TYPE_VIDEO) {
const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
|
c977039e |
planes = av_pix_fmt_count_planes(frame->format); |
c24469e8 |
/* workaround for AVHWAccel plane count of 0, buf[0] is used as
check for allocated buffers: make libavcodec happy */
e6c4ac7b |
if (desc && desc->flags & AV_PIX_FMT_FLAG_HWACCEL) |
ac9b056d |
planes = 1; |
c977039e |
if (!desc || planes <= 0) { |
759001c5 |
ret = AVERROR(EINVAL);
goto fail;
} |
385c820b |
|
759001c5 |
for (i = 0; i < planes; i++) { |
65528080 |
int v_shift = (i == 1 || i == 2) ? desc->log2_chroma_h : 0;
int plane_size = (frame->height >> v_shift) * frame->linesize[i]; |
1e491e29 |
|
759001c5 |
WRAP_PLANE(frame->buf[i], frame->data[i], plane_size);
}
} else {
int planar = av_sample_fmt_is_planar(frame->format);
planes = planar ? avctx->channels : 1;
if (planes > FF_ARRAY_ELEMS(frame->buf)) {
frame->nb_extended_buf = planes - FF_ARRAY_ELEMS(frame->buf);
frame->extended_buf = av_malloc(sizeof(*frame->extended_buf) *
frame->nb_extended_buf);
if (!frame->extended_buf) {
ret = AVERROR(ENOMEM);
goto fail;
}
} |
e1c2a5a0 |
|
759001c5 |
for (i = 0; i < FFMIN(planes, FF_ARRAY_ELEMS(frame->buf)); i++)
WRAP_PLANE(frame->buf[i], frame->extended_data[i], frame->linesize[0]); |
0eea2129 |
|
669cc0f3 |
for (i = 0; i < frame->nb_extended_buf; i++) |
759001c5 |
WRAP_PLANE(frame->extended_buf[i],
frame->extended_data[i + FF_ARRAY_ELEMS(frame->buf)],
frame->linesize[0]);
}
av_buffer_unref(&dummy_buf); |
e1c2a5a0 |
|
cdfa1c89 |
end: |
a7f46586 |
frame->width = avctx->width;
frame->height = avctx->height;
|
759001c5 |
return 0; |
0eea2129 |
|
759001c5 |
fail:
avctx->release_buffer(avctx, frame);
av_freep(&priv);
av_buffer_unref(&dummy_buf);
return ret; |
44e19145 |
} |
7950e519 |
FF_ENABLE_DEPRECATION_WARNINGS |
759001c5 |
#endif
|
a7f46586 |
ret = avctx->get_buffer2(avctx, frame, flags);
|
f7e85ee9 |
/* Report the display size unless the caller fixed the dimensions. */
if (avctx->codec_type == AVMEDIA_TYPE_VIDEO && !override_dimensions) { |
a7f46586 |
frame->width = avctx->width;
frame->height = avctx->height;
}
return ret; |
759001c5 |
}
|
1ec94b0f |
/* Public (internal-API) entry point: allocate frame buffers, logging on
 * failure so decoders do not have to. */
int ff_get_buffer(AVCodecContext *avctx, AVFrame *frame, int flags)
{
    int ret = get_buffer_internal(avctx, frame, flags);

    if (ret < 0)
        av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
    return ret;
}
/* Re-acquire a video buffer while preserving its contents:
 * - geometry/format change: drop and allocate fresh;
 * - no buffer yet: plain allocation;
 * - writable: reuse in place, refreshing pts/reordered_opaque;
 * - otherwise: allocate a new buffer and copy the old picture into it. */
static int reget_buffer_internal(AVCodecContext *avctx, AVFrame *frame)
{
    AVFrame *old;
    int ret;

    av_assert0(avctx->codec_type == AVMEDIA_TYPE_VIDEO);

    if (frame->data[0] && (frame->width != avctx->width || frame->height != avctx->height || frame->format != avctx->pix_fmt)) {
        av_log(avctx, AV_LOG_WARNING, "Picture changed from size:%dx%d fmt:%s to size:%dx%d fmt:%s in reget buffer()\n",
               frame->width, frame->height, av_get_pix_fmt_name(frame->format), avctx->width, avctx->height, av_get_pix_fmt_name(avctx->pix_fmt));
        av_frame_unref(frame);
    }

    ff_init_buffer_info(avctx, frame);

    if (!frame->data[0])
        return ff_get_buffer(avctx, frame, AV_GET_BUFFER_FLAG_REF);

    if (av_frame_is_writable(frame)) {
        frame->pkt_pts          = avctx->internal->pkt ? avctx->internal->pkt->pts : AV_NOPTS_VALUE;
        frame->reordered_opaque = avctx->reordered_opaque;
        return 0;
    }

    old = av_frame_alloc();
    if (!old)
        return AVERROR(ENOMEM);
    av_frame_move_ref(old, frame);

    ret = ff_get_buffer(avctx, frame, AV_GET_BUFFER_FLAG_REF);
    if (ret < 0) {
        av_frame_free(&old);
        return ret;
    }

    av_frame_copy(frame, old);
    av_frame_free(&old);

    return 0;
}
|
1ec94b0f |
/* Public (internal-API) entry point for buffer re-acquisition; logs on
 * failure so decoders do not have to. */
int ff_reget_buffer(AVCodecContext *avctx, AVFrame *frame)
{
    int ret = reget_buffer_internal(avctx, frame);

    if (ret < 0)
        av_log(avctx, AV_LOG_ERROR, "reget_buffer() failed\n");
    return ret;
}
|
759001c5 |
#if FF_API_GET_BUFFER
/* Deprecated (FF_API_GET_BUFFER): with refcounted frames, releasing is just
 * dropping the references. Video only. */
void avcodec_default_release_buffer(AVCodecContext *s, AVFrame *pic)
{ |
80e9e63c |
av_assert0(s->codec_type == AVMEDIA_TYPE_VIDEO);
|
759001c5 |
av_frame_unref(pic);
}
/* Deprecated (FF_API_GET_BUFFER): must never be reached — reget goes through
 * ff_reget_buffer() now. */
int avcodec_default_reget_buffer(AVCodecContext *s, AVFrame *pic)
{
av_assert0(0); |
252c0bfd |
return AVERROR_BUG; |
759001c5 |
}
#endif
|
419ffb23 |
/**
 * Default (single-threaded) implementation of AVCodecContext.execute:
 * runs func() serially over all count jobs, treating arg as an array of
 * count elements of the given size, and stores per-job results in ret[]
 * when ret is non-NULL. Always returns 0.
 */
int avcodec_default_execute(AVCodecContext *c, int (*func)(AVCodecContext *c2, void *arg2), void *arg, int *ret, int count, int size)
{
    int job = 0;

    while (job < count) {
        int res = func(c, (char *)arg + job * size);
        if (ret)
            ret[job] = res;
        job++;
    }
    return 0;
}
|
419ffb23 |
/**
 * Default (single-threaded) implementation of AVCodecContext.execute2:
 * invokes func() serially for each job number 0..count-1 with thread
 * number 0, collecting per-job results in ret[] when ret is non-NULL.
 * Always returns 0.
 */
int avcodec_default_execute2(AVCodecContext *c, int (*func)(AVCodecContext *c2, void *arg2, int jobnr, int threadnr), void *arg, int *ret, int count)
{
    int job = 0;

    while (job < count) {
        int res = func(c, arg, job, 0);
        if (ret)
            ret[job] = res;
        job++;
    }
    return 0;
}
|
cc085993 |
static int is_hwaccel_pix_fmt(enum AVPixelFormat pix_fmt)
{
const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(pix_fmt); |
e6c4ac7b |
return desc->flags & AV_PIX_FMT_FLAG_HWACCEL; |
cc085993 |
}
|
716d413c |
enum AVPixelFormat avcodec_default_get_format(struct AVCodecContext *s, const enum AVPixelFormat *fmt) |
419ffb23 |
{ |
cc085993 |
while (*fmt != AV_PIX_FMT_NONE && is_hwaccel_pix_fmt(*fmt)) |
c269cf68 |
++fmt; |
a33c7159 |
return fmt[0];
}
|
d7b3ee9a |
#if FF_API_AVFRAME_LAVC
/* Deprecated AVFrame allocation helpers, kept while FF_API_AVFRAME_LAVC is
 * enabled; callers should use av_frame_alloc()/av_frame_free()/
 * av_frame_unref() directly. */

/* Reset a frame to defaults: zero the struct, then let av_frame_unref()
 * re-establish the canonical default field values. */
void avcodec_get_frame_defaults(AVFrame *frame)
{
#if LIBAVCODEC_VERSION_MAJOR >= 55
    // extended_data should explicitly be freed when needed, this code is unsafe currently
    // also this is not compatible to the <55 ABI/API
    // NOTE: the "&& 0" deliberately disables this free; kept for reference.
    if (frame->extended_data != frame->data && 0)
        av_freep(&frame->extended_data);
#endif

    memset(frame, 0, sizeof(AVFrame));
    av_frame_unref(frame);
}

/* Deprecated alias for av_frame_alloc(). */
AVFrame *avcodec_alloc_frame(void)
{
    return av_frame_alloc();
}

/* Deprecated alias for av_frame_free(). */
void avcodec_free_frame(AVFrame **frame)
{
    av_frame_free(frame);
}
#endif
1e491e29 |
|
0eb0b310 |
/* Generated getter/setter pairs (av_codec_get_*/av_codec_set_*) for
 * AVCodecContext fields that are not part of the public ABI-stable layout. */
MAKE_ACCESSORS(AVCodecContext, codec, AVRational, pkt_timebase)
MAKE_ACCESSORS(AVCodecContext, codec, const AVCodecDescriptor *, codec_descriptor)
MAKE_ACCESSORS(AVCodecContext, codec, int, lowres)
MAKE_ACCESSORS(AVCodecContext, codec, int, seek_preroll)
MAKE_ACCESSORS(AVCodecContext, codec, uint16_t*, chroma_intra_matrix)

/* Accessor for AVCodec.max_lowres (the codec struct itself, not the context). */
int av_codec_get_max_lowres(const AVCodec *codec)
{
    return codec->max_lowres;
}
|
3ee8ca9b |
/* Reset an AVSubtitle to defaults: all-zero except pts, whose "unset"
 * sentinel is AV_NOPTS_VALUE rather than 0. */
static void avcodec_get_subtitle_defaults(AVSubtitle *sub)
{
    memset(sub, 0, sizeof(*sub));
    sub->pts = AV_NOPTS_VALUE;
}
|
3293b1ad |
/**
 * Derive an effective bit rate for the context.
 *
 * For audio with a known fixed bits-per-sample codec the rate is computed
 * from sample_rate * channels * bits_per_sample; otherwise the context's
 * declared bit_rate is used for audio/video/data/subtitle/attachment
 * streams, and 0 for anything else.
 */
static int get_bit_rate(AVCodecContext *ctx)
{
    int bit_rate = 0;

    switch (ctx->codec_type) {
    case AVMEDIA_TYPE_AUDIO: {
        int bps = av_get_bits_per_sample(ctx->codec_id);
        bit_rate = bps ? ctx->sample_rate * ctx->channels * bps : ctx->bit_rate;
        break;
    }
    case AVMEDIA_TYPE_VIDEO:
    case AVMEDIA_TYPE_DATA:
    case AVMEDIA_TYPE_SUBTITLE:
    case AVMEDIA_TYPE_ATTACHMENT:
        bit_rate = ctx->bit_rate;
        break;
    default:
        break;
    }
    return bit_rate;
}
|
2470851f |
/**
 * Open a codec from within code that already holds the avcodec lock.
 *
 * avcodec_open2() acquires the global avcodec lock itself, so a caller that
 * already holds it must release it first to avoid deadlock/recursion, then
 * re-acquire it afterwards. The unlock/lock ordering here is therefore
 * essential — do not reorder.
 */
int attribute_align_arg ff_codec_open2_recursive(AVCodecContext *avctx, const AVCodec *codec, AVDictionary **options)
{
    int ret = 0;

    /* Drop the lock so the nested avcodec_open2() can take it itself. */
    ff_unlock_avcodec();

    ret = avcodec_open2(avctx, codec, options);

    /* Restore the caller's locked state before returning. */
    ff_lock_avcodec(avctx);
    return ret;
}
|
0a0f19b5 |
/**
 * Open the codec: validate the context/codec pairing, allocate internal
 * state, apply options, validate encoder parameters, set up threading and
 * finally run the codec's init() callback.
 *
 * Locking: runs under the global avcodec lock from ff_lock_avcodec() until
 * the "end" label. Error paths after internal allocation jump to
 * free_and_end, which tears down partially-built state and falls through
 * to end (unlock + options handover).
 *
 * Options: *options is consumed; on return it is replaced with the
 * dictionary of options that were NOT recognized/applied.
 *
 * @return 0 on success, a negative AVERROR code on failure.
 */
int attribute_align_arg avcodec_open2(AVCodecContext *avctx, const AVCodec *codec, AVDictionary **options)
{
    int ret = 0;
    AVDictionary *tmp = NULL;

    /* Opening an already-open context is a no-op. */
    if (avcodec_is_open(avctx))
        return 0;

    /* The codec may come either as an argument or pre-set on the context,
     * but the two must not conflict. */
    if ((!codec && !avctx->codec)) {
        av_log(avctx, AV_LOG_ERROR, "No codec provided to avcodec_open2()\n");
        return AVERROR(EINVAL);
    }
    if ((codec && avctx->codec && codec != avctx->codec)) {
        av_log(avctx, AV_LOG_ERROR, "This AVCodecContext was allocated for %s, "
                                    "but %s passed to avcodec_open2()\n", avctx->codec->name, codec->name);
        return AVERROR(EINVAL);
    }
    if (!codec)
        codec = avctx->codec;

    if (avctx->extradata_size < 0 || avctx->extradata_size >= FF_MAX_EXTRADATA_SIZE)
        return AVERROR(EINVAL);

    /* Work on a copy so the caller's dictionary can be given back holding
     * only the unconsumed options. */
    if (options)
        av_dict_copy(&tmp, *options, 0);

    ret = ff_lock_avcodec(avctx);
    if (ret < 0)
        return ret;

    /* --- allocate per-context internal state --- */
    avctx->internal = av_mallocz(sizeof(AVCodecInternal));
    if (!avctx->internal) {
        ret = AVERROR(ENOMEM);
        goto end;
    }

    avctx->internal->pool = av_mallocz(sizeof(*avctx->internal->pool));
    if (!avctx->internal->pool) {
        ret = AVERROR(ENOMEM);
        goto free_and_end;
    }

    avctx->internal->to_free = av_frame_alloc();
    if (!avctx->internal->to_free) {
        ret = AVERROR(ENOMEM);
        goto free_and_end;
    }

    /* --- codec private data + options --- */
    if (codec->priv_data_size > 0) {
        if (!avctx->priv_data) {
            avctx->priv_data = av_mallocz(codec->priv_data_size);
            if (!avctx->priv_data) {
                ret = AVERROR(ENOMEM);
                goto end;
            }
            if (codec->priv_class) {
                /* First member of priv_data is the AVClass pointer, which
                 * enables AVOption handling on it. */
                *(const AVClass **)avctx->priv_data = codec->priv_class;
                av_opt_set_defaults(avctx->priv_data);
            }
        }
        if (codec->priv_class && (ret = av_opt_set_dict(avctx->priv_data, &tmp)) < 0)
            goto free_and_end;
    } else {
        avctx->priv_data = NULL;
    }
    if ((ret = av_opt_set_dict(avctx, &tmp)) < 0)
        goto free_and_end;

    /* --- dimensions --- */
    // only call ff_set_dimensions() for non H.264/VP6F codecs so as not to overwrite previously setup dimensions
    if (!(avctx->coded_width && avctx->coded_height && avctx->width && avctx->height &&
          (avctx->codec_id == AV_CODEC_ID_H264 || avctx->codec_id == AV_CODEC_ID_VP6F))) {
        if (avctx->coded_width && avctx->coded_height)
            ret = ff_set_dimensions(avctx, avctx->coded_width, avctx->coded_height);
        else if (avctx->width && avctx->height)
            ret = ff_set_dimensions(avctx, avctx->width, avctx->height);
        if (ret < 0)
            goto free_and_end;
    }

    /* Invalid dimensions are not fatal here: reset to 0 and let the codec
     * establish them later. */
    if ((avctx->coded_width || avctx->coded_height || avctx->width || avctx->height)
        && (  av_image_check_size(avctx->coded_width, avctx->coded_height, 0, avctx) < 0
           || av_image_check_size(avctx->width,       avctx->height,       0, avctx) < 0)) {
        av_log(avctx, AV_LOG_WARNING, "Ignoring invalid width/height values\n");
        ff_set_dimensions(avctx, 0, 0);
    }

    /* if the decoder init function was already called previously,
     * free the already allocated subtitle_header before overwriting it */
    if (av_codec_is_decoder(codec))
        av_freep(&avctx->subtitle_header);

    if (avctx->channels > FF_SANE_NB_CHANNELS) {
        ret = AVERROR(EINVAL);
        goto free_and_end;
    }

    /* --- bind codec to context and sanity-check type/id --- */
    avctx->codec = codec;
    if ((avctx->codec_type == AVMEDIA_TYPE_UNKNOWN || avctx->codec_type == codec->type) &&
        avctx->codec_id == AV_CODEC_ID_NONE) {
        avctx->codec_type = codec->type;
        avctx->codec_id   = codec->id;
    }
    if (avctx->codec_id != codec->id || (avctx->codec_type != codec->type
                                         && avctx->codec_type != AVMEDIA_TYPE_ATTACHMENT)) {
        av_log(avctx, AV_LOG_ERROR, "Codec type or id mismatches\n");
        ret = AVERROR(EINVAL);
        goto free_and_end;
    }
    avctx->frame_number = 0;
    avctx->codec_descriptor = avcodec_descriptor_get(avctx->codec_id);

    /* Refuse experimental codecs unless the user opted in; if a stable
     * alternative with the same id exists, suggest it. */
    if (avctx->codec->capabilities & CODEC_CAP_EXPERIMENTAL &&
        avctx->strict_std_compliance > FF_COMPLIANCE_EXPERIMENTAL) {
        const char *codec_string = av_codec_is_encoder(codec) ? "encoder" : "decoder";
        AVCodec *codec2;
        av_log(avctx, AV_LOG_ERROR,
               "The %s '%s' is experimental but experimental codecs are not enabled, "
               "add '-strict %d' if you want to use it.\n",
               codec_string, codec->name, FF_COMPLIANCE_EXPERIMENTAL);
        codec2 = av_codec_is_encoder(codec) ? avcodec_find_encoder(codec->id) : avcodec_find_decoder(codec->id);
        if (!(codec2->capabilities & CODEC_CAP_EXPERIMENTAL))
            av_log(avctx, AV_LOG_ERROR, "Alternatively use the non experimental %s '%s'.\n",
                   codec_string, codec2->name);
        ret = AVERROR_EXPERIMENTAL;
        goto free_and_end;
    }

    /* Default audio time base: one tick per sample. */
    if (avctx->codec_type == AVMEDIA_TYPE_AUDIO &&
        (!avctx->time_base.num || !avctx->time_base.den)) {
        avctx->time_base.num = 1;
        avctx->time_base.den = avctx->sample_rate;
    }

    if (!HAVE_THREADS)
        av_log(avctx, AV_LOG_WARNING, "Warning: not compiled with thread support, using thread emulation\n");

    if (CONFIG_FRAME_THREAD_ENCODER) {
        ff_unlock_avcodec(); //we will instantiate a few encoders thus kick the counter to prevent false detection of a problem
        ret = ff_frame_thread_encoder_init(avctx, options ? *options : NULL);
        ff_lock_avcodec(avctx);
        if (ret < 0)
            goto free_and_end;
    }

    /* Regular threading is skipped when the frame-thread encoder owns the
     * frame threads. */
    if (HAVE_THREADS
        && !(avctx->internal->frame_thread_encoder && (avctx->active_thread_type&FF_THREAD_FRAME))) {
        ret = ff_thread_init(avctx);
        if (ret < 0) {
            goto free_and_end;
        }
    }
    if (!HAVE_THREADS && !(codec->capabilities & CODEC_CAP_AUTO_THREADS))
        avctx->thread_count = 1;

    if (avctx->codec->max_lowres < avctx->lowres || avctx->lowres < 0) {
        av_log(avctx, AV_LOG_ERROR, "The maximum value for lowres supported by the decoder is %d\n",
               avctx->codec->max_lowres);
        ret = AVERROR(EINVAL);
        goto free_and_end;
    }

    /* --- encoder-only parameter validation --- */
    if (av_codec_is_encoder(avctx->codec)) {
        int i;
        /* sample_fmt must be in the codec's supported list; for mono, a
         * packed/planar variant of the requested format is accepted and
         * substituted transparently. */
        if (avctx->codec->sample_fmts) {
            for (i = 0; avctx->codec->sample_fmts[i] != AV_SAMPLE_FMT_NONE; i++) {
                if (avctx->sample_fmt == avctx->codec->sample_fmts[i])
                    break;
                if (avctx->channels == 1 &&
                    av_get_planar_sample_fmt(avctx->sample_fmt) ==
                    av_get_planar_sample_fmt(avctx->codec->sample_fmts[i])) {
                    avctx->sample_fmt = avctx->codec->sample_fmts[i];
                    break;
                }
            }
            if (avctx->codec->sample_fmts[i] == AV_SAMPLE_FMT_NONE) {
                char buf[128];
                snprintf(buf, sizeof(buf), "%d", avctx->sample_fmt);
                av_log(avctx, AV_LOG_ERROR, "Specified sample format %s is invalid or not supported\n",
                       (char *)av_x_if_null(av_get_sample_fmt_name(avctx->sample_fmt), buf));
                ret = AVERROR(EINVAL);
                goto free_and_end;
            }
        }
        /* pix_fmt must be supported, except (M/L)JPEG may use extra formats
         * at unofficial compliance or looser. */
        if (avctx->codec->pix_fmts) {
            for (i = 0; avctx->codec->pix_fmts[i] != AV_PIX_FMT_NONE; i++)
                if (avctx->pix_fmt == avctx->codec->pix_fmts[i])
                    break;
            if (avctx->codec->pix_fmts[i] == AV_PIX_FMT_NONE
                && !((avctx->codec_id == AV_CODEC_ID_MJPEG || avctx->codec_id == AV_CODEC_ID_LJPEG)
                     && avctx->strict_std_compliance <= FF_COMPLIANCE_UNOFFICIAL)) {
                char buf[128];
                snprintf(buf, sizeof(buf), "%d", avctx->pix_fmt);
                av_log(avctx, AV_LOG_ERROR, "Specified pixel format %s is invalid or not supported\n",
                       (char *)av_x_if_null(av_get_pix_fmt_name(avctx->pix_fmt), buf));
                ret = AVERROR(EINVAL);
                goto free_and_end;
            }
        }
        if (avctx->codec->supported_samplerates) {
            for (i = 0; avctx->codec->supported_samplerates[i] != 0; i++)
                if (avctx->sample_rate == avctx->codec->supported_samplerates[i])
                    break;
            if (avctx->codec->supported_samplerates[i] == 0) {
                av_log(avctx, AV_LOG_ERROR, "Specified sample rate %d is not supported\n",
                       avctx->sample_rate);
                ret = AVERROR(EINVAL);
                goto free_and_end;
            }
        }
        if (avctx->codec->channel_layouts) {
            if (!avctx->channel_layout) {
                av_log(avctx, AV_LOG_WARNING, "Channel layout not specified\n");
            } else {
                for (i = 0; avctx->codec->channel_layouts[i] != 0; i++)
                    if (avctx->channel_layout == avctx->codec->channel_layouts[i])
                        break;
                if (avctx->codec->channel_layouts[i] == 0) {
                    char buf[512];
                    av_get_channel_layout_string(buf, sizeof(buf), -1, avctx->channel_layout);
                    av_log(avctx, AV_LOG_ERROR, "Specified channel layout '%s' is not supported\n", buf);
                    ret = AVERROR(EINVAL);
                    goto free_and_end;
                }
            }
        }
        /* channel_layout and channels must agree; if only the layout is
         * set, derive the channel count from it. */
        if (avctx->channel_layout && avctx->channels) {
            int channels = av_get_channel_layout_nb_channels(avctx->channel_layout);
            if (channels != avctx->channels) {
                char buf[512];
                av_get_channel_layout_string(buf, sizeof(buf), -1, avctx->channel_layout);
                av_log(avctx, AV_LOG_ERROR,
                       "Channel layout '%s' with %d channels does not match number of specified channels %d\n",
                       buf, channels, avctx->channels);
                ret = AVERROR(EINVAL);
                goto free_and_end;
            }
        } else if (avctx->channel_layout) {
            avctx->channels = av_get_channel_layout_nb_channels(avctx->channel_layout);
        }
        if(avctx->codec_type == AVMEDIA_TYPE_VIDEO &&
           avctx->codec_id != AV_CODEC_ID_PNG // For mplayer
        ) {
            if (avctx->width <= 0 || avctx->height <= 0) {
                av_log(avctx, AV_LOG_ERROR, "dimensions not set\n");
                ret = AVERROR(EINVAL);
                goto free_and_end;
            }
        }
        if (   (avctx->codec_type == AVMEDIA_TYPE_VIDEO || avctx->codec_type == AVMEDIA_TYPE_AUDIO)
            && avctx->bit_rate>0 && avctx->bit_rate<1000) {
            av_log(avctx, AV_LOG_WARNING, "Bitrate %d is extremely low, maybe you mean %dk\n", avctx->bit_rate, avctx->bit_rate);
        }

        if (!avctx->rc_initial_buffer_occupancy)
            avctx->rc_initial_buffer_occupancy = avctx->rc_buffer_size * 3 / 4;
    }

    /* Reset pts-guessing statistics used by guess_correct_pts(). */
    avctx->pts_correction_num_faulty_pts =
    avctx->pts_correction_num_faulty_dts = 0;
    avctx->pts_correction_last_pts =
    avctx->pts_correction_last_dts = INT64_MIN;

    /* With frame threading (and no frame-thread encoder), ff_thread_init()
     * already ran the per-thread init; skip the direct call. */
    if (   avctx->codec->init && (!(avctx->active_thread_type&FF_THREAD_FRAME)
        || avctx->internal->frame_thread_encoder)) {
        ret = avctx->codec->init(avctx);
        if (ret < 0) {
            goto free_and_end;
        }
    }

    ret=0;

    /* --- decoder-only post-init fixups --- */
    if (av_codec_is_decoder(avctx->codec)) {
        if (!avctx->bit_rate)
            avctx->bit_rate = get_bit_rate(avctx);
        /* validate channel layout from the decoder */
        if (avctx->channel_layout) {
            int channels = av_get_channel_layout_nb_channels(avctx->channel_layout);
            if (!avctx->channels)
                avctx->channels = channels;
            else if (channels != avctx->channels) {
                char buf[512];
                av_get_channel_layout_string(buf, sizeof(buf), -1, avctx->channel_layout);
                av_log(avctx, AV_LOG_WARNING,
                       "Channel layout '%s' with %d channels does not match specified number of channels %d: "
                       "ignoring specified channel layout\n",
                       buf, channels, avctx->channels);
                avctx->channel_layout = 0;
            }
        }
        /* NOTE(review): "channels && channels < 0" is redundant (the first
         * operand is implied by the second); effectively this rejects
         * negative or insanely large channel counts. */
        if (avctx->channels && avctx->channels < 0 ||
            avctx->channels > FF_SANE_NB_CHANNELS) {
            ret = AVERROR(EINVAL);
            goto free_and_end;
        }
        /* Subtitle character-encoding setup: only meaningful for text-based
         * subtitle decoders, and requires iconv when pre-decoder conversion
         * is requested. */
        if (avctx->sub_charenc) {
            if (avctx->codec_type != AVMEDIA_TYPE_SUBTITLE) {
                av_log(avctx, AV_LOG_ERROR, "Character encoding is only "
                       "supported with subtitles codecs\n");
                ret = AVERROR(EINVAL);
                goto free_and_end;
            } else if (avctx->codec_descriptor->props & AV_CODEC_PROP_BITMAP_SUB) {
                av_log(avctx, AV_LOG_WARNING, "Codec '%s' is bitmap-based, "
                       "subtitles character encoding will be ignored\n",
                       avctx->codec_descriptor->name);
                avctx->sub_charenc_mode = FF_SUB_CHARENC_MODE_DO_NOTHING;
            } else {
                /* input character encoding is set for a text based subtitle
                 * codec at this point */
                if (avctx->sub_charenc_mode == FF_SUB_CHARENC_MODE_AUTOMATIC)
                    avctx->sub_charenc_mode = FF_SUB_CHARENC_MODE_PRE_DECODER;

                if (avctx->sub_charenc_mode == FF_SUB_CHARENC_MODE_PRE_DECODER) {
#if CONFIG_ICONV
                    /* Probe the conversion once so failures surface at open
                     * time rather than mid-decode. */
                    iconv_t cd = iconv_open("UTF-8", avctx->sub_charenc);
                    if (cd == (iconv_t)-1) {
                        av_log(avctx, AV_LOG_ERROR, "Unable to open iconv context "
                               "with input character encoding \"%s\"\n", avctx->sub_charenc);
                        ret = AVERROR(errno);
                        goto free_and_end;
                    }
                    iconv_close(cd);
#else
                    av_log(avctx, AV_LOG_ERROR, "Character encoding subtitles "
                           "conversion needs a libavcodec built with iconv support "
                           "for this codec\n");
                    ret = AVERROR(ENOSYS);
                    goto free_and_end;
#endif
                }
            }
        }
    }
end:
    ff_unlock_avcodec();
    /* Hand back the options that were not consumed. */
    if (options) {
        av_dict_free(options);
        *options = tmp;
    }

    return ret;
free_and_end:
    /* Tear down everything allocated above, then fall through to end. */
    av_dict_free(&tmp);
    av_freep(&avctx->priv_data);
    if (avctx->internal) {
        av_frame_free(&avctx->internal->to_free);
        av_freep(&avctx->internal->pool);
    }
    av_freep(&avctx->internal);
    avctx->codec = NULL;
    goto end;
}
|
9a0e2081 |
/**
 * Ensure avpkt has a data buffer of at least `size` bytes for an encoder.
 *
 * Three cases:
 *  - avctx given and the user supplied no/too-small data: hand out the
 *    context's reusable internal byte_buffer (caller must later copy/dup,
 *    see the encode functions);
 *  - user-supplied data of sufficient size: keep it, re-init the packet
 *    while preserving buf (and the deprecated destruct);
 *  - no data at all: allocate a fresh packet of `size` bytes.
 *
 * @param avctx may be NULL (then the internal-buffer path is skipped)
 * @param size  minimum payload size required, excluding padding
 * @return 0 on success, a negative AVERROR code on failure.
 */
int ff_alloc_packet2(AVCodecContext *avctx, AVPacket *avpkt, int64_t size)
{
    if (avpkt->size < 0) {
        av_log(avctx, AV_LOG_ERROR, "Invalid negative user packet size %d\n", avpkt->size);
        return AVERROR(EINVAL);
    }
    /* Reserve room so size + padding cannot overflow int. */
    if (size < 0 || size > INT_MAX - FF_INPUT_BUFFER_PADDING_SIZE) {
        av_log(avctx, AV_LOG_ERROR, "Invalid minimum required packet size %"PRId64" (max allowed is %d)\n",
               size, INT_MAX - FF_INPUT_BUFFER_PADDING_SIZE);
        return AVERROR(EINVAL);
    }

    if (avctx) {
        /* The caller must never pass the internal buffer back in. */
        av_assert0(!avpkt->data || avpkt->data != avctx->internal->byte_buffer);
        if (!avpkt->data || avpkt->size < size) {
            av_fast_padded_malloc(&avctx->internal->byte_buffer, &avctx->internal->byte_buffer_size, size);
            avpkt->data = avctx->internal->byte_buffer;
            avpkt->size = avctx->internal->byte_buffer_size;
            avpkt->destruct = NULL;
        }
    }

    if (avpkt->data) {
        /* Save buf (and deprecated destruct) across av_init_packet(),
         * which would otherwise reset them. */
        AVBufferRef *buf = avpkt->buf;
#if FF_API_DESTRUCT_PACKET
FF_DISABLE_DEPRECATION_WARNINGS
        void *destruct = avpkt->destruct;
FF_ENABLE_DEPRECATION_WARNINGS
#endif

        if (avpkt->size < size) {
            av_log(avctx, AV_LOG_ERROR, "User packet is too small (%d < %"PRId64")\n", avpkt->size, size);
            return AVERROR(EINVAL);
        }

        av_init_packet(avpkt);
#if FF_API_DESTRUCT_PACKET
FF_DISABLE_DEPRECATION_WARNINGS
        avpkt->destruct = destruct;
FF_ENABLE_DEPRECATION_WARNINGS
#endif
        avpkt->buf = buf;
        avpkt->size = size;
        return 0;
    } else {
        int ret = av_new_packet(avpkt, size);
        if (ret < 0)
            av_log(avctx, AV_LOG_ERROR, "Failed to allocate packet of size %"PRId64"\n", size);
        return ret;
    }
}
|
00663de3 |
/* Compatibility wrapper: ff_alloc_packet2() without a context, so the
 * internal byte_buffer fast path is never used. */
int ff_alloc_packet(AVPacket *avpkt, int size)
{
    return ff_alloc_packet2(NULL, avpkt, size);
}
|
a5117a24 |
/**
 * Pad last frame with silence.
 *
 * Allocates a new frame of exactly s->frame_size samples, copies the
 * src samples into it, fills the remainder with silence, and copies the
 * frame properties over. On success *dst owns the new frame; the caller
 * is responsible for freeing it. src is not modified.
 *
 * @return 0 on success, a negative AVERROR code on failure (then *dst is
 *         left untouched).
 */
static int pad_last_frame(AVCodecContext *s, AVFrame **dst, const AVFrame *src)
{
    AVFrame *frame = NULL;
    int ret;

    if (!(frame = av_frame_alloc()))
        return AVERROR(ENOMEM);

    /* Allocate a full-size frame with the source's format/layout. */
    frame->format         = src->format;
    frame->channel_layout = src->channel_layout;
    av_frame_set_channels(frame, av_frame_get_channels(src));
    frame->nb_samples     = s->frame_size;
    ret = av_frame_get_buffer(frame, 32);
    if (ret < 0)
        goto fail;

    ret = av_frame_copy_props(frame, src);
    if (ret < 0)
        goto fail;

    /* Copy the real samples, then silence the tail up to frame_size. */
    if ((ret = av_samples_copy(frame->extended_data, src->extended_data, 0, 0,
                               src->nb_samples, s->channels, s->sample_fmt)) < 0)
        goto fail;
    if ((ret = av_samples_set_silence(frame->extended_data, src->nb_samples,
                                      frame->nb_samples - src->nb_samples,
                                      s->channels, s->sample_fmt)) < 0)
        goto fail;

    *dst = frame;

    return 0;

fail:
    av_frame_free(&frame);
    return ret;
}
|
b2c75b6e |
/**
 * Encode one audio frame into avpkt.
 *
 * Handles: extended_data fixup for legacy callers, frame-size validation
 * (including silence-padding of a short final frame for fixed-frame-size
 * codecs), default pts/dts/duration stamping, and the hand-back protocol
 * for packets the encoder wrote into the context's internal byte_buffer
 * (copied into the user's buffer, or duplicated into an owned one).
 *
 * @param got_packet_ptr set to 1 if avpkt contains output, else 0
 * @return 0 on success, negative AVERROR on failure.
 */
int attribute_align_arg avcodec_encode_audio2(AVCodecContext *avctx,
                                              AVPacket *avpkt,
                                              const AVFrame *frame,
                                              int *got_packet_ptr)
{
    AVFrame tmp;
    AVFrame *padded_frame = NULL;
    int ret;
    /* Remember the caller's packet so internal-buffer output can be copied
     * back into it afterwards. */
    AVPacket user_pkt = *avpkt;
    int needs_realloc = !user_pkt.data;

    *got_packet_ptr = 0;

    /* NULL frame only means "flush" for codecs with delay. */
    if (!(avctx->codec->capabilities & CODEC_CAP_DELAY) && !frame) {
        av_free_packet(avpkt);
        av_init_packet(avpkt);
        return 0;
    }

    /* ensure that extended_data is properly set */
    if (frame && !frame->extended_data) {
        if (av_sample_fmt_is_planar(avctx->sample_fmt) &&
            avctx->channels > AV_NUM_DATA_POINTERS) {
            av_log(avctx, AV_LOG_ERROR, "Encoding to a planar sample format, "
                                        "with more than %d channels, but extended_data is not set.\n",
                   AV_NUM_DATA_POINTERS);
            return AVERROR(EINVAL);
        }
        av_log(avctx, AV_LOG_WARNING, "extended_data is not set.\n");

        /* Shallow-copy onto the stack so we can point extended_data at
         * data[] without touching the caller's const frame. */
        tmp = *frame;
        tmp.extended_data = tmp.data;
        frame = &tmp;
    }

    /* check for valid frame size */
    if (frame) {
        if (avctx->codec->capabilities & CODEC_CAP_SMALL_LAST_FRAME) {
            if (frame->nb_samples > avctx->frame_size) {
                av_log(avctx, AV_LOG_ERROR, "more samples than frame size (avcodec_encode_audio2)\n");
                return AVERROR(EINVAL);
            }
        } else if (!(avctx->codec->capabilities & CODEC_CAP_VARIABLE_FRAME_SIZE)) {
            /* Fixed-frame-size codec: a single short frame at the end is
             * padded with silence; after that, any size mismatch is fatal. */
            if (frame->nb_samples < avctx->frame_size &&
                !avctx->internal->last_audio_frame) {
                ret = pad_last_frame(avctx, &padded_frame, frame);
                if (ret < 0)
                    return ret;

                frame = padded_frame;
                avctx->internal->last_audio_frame = 1;
            }

            if (frame->nb_samples != avctx->frame_size) {
                av_log(avctx, AV_LOG_ERROR, "nb_samples (%d) != frame_size (%d) (avcodec_encode_audio2)\n", frame->nb_samples, avctx->frame_size);
                ret = AVERROR(EINVAL);
                goto end;
            }
        }
    }

    ret = avctx->codec->encode2(avctx, avpkt, frame, got_packet_ptr);
    if (!ret) {
        if (*got_packet_ptr) {
            /* Without codec delay the output maps 1:1 to the input frame,
             * so default timestamps can be derived from it. */
            if (!(avctx->codec->capabilities & CODEC_CAP_DELAY)) {
                if (avpkt->pts == AV_NOPTS_VALUE)
                    avpkt->pts = frame->pts;
                if (!avpkt->duration)
                    avpkt->duration = ff_samples_to_time_base(avctx,
                                                              frame->nb_samples);
            }
            avpkt->dts = avpkt->pts;
        } else {
            avpkt->size = 0;
        }
    }
    /* Output landed in the context's internal buffer: copy it into the
     * user's buffer if one was provided, otherwise dup into owned memory. */
    if (avpkt->data && avpkt->data == avctx->internal->byte_buffer) {
        needs_realloc = 0;
        if (user_pkt.data) {
            if (user_pkt.size >= avpkt->size) {
                memcpy(user_pkt.data, avpkt->data, avpkt->size);
            } else {
                av_log(avctx, AV_LOG_ERROR, "Provided packet is too small, needs to be %d\n", avpkt->size);
                avpkt->size = user_pkt.size;
                ret = -1;
            }
            avpkt->buf      = user_pkt.buf;
            avpkt->data     = user_pkt.data;
            avpkt->destruct = user_pkt.destruct;
        } else {
            if (av_dup_packet(avpkt) < 0) {
                ret = AVERROR(ENOMEM);
            }
        }
    }

    if (!ret) {
        /* Shrink an over-allocated self-owned buffer to size + padding. */
        if (needs_realloc && avpkt->data) {
            ret = av_buffer_realloc(&avpkt->buf, avpkt->size + FF_INPUT_BUFFER_PADDING_SIZE);
            if (ret >= 0)
                avpkt->data = avpkt->buf->data;
        }

        avctx->frame_number++;
    }

    if (ret < 0 || !*got_packet_ptr) {
        av_free_packet(avpkt);
        av_init_packet(avpkt);
        goto end;
    }

    /* NOTE: if we add any audio encoders which output non-keyframe packets,
     * this needs to be moved to the encoders, but for now we can do it
     * here to simplify things */
    avpkt->flags |= AV_PKT_FLAG_KEY;

end:
    av_frame_free(&padded_frame);

    return ret;
}
|
a5ef830b |
#if FF_API_OLD_ENCODE_AUDIO
/**
 * Deprecated audio encode entry point: adapts the old (buf, buf_size,
 * samples) interface onto avcodec_encode_audio2() by wrapping the raw
 * sample buffer in a temporary AVFrame with a fabricated pts.
 *
 * @return number of bytes written into buf, or a negative error code.
 */
int attribute_align_arg avcodec_encode_audio(AVCodecContext *avctx,
                                             uint8_t *buf, int buf_size,
                                             const short *samples)
{
    AVPacket pkt;
    AVFrame *frame;
    int ret, samples_size, got_packet;

    av_init_packet(&pkt);
    pkt.data = buf;
    pkt.size = buf_size;

    if (samples) {
        frame = av_frame_alloc();
        if (!frame)
            return AVERROR(ENOMEM);

        if (avctx->frame_size) {
            frame->nb_samples = avctx->frame_size;
        } else {
            /* if frame_size is not set, the number of samples must be
             * calculated from the buffer size */
            int64_t nb_samples;
            if (!av_get_bits_per_sample(avctx->codec_id)) {
                av_log(avctx, AV_LOG_ERROR, "avcodec_encode_audio() does not "
                                            "support this codec\n");
                av_frame_free(&frame);
                return AVERROR(EINVAL);
            }
            nb_samples = (int64_t)buf_size * 8 /
                         (av_get_bits_per_sample(avctx->codec_id) *
                          avctx->channels);
            if (nb_samples >= INT_MAX) {
                av_frame_free(&frame);
                return AVERROR(EINVAL);
            }
            frame->nb_samples = nb_samples;
        }

        /* it is assumed that the samples buffer is large enough based on the
         * relevant parameters */
        samples_size = av_samples_get_buffer_size(NULL, avctx->channels,
                                                  frame->nb_samples,
                                                  avctx->sample_fmt, 1);
        if ((ret = avcodec_fill_audio_frame(frame, avctx->channels,
                                            avctx->sample_fmt,
                                            (const uint8_t *)samples,
                                            samples_size, 1)) < 0) {
            av_frame_free(&frame);
            return ret;
        }

        /* fabricate frame pts from sample count.
         * this is needed because the avcodec_encode_audio() API does not have
         * a way for the user to provide pts */
        if (avctx->sample_rate && avctx->time_base.num)
            frame->pts = ff_samples_to_time_base(avctx,
                                                 avctx->internal->sample_count);
        else
            frame->pts = AV_NOPTS_VALUE;
        avctx->internal->sample_count += frame->nb_samples;
    } else {
        /* NULL samples == flush request, passed through as a NULL frame. */
        frame = NULL;
    }

    got_packet = 0;
    ret = avcodec_encode_audio2(avctx, &pkt, frame, &got_packet);
    if (!ret && got_packet && avctx->coded_frame) {
        avctx->coded_frame->pts       = pkt.pts;
        avctx->coded_frame->key_frame = !!(pkt.flags & AV_PKT_FLAG_KEY);
    }
    /* free any side data since we cannot return it */
    av_packet_free_side_data(&pkt);

    /* extended_data was set by avcodec_fill_audio_frame(), not by the
     * refcounted allocator, so it must be freed by hand. */
    if (frame && frame->extended_data != frame->data)
        av_freep(&frame->extended_data);

    av_frame_free(&frame);
    return ret ? ret : pkt.size;
}

#endif
|
52f82a11 |
#if FF_API_OLD_ENCODE_VIDEO
/**
 * Deprecated video encode entry point: adapts the old (buf, buf_size)
 * interface onto avcodec_encode_video2().
 *
 * @return number of bytes written into buf, or a negative value on error.
 */
int attribute_align_arg avcodec_encode_video(AVCodecContext *avctx, uint8_t *buf, int buf_size,
                                             const AVFrame *pict)
{
    AVPacket pkt;
    int ret, got_packet = 0;

    if (buf_size < FF_MIN_BUFFER_SIZE) {
        av_log(avctx, AV_LOG_ERROR, "buffer smaller than minimum size\n");
        return -1;
    }

    av_init_packet(&pkt);
    pkt.data = buf;
    pkt.size = buf_size;

    ret = avcodec_encode_video2(avctx, &pkt, pict, &got_packet);
    if (!ret && got_packet && avctx->coded_frame) {
        avctx->coded_frame->pts       = pkt.pts;
        avctx->coded_frame->key_frame = !!(pkt.flags & AV_PKT_FLAG_KEY);
    }

    /* free any side data since we cannot return it */
    if (pkt.side_data_elems > 0) {
        int i;
        for (i = 0; i < pkt.side_data_elems; i++)
            av_free(pkt.side_data[i].data);
        av_freep(&pkt.side_data);
        pkt.side_data_elems = 0;
    }

    return ret ? ret : pkt.size;
}

#endif
/**
 * Encode one video frame into avpkt.
 *
 * Dispatches to the frame-thread encoder when active; otherwise calls the
 * codec's encode2() directly, stamps default pts/dts for delay-free codecs,
 * and applies the same internal-byte_buffer hand-back protocol as
 * avcodec_encode_audio2().
 *
 * @param got_packet_ptr set to 1 if avpkt contains output, else 0
 * @return 0 on success, negative on error.
 */
int attribute_align_arg avcodec_encode_video2(AVCodecContext *avctx,
                                              AVPacket *avpkt,
                                              const AVFrame *frame,
                                              int *got_packet_ptr)
{
    int ret;
    /* Remember the caller's packet so internal-buffer output can be copied
     * back into it afterwards. */
    AVPacket user_pkt = *avpkt;
    int needs_realloc = !user_pkt.data;

    *got_packet_ptr = 0;

    if(CONFIG_FRAME_THREAD_ENCODER &&
       avctx->internal->frame_thread_encoder && (avctx->active_thread_type&FF_THREAD_FRAME))
        return ff_thread_video_encode_frame(avctx, avpkt, frame, got_packet_ptr);

    /* Two-pass: start this call with an empty stats string. */
    if ((avctx->flags&CODEC_FLAG_PASS1) && avctx->stats_out)
        avctx->stats_out[0] = '\0';

    /* NULL frame only means "flush" for codecs with delay. */
    if (!(avctx->codec->capabilities & CODEC_CAP_DELAY) && !frame) {
        av_free_packet(avpkt);
        av_init_packet(avpkt);
        avpkt->size = 0;
        return 0;
    }

    if (av_image_check_size(avctx->width, avctx->height, 0, avctx))
        return AVERROR(EINVAL);

    av_assert0(avctx->codec->encode2);

    ret = avctx->codec->encode2(avctx, avpkt, frame, got_packet_ptr);
    av_assert0(ret <= 0);

    /* Output landed in the context's internal buffer: copy it into the
     * user's buffer if one was provided, otherwise dup into owned memory. */
    if (avpkt->data && avpkt->data == avctx->internal->byte_buffer) {
        needs_realloc = 0;
        if (user_pkt.data) {
            if (user_pkt.size >= avpkt->size) {
                memcpy(user_pkt.data, avpkt->data, avpkt->size);
            } else {
                av_log(avctx, AV_LOG_ERROR, "Provided packet is too small, needs to be %d\n", avpkt->size);
                avpkt->size = user_pkt.size;
                ret = -1;
            }
            avpkt->buf      = user_pkt.buf;
            avpkt->data     = user_pkt.data;
            avpkt->destruct = user_pkt.destruct;
        } else {
            if (av_dup_packet(avpkt) < 0) {
                ret = AVERROR(ENOMEM);
            }
        }
    }

    if (!ret) {
        if (!*got_packet_ptr)
            avpkt->size = 0;
        else if (!(avctx->codec->capabilities & CODEC_CAP_DELAY))
            avpkt->pts = avpkt->dts = frame->pts;

        /* Shrink an over-allocated self-owned buffer to size + padding. */
        if (needs_realloc && avpkt->data) {
            ret = av_buffer_realloc(&avpkt->buf, avpkt->size + FF_INPUT_BUFFER_PADDING_SIZE);
            if (ret >= 0)
                avpkt->data = avpkt->buf->data;
        }

        avctx->frame_number++;
    }

    if (ret < 0 || !*got_packet_ptr)
        av_free_packet(avpkt);
    else
        av_packet_merge_side_data(avpkt);

    emms_c();
    return ret;
}
|
115329f1 |
int avcodec_encode_subtitle(AVCodecContext *avctx, uint8_t *buf, int buf_size, |
240c1657 |
const AVSubtitle *sub)
{
int ret; |
419ffb23 |
if (sub->start_display_time) { |
9413db9e |
av_log(avctx, AV_LOG_ERROR, "start_display_time must be 0.\n");
return -1;
} |
423986fc |
|
466b39ef |
ret = avctx->codec->encode_sub(avctx, buf, buf_size, sub); |
240c1657 |
avctx->frame_number++;
return ret;
}
|
76ad67ca |
/**
 * Attempt to guess proper monotonic timestamps for decoded video frames
 * which might have incorrect times. Input timestamps may wrap around, in
 * which case the output will as well.
 *
 * Tracks, per context, how often each of pts and dts has been observed
 * non-increasing ("faulty") and prefers the stream that has misbehaved
 * less often.
 *
 * @param reordered_pts the pts field of the decoded AVPacket, as passed
 *                      through AVFrame.pkt_pts
 * @param dts           the dts field of the decoded AVPacket
 * @return one of the input values, may be AV_NOPTS_VALUE
 */
static int64_t guess_correct_pts(AVCodecContext *ctx,
                                 int64_t reordered_pts, int64_t dts)
{
    const int have_pts = reordered_pts != AV_NOPTS_VALUE;
    const int have_dts = dts           != AV_NOPTS_VALUE;

    /* Update dts fault statistics; a missing dts borrows pts as the new
     * reference so later comparisons stay meaningful. */
    if (have_dts) {
        if (dts <= ctx->pts_correction_last_dts)
            ctx->pts_correction_num_faulty_dts++;
        ctx->pts_correction_last_dts = dts;
    } else if (have_pts) {
        ctx->pts_correction_last_dts = reordered_pts;
    }

    /* Same bookkeeping for pts. */
    if (have_pts) {
        if (reordered_pts <= ctx->pts_correction_last_pts)
            ctx->pts_correction_num_faulty_pts++;
        ctx->pts_correction_last_pts = reordered_pts;
    } else if (have_dts) {
        ctx->pts_correction_last_pts = dts;
    }

    /* Prefer pts when it is available and no less reliable than dts (or
     * dts is absent); otherwise fall back to dts (possibly NOPTS). */
    if (have_pts &&
        (ctx->pts_correction_num_faulty_pts <= ctx->pts_correction_num_faulty_dts || !have_dts))
        return reordered_pts;

    return dts;
}
|
b9589f5a |
/**
 * Apply AV_PKT_DATA_PARAM_CHANGE side data from avpkt to the context.
 *
 * Side-data layout (little endian): a 32-bit flags word, followed by the
 * fields selected by the flags in this fixed order: channel count (u32),
 * channel layout (u64), sample rate (u32), width+height (2x u32).
 * `size` is decremented as each field is consumed; running out of bytes
 * before a requested field is a malformed-data error.
 *
 * @return 0 on success (including "no side data present"), negative
 *         AVERROR on error.
 */
static int apply_param_change(AVCodecContext *avctx, AVPacket *avpkt)
{
    int size = 0, ret;
    const uint8_t *data;
    uint32_t flags;

    data = av_packet_get_side_data(avpkt, AV_PKT_DATA_PARAM_CHANGE, &size);
    if (!data)
        return 0;

    if (!(avctx->codec->capabilities & CODEC_CAP_PARAM_CHANGE)) {
        av_log(avctx, AV_LOG_ERROR, "This decoder does not support parameter "
               "changes, but PARAM_CHANGE side data was sent to it.\n");
        return AVERROR(EINVAL);
    }

    if (size < 4)
        goto fail;

    flags = bytestream_get_le32(&data);
    size -= 4;

    if (flags & AV_SIDE_DATA_PARAM_CHANGE_CHANNEL_COUNT) {
        if (size < 4)
            goto fail;
        avctx->channels = bytestream_get_le32(&data);
        size -= 4;
    }
    if (flags & AV_SIDE_DATA_PARAM_CHANGE_CHANNEL_LAYOUT) {
        if (size < 8)
            goto fail;
        avctx->channel_layout = bytestream_get_le64(&data);
        size -= 8;
    }
    if (flags & AV_SIDE_DATA_PARAM_CHANGE_SAMPLE_RATE) {
        if (size < 4)
            goto fail;
        avctx->sample_rate = bytestream_get_le32(&data);
        size -= 4;
    }
    if (flags & AV_SIDE_DATA_PARAM_CHANGE_DIMENSIONS) {
        if (size < 8)
            goto fail;
        avctx->width  = bytestream_get_le32(&data);
        avctx->height = bytestream_get_le32(&data);
        size -= 8;
        /* Validates and propagates the new dimensions. */
        ret = ff_set_dimensions(avctx, avctx->width, avctx->height);
        if (ret < 0)
            return ret;
    }

    return 0;
fail:
    av_log(avctx, AV_LOG_ERROR, "PARAM_CHANGE side data too small.\n");
    return AVERROR_INVALIDDATA;
}
|
6fb2fd89 |
/* Unpack AV_PKT_DATA_STRINGS_METADATA side data from the packet currently
 * being decoded (avctx->internal->pkt) into the frame's metadata dictionary.
 * Returns 0 on success or a negative AVERROR code from the unpacking. */
static int add_metadata_from_side_data(AVCodecContext *avctx, AVFrame *frame)
{
    int packed_size = 0;
    const uint8_t *packed;

    packed = av_packet_get_side_data(avctx->internal->pkt,
                                     AV_PKT_DATA_STRINGS_METADATA,
                                     &packed_size);

    return av_packet_unpack_dictionary(packed, packed_size,
                                       avpriv_frame_get_metadatap(frame));
}
|
37a74901 |
/* Convert a refcounted frame into the legacy unrefcounted form for callers
 * with avctx->refcounted_frames == 0: the buffer references are parked in
 * avci->to_free (which stays valid until the next decode call replaces it),
 * while the caller's frame keeps plain data/linesize pointers plus its own
 * copy of the frame properties and side data.
 * Returns 0 on success or a negative AVERROR code. */
static int unrefcount_frame(AVCodecInternal *avci, AVFrame *frame)
{
    int ret;

    /* move the original frame to our backup */
    av_frame_unref(avci->to_free);
    av_frame_move_ref(avci->to_free, frame);

    /* now copy everything except the AVBufferRefs back
     * note that we make a COPY of the side data, so calling av_frame_free() on
     * the caller's frame will work properly */
    ret = av_frame_copy_props(frame, avci->to_free);
    if (ret < 0)
        return ret;

    memcpy(frame->data, avci->to_free->data, sizeof(frame->data));
    memcpy(frame->linesize, avci->to_free->linesize, sizeof(frame->linesize));
    if (avci->to_free->extended_data != avci->to_free->data) {
        /* planar audio with more planes than fit in data[]: the caller's
         * frame needs its own extended_data array, sized by channel count */
        int planes = av_frame_get_channels(avci->to_free);
        int size = planes * sizeof(*frame->extended_data);

        if (!size) {
            av_frame_unref(frame);
            return AVERROR_BUG;
        }

        frame->extended_data = av_malloc(size);
        if (!frame->extended_data) {
            av_frame_unref(frame);
            return AVERROR(ENOMEM);
        }
        memcpy(frame->extended_data, avci->to_free->extended_data,
               size);
    } else
        frame->extended_data = frame->data;

    /* mirror the basic audio/video parameters onto the caller's frame */
    frame->format = avci->to_free->format;
    frame->width = avci->to_free->width;
    frame->height = avci->to_free->height;
    frame->channel_layout = avci->to_free->channel_layout;
    frame->nb_samples = avci->to_free->nb_samples;
    av_frame_set_channels(frame, av_frame_get_channels(avci->to_free));

    return 0;
}
|
7a00bbad |
/**
 * Decode one video frame from avpkt into picture.
 *
 * The packet is copied so side data can be split off without modifying the
 * caller's packet. A decoder is invoked (frame-threaded or direct); for
 * unrefcounted mode the output is converted via unrefcount_frame().
 *
 * @return the number of bytes consumed (or a negative AVERROR code);
 *         *got_picture_ptr is nonzero iff a frame was produced.
 */
int attribute_align_arg avcodec_decode_video2(AVCodecContext *avctx, AVFrame *picture,
                                              int *got_picture_ptr,
                                              const AVPacket *avpkt)
{
    AVCodecInternal *avci = avctx->internal;
    int ret;
    // copy to ensure we do not change avpkt
    AVPacket tmp = *avpkt;

    if (!avctx->codec)
        return AVERROR(EINVAL);
    if (avctx->codec->type != AVMEDIA_TYPE_VIDEO) {
        av_log(avctx, AV_LOG_ERROR, "Invalid media type for video\n");
        return AVERROR(EINVAL);
    }

    *got_picture_ptr = 0;
    /* reject bogus coded dimensions before handing them to the decoder */
    if ((avctx->coded_width || avctx->coded_height) && av_image_check_size(avctx->coded_width, avctx->coded_height, 0, avctx))
        return AVERROR(EINVAL);

    av_frame_unref(picture);

    /* empty packets are only meaningful for decoders with CODEC_CAP_DELAY
     * (flush) or when frame threading needs to drain its pipeline */
    if ((avctx->codec->capabilities & CODEC_CAP_DELAY) || avpkt->size || (avctx->active_thread_type & FF_THREAD_FRAME)) {
        int did_split = av_packet_split_side_data(&tmp);
        ret = apply_param_change(avctx, &tmp);
        if (ret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Error applying parameter changes.\n");
            if (avctx->err_recognition & AV_EF_EXPLODE)
                goto fail;
        }

        /* expose the (side-data-stripped) packet to decoder internals */
        avctx->internal->pkt = &tmp;
        if (HAVE_THREADS && avctx->active_thread_type & FF_THREAD_FRAME)
            ret = ff_thread_decode_frame(avctx, picture, got_picture_ptr,
                                         &tmp);
        else {
            ret = avctx->codec->decode(avctx, picture, got_picture_ptr,
                                       &tmp);
            picture->pkt_dts = avpkt->dts;

            if(!avctx->has_b_frames){
                av_frame_set_pkt_pos(picture, avpkt->pos);
            }
            //FIXME these should be under if(!avctx->has_b_frames)
            /* get_buffer is supposed to set frame parameters */
            if (!(avctx->codec->capabilities & CODEC_CAP_DR1)) {
                if (!picture->sample_aspect_ratio.num) picture->sample_aspect_ratio = avctx->sample_aspect_ratio;
                if (!picture->width) picture->width = avctx->width;
                if (!picture->height) picture->height = avctx->height;
                if (picture->format == AV_PIX_FMT_NONE) picture->format = avctx->pix_fmt;
            }
        }
        add_metadata_from_side_data(avctx, picture);

fail:
        emms_c(); //needed to avoid an emms_c() call before every return;

        avctx->internal->pkt = NULL;
        if (did_split) {
            av_packet_free_side_data(&tmp);
            /* a full consume of the stripped packet counts as a full consume
             * of the caller's original (larger) packet */
            if(ret == tmp.size)
                ret = avpkt->size;
        }

        if (*got_picture_ptr) {
            if (!avctx->refcounted_frames) {
                int err = unrefcount_frame(avci, picture);
                if (err < 0)
                    return err;
            }

            avctx->frame_number++;
            av_frame_set_best_effort_timestamp(picture,
                                               guess_correct_pts(avctx,
                                                                 picture->pkt_pts,
                                                                 picture->pkt_dts));
        } else
            av_frame_unref(picture);
    } else
        ret = 0;

    /* many decoders assign whole AVFrames, thus overwriting extended_data;
     * make sure it's set correctly */
    av_assert0(!picture->extended_data || picture->extended_data == picture->data);

    return ret;
}
|
0eea2129 |
#if FF_API_OLD_DECODE_AUDIO |
7a00bbad |
/**
 * Deprecated wrapper that decodes audio into a flat int16_t buffer by
 * forwarding to avcodec_decode_audio4() and interleaving planar output.
 *
 * Fix: the first av_log() message was built from two adjacent string
 * literals with no separating space, printing "use withavcodec_decode_audio3()".
 *
 * @param samples        output buffer, *frame_size_ptr bytes on input
 * @param frame_size_ptr in: capacity of samples in bytes; out: bytes written
 *                       (0 if no frame was produced)
 * @return bytes consumed from avpkt, or a negative AVERROR code
 */
int attribute_align_arg avcodec_decode_audio3(AVCodecContext *avctx, int16_t *samples,
                                              int *frame_size_ptr,
                                              AVPacket *avpkt)
{
    AVFrame *frame = av_frame_alloc();
    int ret, got_frame = 0;

    if (!frame)
        return AVERROR(ENOMEM);
    /* this legacy API cannot support custom buffer allocation */
    if (avctx->get_buffer != avcodec_default_get_buffer) {
        av_log(avctx, AV_LOG_ERROR, "Custom get_buffer() for use with "
               "avcodec_decode_audio3() detected. Overriding with avcodec_default_get_buffer\n");
        av_log(avctx, AV_LOG_ERROR, "Please port your application to "
               "avcodec_decode_audio4()\n");
        avctx->get_buffer = avcodec_default_get_buffer;
        avctx->release_buffer = avcodec_default_release_buffer;
    }

    ret = avcodec_decode_audio4(avctx, frame, &got_frame, avpkt);

    if (ret >= 0 && got_frame) {
        int ch, plane_size;
        int planar = av_sample_fmt_is_planar(avctx->sample_fmt);
        int data_size = av_samples_get_buffer_size(&plane_size, avctx->channels,
                                                   frame->nb_samples,
                                                   avctx->sample_fmt, 1);
        if (*frame_size_ptr < data_size) {
            av_log(avctx, AV_LOG_ERROR, "output buffer size is too small for "
                   "the current frame (%d < %d)\n", *frame_size_ptr, data_size);
            av_frame_free(&frame);
            return AVERROR(EINVAL);
        }

        memcpy(samples, frame->extended_data[0], plane_size);

        /* interleave remaining planes sequentially after the first */
        if (planar && avctx->channels > 1) {
            uint8_t *out = ((uint8_t *)samples) + plane_size;
            for (ch = 1; ch < avctx->channels; ch++) {
                memcpy(out, frame->extended_data[ch], plane_size);
                out += plane_size;
            }
        }
        *frame_size_ptr = data_size;
    } else {
        *frame_size_ptr = 0;
    }
    av_frame_free(&frame);
    return ret;
}
419ffb23 |
|
0eea2129 |
#endif
/**
 * Decode one audio frame from avpkt into frame.
 *
 * Handles parameter-change side data, AV_PKT_DATA_SKIP_SAMPLES side data
 * (leading-sample skipping and trailing-padding discard, with timestamp
 * adjustment when pkt_timebase is known), and the legacy unrefcounted mode.
 *
 * @return bytes consumed (or a negative AVERROR code);
 *         *got_frame_ptr is nonzero iff a frame was produced.
 */
int attribute_align_arg avcodec_decode_audio4(AVCodecContext *avctx,
                                              AVFrame *frame,
                                              int *got_frame_ptr,
                                              const AVPacket *avpkt)
{
    AVCodecInternal *avci = avctx->internal;
    int ret = 0;

    *got_frame_ptr = 0;

    if (!avpkt->data && avpkt->size) {
        av_log(avctx, AV_LOG_ERROR, "invalid packet: NULL data, size != 0\n");
        return AVERROR(EINVAL);
    }
    if (!avctx->codec)
        return AVERROR(EINVAL);
    if (avctx->codec->type != AVMEDIA_TYPE_AUDIO) {
        av_log(avctx, AV_LOG_ERROR, "Invalid media type for audio\n");
        return AVERROR(EINVAL);
    }

    av_frame_unref(frame);

    /* empty packets only matter for delayed (flushing) decoders or when a
     * frame-threaded pipeline must be drained */
    if ((avctx->codec->capabilities & CODEC_CAP_DELAY) || avpkt->size || (avctx->active_thread_type & FF_THREAD_FRAME)) {
        uint8_t *side;
        int side_size;
        uint32_t discard_padding = 0;
        // copy to ensure we do not change avpkt
        AVPacket tmp = *avpkt;
        int did_split = av_packet_split_side_data(&tmp);
        ret = apply_param_change(avctx, &tmp);
        if (ret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Error applying parameter changes.\n");
            if (avctx->err_recognition & AV_EF_EXPLODE)
                goto fail;
        }

        avctx->internal->pkt = &tmp;
        if (HAVE_THREADS && avctx->active_thread_type & FF_THREAD_FRAME)
            ret = ff_thread_decode_frame(avctx, frame, got_frame_ptr, &tmp);
        else {
            ret = avctx->codec->decode(avctx, frame, got_frame_ptr, &tmp);
            frame->pkt_dts = avpkt->dts;
        }
        if (ret >= 0 && *got_frame_ptr) {
            add_metadata_from_side_data(avctx, frame);
            avctx->frame_number++;
            av_frame_set_best_effort_timestamp(frame,
                                               guess_correct_pts(avctx,
                                                                 frame->pkt_pts,
                                                                 frame->pkt_dts));
            /* fall back to context values for anything the decoder left unset */
            if (frame->format == AV_SAMPLE_FMT_NONE)
                frame->format = avctx->sample_fmt;
            if (!frame->channel_layout)
                frame->channel_layout = avctx->channel_layout;
            if (!av_frame_get_channels(frame))
                av_frame_set_channels(frame, avctx->channels);
            if (!frame->sample_rate)
                frame->sample_rate = avctx->sample_rate;
        }

        /* side data layout: le32 skip count, le32 discard-padding count */
        side= av_packet_get_side_data(avctx->internal->pkt, AV_PKT_DATA_SKIP_SAMPLES, &side_size);
        if(side && side_size>=10) {
            avctx->internal->skip_samples = AV_RL32(side);
            av_log(avctx, AV_LOG_DEBUG, "skip %d samples due to side data\n",
                   avctx->internal->skip_samples);
            discard_padding = AV_RL32(side + 4);
        }
        if (avctx->internal->skip_samples && *got_frame_ptr) {
            if(frame->nb_samples <= avctx->internal->skip_samples){
                /* entire frame is skipped: drop it and carry the remainder over */
                *got_frame_ptr = 0;
                avctx->internal->skip_samples -= frame->nb_samples;
                av_log(avctx, AV_LOG_DEBUG, "skip whole frame, skip left: %d\n",
                       avctx->internal->skip_samples);
            } else {
                /* shift the remaining samples to the front of the frame */
                av_samples_copy(frame->extended_data, frame->extended_data, 0, avctx->internal->skip_samples,
                                frame->nb_samples - avctx->internal->skip_samples, avctx->channels, frame->format);
                if(avctx->pkt_timebase.num && avctx->sample_rate) {
                    /* advance timestamps past the skipped samples */
                    int64_t diff_ts = av_rescale_q(avctx->internal->skip_samples,
                                                   (AVRational){1, avctx->sample_rate},
                                                   avctx->pkt_timebase);
                    if(frame->pkt_pts!=AV_NOPTS_VALUE)
                        frame->pkt_pts += diff_ts;
                    if(frame->pkt_dts!=AV_NOPTS_VALUE)
                        frame->pkt_dts += diff_ts;
                    if (av_frame_get_pkt_duration(frame) >= diff_ts)
                        av_frame_set_pkt_duration(frame, av_frame_get_pkt_duration(frame) - diff_ts);
                } else {
                    av_log(avctx, AV_LOG_WARNING, "Could not update timestamps for skipped samples.\n");
                }
                av_log(avctx, AV_LOG_DEBUG, "skip %d/%d samples\n",
                       avctx->internal->skip_samples, frame->nb_samples);
                frame->nb_samples -= avctx->internal->skip_samples;
                avctx->internal->skip_samples = 0;
            }
        }

        /* drop trailing padding samples signalled via the same side data */
        if (discard_padding > 0 && discard_padding <= frame->nb_samples && *got_frame_ptr) {
            if (discard_padding == frame->nb_samples) {
                *got_frame_ptr = 0;
            } else {
                if(avctx->pkt_timebase.num && avctx->sample_rate) {
                    int64_t diff_ts = av_rescale_q(frame->nb_samples - discard_padding,
                                                   (AVRational){1, avctx->sample_rate},
                                                   avctx->pkt_timebase);
                    if (av_frame_get_pkt_duration(frame) >= diff_ts)
                        av_frame_set_pkt_duration(frame, av_frame_get_pkt_duration(frame) - diff_ts);
                } else {
                    av_log(avctx, AV_LOG_WARNING, "Could not update timestamps for discarded samples.\n");
                }
                av_log(avctx, AV_LOG_DEBUG, "discard %d/%d samples\n",
                       discard_padding, frame->nb_samples);
                frame->nb_samples -= discard_padding;
            }
        }
fail:
        avctx->internal->pkt = NULL;
        if (did_split) {
            av_packet_free_side_data(&tmp);
            /* consuming all of the stripped packet == consuming the original */
            if(ret == tmp.size)
                ret = avpkt->size;
        }

        if (ret >= 0 && *got_frame_ptr) {
            if (!avctx->refcounted_frames) {
                int err = unrefcount_frame(avci, frame);
                if (err < 0)
                    return err;
            }
        } else
            av_frame_unref(frame);
    }

    return ret;
}
|
f7963993 |
#define UTF8_MAX_BYTES 4 /* 5 and 6 bytes sequences should not be used */
/**
 * Recode a subtitle packet to UTF-8 according to avctx->sub_charenc.
 * No-op (returns 0, outpkt untouched) unless the charenc mode is
 * FF_SUB_CHARENC_MODE_PRE_DECODER and the packet is non-empty.
 * On success outpkt owns a newly allocated, NUL-padded buffer.
 *
 * Fixes: (1) errno was read for the return value only after av_log() and
 * av_free_packet(), either of which may clobber it, and a zero errno made
 * the function report success on a failure path whose packet had just been
 * freed; (2) the non-iconv branch fell off the end of a non-void function.
 */
static int recode_subtitle(AVCodecContext *avctx,
                           AVPacket *outpkt, const AVPacket *inpkt)
{
#if CONFIG_ICONV
    iconv_t cd = (iconv_t)-1;
    int ret = 0;
    char *inb, *outb;
    size_t inl, outl;
    AVPacket tmp;
#endif

    if (avctx->sub_charenc_mode != FF_SUB_CHARENC_MODE_PRE_DECODER || inpkt->size == 0)
        return 0;

#if CONFIG_ICONV
    cd = iconv_open("UTF-8", avctx->sub_charenc);
    /* the encoding name was validated at codec open time */
    av_assert0(cd != (iconv_t)-1);

    inb = inpkt->data;
    inl = inpkt->size;

    if (inl >= INT_MAX / UTF8_MAX_BYTES - FF_INPUT_BUFFER_PADDING_SIZE) {
        av_log(avctx, AV_LOG_ERROR, "Subtitles packet is too big for recoding\n");
        ret = AVERROR(ENOMEM);
        goto end;
    }

    /* worst case: every input byte becomes a 4-byte UTF-8 sequence */
    ret = av_new_packet(&tmp, inl * UTF8_MAX_BYTES);
    if (ret < 0)
        goto end;
    outpkt->buf  = tmp.buf;
    outpkt->data = tmp.data;
    outpkt->size = tmp.size;
    outb = outpkt->data;
    outl = outpkt->size;

    if (iconv(cd, &inb, &inl, &outb, &outl) == (size_t)-1 ||
        iconv(cd, NULL, NULL, &outb, &outl) == (size_t)-1 ||
        outl >= outpkt->size || inl != 0) {
        /* capture errno before av_log()/av_free_packet() can clobber it;
         * never return 0 (success) from this failure path */
        ret = errno ? AVERROR(errno) : AVERROR_INVALIDDATA;
        av_log(avctx, AV_LOG_ERROR, "Unable to recode subtitle event \"%s\" "
               "from %s to UTF-8\n", inpkt->data, avctx->sub_charenc);
        av_free_packet(&tmp);
        goto end;
    }
    outpkt->size -= outl;
    /* zero the unused tail so downstream string handling stays safe */
    memset(outpkt->data + outpkt->size, 0, outl);

end:
    if (cd != (iconv_t)-1)
        iconv_close(cd);
    return ret;
#else
    av_assert0(!"requesting subtitles recoding without iconv");
    return AVERROR(EINVAL); /* not reached; keeps the non-void contract */
#endif
}
|
70feca92 |
/* Return 1 if str is well-formed, minimally-encoded UTF-8 containing no
 * byte-swapped BOM (U+FFFE) and no surrogate code points, 0 otherwise. */
static int utf8_check(const uint8_t *str)
{
    const uint8_t *p = str;

    while (*p) {
        const uint8_t *seq_start = p;
        uint32_t cp, lowest;
        int seq_len;

        GET_UTF8(cp, *(p++), return 0;);
        /* reject overlong encodings: each sequence length has a minimum
         * representable code point */
        seq_len = p - seq_start;
        switch (seq_len) {
        case 1:
            lowest = 0;
            break;
        case 2:
            lowest = 0x80;
            break;
        default:
            lowest = 1 << (5 * seq_len - 4);
            break;
        }
        if (cp < lowest || cp >= 0x110000 ||
            cp == 0xFFFE /* BOM */ ||
            (cp >= 0xD800 && cp <= 0xDFFF) /* surrogates */)
            return 0;
    }
    return 1;
}
|
7a00bbad |
/**
 * Decode one subtitle event from avpkt into sub.
 *
 * Splits side data off a private packet copy, optionally recodes the text
 * to UTF-8 (sub_charenc), validates decoded ASS text as UTF-8, rescales
 * pts/end_display_time into the expected time bases, and tags sub->format
 * from the codec descriptor.
 *
 * @return bytes consumed (or a negative AVERROR code);
 *         *got_sub_ptr is nonzero iff an event was produced.
 */
int avcodec_decode_subtitle2(AVCodecContext *avctx, AVSubtitle *sub,
                             int *got_sub_ptr,
                             AVPacket *avpkt)
{
    int i, ret = 0;

    if (!avpkt->data && avpkt->size) {
        av_log(avctx, AV_LOG_ERROR, "invalid packet: NULL data, size != 0\n");
        return AVERROR(EINVAL);
    }
    if (!avctx->codec)
        return AVERROR(EINVAL);
    if (avctx->codec->type != AVMEDIA_TYPE_SUBTITLE) {
        av_log(avctx, AV_LOG_ERROR, "Invalid media type for subtitles\n");
        return AVERROR(EINVAL);
    }

    *got_sub_ptr = 0;
    avcodec_get_subtitle_defaults(sub);

    if ((avctx->codec->capabilities & CODEC_CAP_DELAY) || avpkt->size) {
        AVPacket pkt_recoded;
        AVPacket tmp = *avpkt;
        int did_split = av_packet_split_side_data(&tmp);
        //apply_param_change(avctx, &tmp);

        if (did_split) {
            /* FFMIN() prevents overflow in case the packet wasn't allocated with
             * proper padding.
             * If the side data is smaller than the buffer padding size, the
             * remaining bytes should have already been filled with zeros by the
             * original packet allocation anyway. */
            memset(tmp.data + tmp.size, 0,
                   FFMIN(avpkt->size - tmp.size, FF_INPUT_BUFFER_PADDING_SIZE));
        }

        pkt_recoded = tmp;
        ret = recode_subtitle(avctx, &pkt_recoded, &tmp);
        if (ret < 0) {
            *got_sub_ptr = 0;
        } else {
            avctx->internal->pkt = &pkt_recoded;

            if (avctx->pkt_timebase.den && avpkt->pts != AV_NOPTS_VALUE)
                sub->pts = av_rescale_q(avpkt->pts,
                                        avctx->pkt_timebase, AV_TIME_BASE_Q);
            ret = avctx->codec->decode(avctx, sub, got_sub_ptr, &pkt_recoded);
            /* decoder contract: failure implies no event, no event implies
             * no rects */
            av_assert1((ret >= 0) >= !!*got_sub_ptr &&
                       !!*got_sub_ptr >= !!sub->num_rects);

            /* derive a display duration from the packet when the decoder
             * did not provide one */
            if (sub->num_rects && !sub->end_display_time && avpkt->duration &&
                avctx->pkt_timebase.num) {
                AVRational ms = { 1, 1000 };
                sub->end_display_time = av_rescale_q(avpkt->duration,
                                                     avctx->pkt_timebase, ms);
            }

            for (i = 0; i < sub->num_rects; i++) {
                if (sub->rects[i]->ass && !utf8_check(sub->rects[i]->ass)) {
                    av_log(avctx, AV_LOG_ERROR,
                           "Invalid UTF-8 in decoded subtitles text; "
                           "maybe missing -sub_charenc option\n");
                    avsubtitle_free(sub);
                    return AVERROR_INVALIDDATA;
                }
            }

            if (tmp.data != pkt_recoded.data) { // did we recode?
                /* prevent from destroying side data from original packet */
                pkt_recoded.side_data = NULL;
                pkt_recoded.side_data_elems = 0;

                av_free_packet(&pkt_recoded);
            }
            if (avctx->codec_descriptor->props & AV_CODEC_PROP_BITMAP_SUB)
                sub->format = 0;
            else if (avctx->codec_descriptor->props & AV_CODEC_PROP_TEXT_SUB)
                sub->format = 1;
            avctx->internal->pkt = NULL;
        }

        if (did_split) {
            av_packet_free_side_data(&tmp);
            if(ret == tmp.size)
                ret = avpkt->size;
        }

        if (*got_sub_ptr)
            avctx->frame_number++;
    }

    return ret;
}
|
e1d7c883 |
/* Release everything owned by an AVSubtitle (rects, their bitmaps and text)
 * and reset the struct to all zeroes. Safe to call on an already-freed sub. */
void avsubtitle_free(AVSubtitle *sub)
{
    int i, plane;

    for (i = 0; i < sub->num_rects; i++) {
        AVSubtitleRect **rect = &sub->rects[i];

        for (plane = 0; plane < 4; plane++)
            av_freep(&(*rect)->pict.data[plane]);
        av_freep(&(*rect)->text);
        av_freep(&(*rect)->ass);
        av_freep(rect);
    }

    av_freep(&sub->rects);

    memset(sub, 0, sizeof(*sub));
}
|
2470851f |
/* Close a codec context from code that already holds the global avcodec
 * lock: drop the lock so avcodec_close() can take it, then reacquire it
 * before returning the close result. */
av_cold int ff_codec_close_recursive(AVCodecContext *avctx)
{
    int ret = 0;

    ff_unlock_avcodec();
    ret = avcodec_close(avctx);

    ff_lock_avcodec(NULL);
    return ret;
}
|
0752cd39 |
/**
 * Close a codec context and free everything associated with it (threads,
 * internal buffers, frame pools, private options/data, encoder extradata).
 * The context itself is not freed; it is left in a state where the codec
 * field is NULL. NULL input is a no-op returning 0.
 */
av_cold int avcodec_close(AVCodecContext *avctx)
{
    int ret;

    if (!avctx)
        return 0;

    ret = ff_lock_avcodec(avctx);
    if (ret < 0)
        return ret;

    if (avcodec_is_open(avctx)) {
        FramePool *pool = avctx->internal->pool;
        int i;
        if (CONFIG_FRAME_THREAD_ENCODER &&
            avctx->internal->frame_thread_encoder && avctx->thread_count > 1) {
            /* the frame-thread encoder joins worker threads that may
             * themselves need the avcodec lock, so release it first */
            ff_unlock_avcodec();
            ff_frame_thread_encoder_free(avctx);
            ff_lock_avcodec(avctx);
        }
        if (HAVE_THREADS && avctx->internal->thread_ctx)
            ff_thread_free(avctx);
        if (avctx->codec && avctx->codec->close)
            avctx->codec->close(avctx);
        avctx->coded_frame = NULL;
        avctx->internal->byte_buffer_size = 0;
        av_freep(&avctx->internal->byte_buffer);
        av_frame_free(&avctx->internal->to_free);
        for (i = 0; i < FF_ARRAY_ELEMS(pool->pools); i++)
            av_buffer_pool_uninit(&pool->pools[i]);
        av_freep(&avctx->internal->pool);
        av_freep(&avctx->internal);
    }

    if (avctx->priv_data && avctx->codec && avctx->codec->priv_class)
        av_opt_free(avctx->priv_data);
    av_opt_free(avctx);
    av_freep(&avctx->priv_data);
    /* decoders own their extradata via the caller; only encoders allocate it */
    if (av_codec_is_encoder(avctx->codec))
        av_freep(&avctx->extradata);
    avctx->codec = NULL;
    avctx->active_thread_type = 0;

    ff_unlock_avcodec();
    return 0;
}
|
7a72695c |
/* Map deprecated duplicate codec IDs (kept for ABI compatibility) onto
 * their canonical values; any other ID is returned unchanged. */
static enum AVCodecID remap_deprecated_codec_id(enum AVCodecID id)
{
    switch(id){
        //This is for future deprecated codec ids, it's empty since
        //the last major bump but will fill up again over time, please don't remove it
//        case AV_CODEC_ID_UTVIDEO_DEPRECATED: return AV_CODEC_ID_UTVIDEO;
        case AV_CODEC_ID_OPUS_DEPRECATED: return AV_CODEC_ID_OPUS;
        case AV_CODEC_ID_TAK_DEPRECATED : return AV_CODEC_ID_TAK;
        case AV_CODEC_ID_PCM_S24LE_PLANAR_DEPRECATED : return AV_CODEC_ID_PCM_S24LE_PLANAR;
        case AV_CODEC_ID_PCM_S32LE_PLANAR_DEPRECATED : return AV_CODEC_ID_PCM_S32LE_PLANAR;
        case AV_CODEC_ID_ESCAPE130_DEPRECATED : return AV_CODEC_ID_ESCAPE130;
        case AV_CODEC_ID_G2M_DEPRECATED : return AV_CODEC_ID_G2M;
        case AV_CODEC_ID_WEBP_DEPRECATED: return AV_CODEC_ID_WEBP;
        case AV_CODEC_ID_HEVC_DEPRECATED: return AV_CODEC_ID_HEVC;
        default : return id;
    }
}
|
a4aa20fb |
static AVCodec *find_encdec(enum AVCodecID id, int encoder) |
de6d9b64 |
{ |
419ffb23 |
AVCodec *p, *experimental = NULL; |
de6d9b64 |
p = first_avcodec; |
e7f008a5 |
id= remap_deprecated_codec_id(id); |
de6d9b64 |
while (p) { |
a4aa20fb |
if ((encoder ? av_codec_is_encoder(p) : av_codec_is_decoder(p)) &&
p->id == id) { |
93ebfeea |
if (p->capabilities & CODEC_CAP_EXPERIMENTAL && !experimental) {
experimental = p;
} else
return p;
} |
de6d9b64 |
p = p->next;
} |
93ebfeea |
return experimental; |
de6d9b64 |
}
|
a4aa20fb |
/* Find a registered encoder for the given codec ID, or NULL. */
AVCodec *avcodec_find_encoder(enum AVCodecID id)
{
    return find_encdec(id, 1);
}
|
98f3b098 |
AVCodec *avcodec_find_encoder_by_name(const char *name)
{
AVCodec *p; |
fc228c90 |
if (!name)
return NULL; |
98f3b098 |
p = first_avcodec;
while (p) { |
419ffb23 |
if (av_codec_is_encoder(p) && strcmp(name, p->name) == 0) |
98f3b098 |
return p;
p = p->next;
}
return NULL;
}
|
36ef5369 |
/* Find a registered decoder for the given codec ID, or NULL. */
AVCodec *avcodec_find_decoder(enum AVCodecID id)
{
    return find_encdec(id, 0);
}
AVCodec *avcodec_find_decoder_by_name(const char *name)
{
AVCodec *p; |
fc228c90 |
if (!name)
return NULL; |
de6d9b64 |
p = first_avcodec;
while (p) { |
419ffb23 |
if (av_codec_is_decoder(p) && strcmp(name, p->name) == 0) |
de6d9b64 |
return p;
p = p->next;
}
return NULL;
}
|
7a72695c |
/**
 * Return a human-readable name for a codec ID. Resolution order: the codec
 * descriptor table, then any registered decoder, then any registered
 * encoder. Never returns NULL ("none" / "unknown_codec" are the fallbacks).
 */
const char *avcodec_get_name(enum AVCodecID id)
{
    const AVCodecDescriptor *cd;
    AVCodec *codec;

    if (id == AV_CODEC_ID_NONE)
        return "none";
    cd = avcodec_descriptor_get(id);
    if (cd)
        return cd->name;
    /* missing from the descriptor table: fall back to registered codecs */
    av_log(NULL, AV_LOG_WARNING, "Codec 0x%x is not in the full list.\n", id);
    codec = avcodec_find_decoder(id);
    if (codec)
        return codec->name;
    codec = avcodec_find_encoder(id);
    if (codec)
        return codec->name;
    return "unknown_codec";
}
|
7e566bbe |
/* Render a 32-bit fourcc into buf, least-significant byte first. Printable
 * tag characters are emitted literally, anything else as "[N]" decimal.
 * Returns the length the full string would need (snprintf semantics), which
 * may exceed buf_size. */
size_t av_get_codec_tag_string(char *buf, size_t buf_size, unsigned int codec_tag)
{
    int i, written, total = 0;

#define TAG_PRINT(x) \
    (((x) >= '0' && (x) <= '9') || \
     ((x) >= 'a' && (x) <= 'z') || ((x) >= 'A' && (x) <= 'Z') || \
     ((x) == '.' || (x) == ' ' || (x) == '-' || (x) == '_'))

    for (i = 0; i < 4; i++) {
        int cur = codec_tag & 0xFF;

        written = snprintf(buf, buf_size, TAG_PRINT(cur) ? "%c" : "[%d]", cur);
        buf += written;
        buf_size = buf_size > written ? buf_size - written : 0;
        total += written;
        codec_tag >>= 8;
    }
    return total;
}
|
de6d9b64 |
/**
 * Write a human-readable one-line description of the codec context into buf
 * (as shown by av_dump_format()): codec type/name, profile, fourcc, then
 * per-media-type details (pixel format, dimensions, SAR/DAR, sample rate,
 * channel layout, ...) and the bitrate.
 *
 * @param encode nonzero if describing an encoder context (adds q range and
 *               pass-1/pass-2 flags, and prefers encoder profile lookup)
 */
void avcodec_string(char *buf, int buf_size, AVCodecContext *enc, int encode)
{
    const char *codec_type;
    const char *codec_name;
    const char *profile = NULL;
    const AVCodec *p;
    int bitrate;
    AVRational display_aspect_ratio;

    if (!buf || buf_size <= 0)
        return;
    codec_type = av_get_media_type_string(enc->codec_type);
    codec_name = avcodec_get_name(enc->codec_id);
    if (enc->profile != FF_PROFILE_UNKNOWN) {
        if (enc->codec)
            p = enc->codec;
        else
            p = encode ? avcodec_find_encoder(enc->codec_id) :
                         avcodec_find_decoder(enc->codec_id);
        if (p)
            profile = av_get_profile_name(p, enc->profile);
    }

    snprintf(buf, buf_size, "%s: %s", codec_type ? codec_type : "unknown",
             codec_name);
    buf[0] ^= 'a' ^ 'A'; /* first letter in uppercase */
    /* show the concrete implementation when it differs from the generic name */
    if (enc->codec && strcmp(enc->codec->name, codec_name))
        snprintf(buf + strlen(buf), buf_size - strlen(buf), " (%s)", enc->codec->name);

    if (profile)
        snprintf(buf + strlen(buf), buf_size - strlen(buf), " (%s)", profile);
    if (enc->codec_tag) {
        char tag_buf[32];
        av_get_codec_tag_string(tag_buf, sizeof(tag_buf), enc->codec_tag);
        snprintf(buf + strlen(buf), buf_size - strlen(buf),
                 " (%s / 0x%04X)", tag_buf, enc->codec_tag);
    }

    switch (enc->codec_type) {
    case AVMEDIA_TYPE_VIDEO:
        if (enc->pix_fmt != AV_PIX_FMT_NONE) {
            /* collect extra pixel-format qualifiers in "(a, b, c)" form */
            char detail[256] = "(";
            const char *colorspace_name;
            snprintf(buf + strlen(buf), buf_size - strlen(buf),
                     ", %s",
                     av_get_pix_fmt_name(enc->pix_fmt));
            if (enc->bits_per_raw_sample &&
                enc->bits_per_raw_sample <= av_pix_fmt_desc_get(enc->pix_fmt)->comp[0].depth_minus1)
                av_strlcatf(detail, sizeof(detail), "%d bpc, ", enc->bits_per_raw_sample);
            if (enc->color_range != AVCOL_RANGE_UNSPECIFIED)
                av_strlcatf(detail, sizeof(detail),
                            enc->color_range == AVCOL_RANGE_MPEG ? "tv, ": "pc, ");
            colorspace_name = av_get_colorspace_name(enc->colorspace);
            if (colorspace_name)
                av_strlcatf(detail, sizeof(detail), "%s, ", colorspace_name);

            if (strlen(detail) > 1) {
                /* drop the trailing ", " before closing the parenthesis */
                detail[strlen(detail) - 2] = 0;
                av_strlcatf(buf, buf_size, "%s)", detail);
            }
        }
        if (enc->width) {
            snprintf(buf + strlen(buf), buf_size - strlen(buf),
                     ", %dx%d",
                     enc->width, enc->height);
            if (enc->sample_aspect_ratio.num) {
                av_reduce(&display_aspect_ratio.num, &display_aspect_ratio.den,
                          enc->width * enc->sample_aspect_ratio.num,
                          enc->height * enc->sample_aspect_ratio.den,
                          1024 * 1024);
                snprintf(buf + strlen(buf), buf_size - strlen(buf),
                         " [SAR %d:%d DAR %d:%d]",
                         enc->sample_aspect_ratio.num, enc->sample_aspect_ratio.den,
                         display_aspect_ratio.num, display_aspect_ratio.den);
            }
            if (av_log_get_level() >= AV_LOG_DEBUG) {
                int g = av_gcd(enc->time_base.num, enc->time_base.den);
                snprintf(buf + strlen(buf), buf_size - strlen(buf),
                         ", %d/%d",
                         enc->time_base.num / g, enc->time_base.den / g);
            }
        }
        if (encode) {
            snprintf(buf + strlen(buf), buf_size - strlen(buf),
                     ", q=%d-%d", enc->qmin, enc->qmax);
        }
        break;
    case AVMEDIA_TYPE_AUDIO:
        if (enc->sample_rate) {
            snprintf(buf + strlen(buf), buf_size - strlen(buf),
                     ", %d Hz", enc->sample_rate);
        }
        av_strlcat(buf, ", ", buf_size);
        av_get_channel_layout_string(buf + strlen(buf), buf_size - strlen(buf), enc->channels, enc->channel_layout);
        if (enc->sample_fmt != AV_SAMPLE_FMT_NONE) {
            snprintf(buf + strlen(buf), buf_size - strlen(buf),
                     ", %s", av_get_sample_fmt_name(enc->sample_fmt));
        }
        break;
    case AVMEDIA_TYPE_DATA:
        if (av_log_get_level() >= AV_LOG_DEBUG) {
            int g = av_gcd(enc->time_base.num, enc->time_base.den);
            if (g)
                snprintf(buf + strlen(buf), buf_size - strlen(buf),
                         ", %d/%d",
                         enc->time_base.num / g, enc->time_base.den / g);
        }
        break;
    case AVMEDIA_TYPE_SUBTITLE:
        if (enc->width)
            snprintf(buf + strlen(buf), buf_size - strlen(buf),
                     ", %dx%d", enc->width, enc->height);
        break;
    default:
        return;
    }
    if (encode) {
        if (enc->flags & CODEC_FLAG_PASS1)
            snprintf(buf + strlen(buf), buf_size - strlen(buf),
                     ", pass 1");
        if (enc->flags & CODEC_FLAG_PASS2)
            snprintf(buf + strlen(buf), buf_size - strlen(buf),
                     ", pass 2");
    }
    bitrate = get_bit_rate(enc);
    if (bitrate != 0) {
        snprintf(buf + strlen(buf), buf_size - strlen(buf),
                 ", %d kb/s", bitrate / 1000);
    } else if (enc->rc_max_rate > 0) {
        snprintf(buf + strlen(buf), buf_size - strlen(buf),
                 ", max. %d kb/s", enc->rc_max_rate / 1000);
    }
}
|
060ec0a8 |
/* Look up the human-readable name of a profile value in the codec's profile
 * table. Returns NULL for FF_PROFILE_UNKNOWN, a codec without a profile
 * table, or a profile not present in the table. */
const char *av_get_profile_name(const AVCodec *codec, int profile)
{
    const AVProfile *entry = codec->profiles;

    if (profile == FF_PROFILE_UNKNOWN || !entry)
        return NULL;

    while (entry->profile != FF_PROFILE_UNKNOWN) {
        if (entry->profile == profile)
            return entry->name;
        entry++;
    }
    return NULL;
}
|
419ffb23 |
/* Return the library version; the asserts double as compile/run-time checks
 * that codec ID enum values and the deprecated CODEC_ID_* aliases have not
 * drifted from their ABI-stable values. */
unsigned avcodec_version(void)
{
//    av_assert0(AV_CODEC_ID_V410==164);
    av_assert0(AV_CODEC_ID_PCM_S8_PLANAR==65563);
    av_assert0(AV_CODEC_ID_ADPCM_G722==69660);
//    av_assert0(AV_CODEC_ID_BMV_AUDIO==86071);
    av_assert0(AV_CODEC_ID_SRT==94216);
    av_assert0(LIBAVCODEC_VERSION_MICRO >= 100);

    av_assert0(CODEC_ID_CLLC == AV_CODEC_ID_CLLC);
    av_assert0(CODEC_ID_PCM_S8_PLANAR == AV_CODEC_ID_PCM_S8_PLANAR);
    av_assert0(CODEC_ID_ADPCM_IMA_APC == AV_CODEC_ID_ADPCM_IMA_APC);
    av_assert0(CODEC_ID_ILBC == AV_CODEC_ID_ILBC);
    av_assert0(CODEC_ID_SRT == AV_CODEC_ID_SRT);
    return LIBAVCODEC_VERSION_INT;
}
cf087595 |
|
41600690 |
/* Return the ./configure command line the library was built with. */
const char *avcodec_configuration(void)
{
    return FFMPEG_CONFIGURATION;
}
|
41600690 |
/* Return the license string of the library. */
const char *avcodec_license(void)
{
#define LICENSE_PREFIX "libavcodec license: "
    /* skip past the prefix so only the license name itself is returned */
    return LICENSE_PREFIX FFMPEG_LICENSE + sizeof(LICENSE_PREFIX) - 1;
}
|
1c2a8c7f |
/* Reset decoder state for seeking: flush the frame-thread pipeline or call
 * the codec's own flush callback, reset the pts-correction heuristics, and
 * drop any frame parked for the legacy unrefcounted API. */
void avcodec_flush_buffers(AVCodecContext *avctx)
{
    if (HAVE_THREADS && avctx->active_thread_type & FF_THREAD_FRAME)
        ff_thread_flush(avctx);
    else if (avctx->codec->flush)
        avctx->codec->flush(avctx);

    avctx->pts_correction_last_pts =
    avctx->pts_correction_last_dts = INT64_MIN;

    if (!avctx->refcounted_frames)
        av_frame_unref(avctx->internal->to_free);
}
|
36ef5369 |
/* Return the exact, constant number of bits per coded sample for codecs
 * where that value is fixed by the format, or 0 when it is unknown or
 * variable. */
int av_get_exact_bits_per_sample(enum AVCodecID codec_id)
{
    switch (codec_id) {
    case AV_CODEC_ID_8SVX_EXP:
    case AV_CODEC_ID_8SVX_FIB:
    case AV_CODEC_ID_ADPCM_CT:
    case AV_CODEC_ID_ADPCM_IMA_APC:
    case AV_CODEC_ID_ADPCM_IMA_EA_SEAD:
    case AV_CODEC_ID_ADPCM_IMA_OKI:
    case AV_CODEC_ID_ADPCM_IMA_WS:
    case AV_CODEC_ID_ADPCM_G722:
    case AV_CODEC_ID_ADPCM_YAMAHA:
        return 4;
    case AV_CODEC_ID_PCM_ALAW:
    case AV_CODEC_ID_PCM_MULAW:
    case AV_CODEC_ID_PCM_S8:
    case AV_CODEC_ID_PCM_S8_PLANAR:
    case AV_CODEC_ID_PCM_U8:
    case AV_CODEC_ID_PCM_ZORK:
        return 8;
    case AV_CODEC_ID_PCM_S16BE:
    case AV_CODEC_ID_PCM_S16BE_PLANAR:
    case AV_CODEC_ID_PCM_S16LE:
    case AV_CODEC_ID_PCM_S16LE_PLANAR:
    case AV_CODEC_ID_PCM_U16BE:
    case AV_CODEC_ID_PCM_U16LE:
        return 16;
    case AV_CODEC_ID_PCM_S24DAUD:
    case AV_CODEC_ID_PCM_S24BE:
    case AV_CODEC_ID_PCM_S24LE:
    case AV_CODEC_ID_PCM_S24LE_PLANAR:
    case AV_CODEC_ID_PCM_U24BE:
    case AV_CODEC_ID_PCM_U24LE:
        return 24;
    case AV_CODEC_ID_PCM_S32BE:
    case AV_CODEC_ID_PCM_S32LE:
    case AV_CODEC_ID_PCM_S32LE_PLANAR:
    case AV_CODEC_ID_PCM_U32BE:
    case AV_CODEC_ID_PCM_U32LE:
    case AV_CODEC_ID_PCM_F32BE:
    case AV_CODEC_ID_PCM_F32LE:
        return 32;
    case AV_CODEC_ID_PCM_F64BE:
    case AV_CODEC_ID_PCM_F64LE:
        return 64;
    default:
        return 0;
    }
}
|
7a72695c |
/**
 * Return the PCM codec ID matching a sample format and endianness.
 *
 * @param fmt sample format (planar formats map to the interleaved codec)
 * @param be  1 for big-endian, 0 for little-endian, negative for native
 * @return the codec ID, or AV_CODEC_ID_NONE for an invalid format
 */
enum AVCodecID av_get_pcm_codec(enum AVSampleFormat fmt, int be)
{
    /* [format][endianness] lookup: column 0 = LE, column 1 = BE */
    static const enum AVCodecID map[AV_SAMPLE_FMT_NB][2] = {
        [AV_SAMPLE_FMT_U8  ] = { AV_CODEC_ID_PCM_U8,    AV_CODEC_ID_PCM_U8    },
        [AV_SAMPLE_FMT_S16 ] = { AV_CODEC_ID_PCM_S16LE, AV_CODEC_ID_PCM_S16BE },
        [AV_SAMPLE_FMT_S32 ] = { AV_CODEC_ID_PCM_S32LE, AV_CODEC_ID_PCM_S32BE },
        [AV_SAMPLE_FMT_FLT ] = { AV_CODEC_ID_PCM_F32LE, AV_CODEC_ID_PCM_F32BE },
        [AV_SAMPLE_FMT_DBL ] = { AV_CODEC_ID_PCM_F64LE, AV_CODEC_ID_PCM_F64BE },
        [AV_SAMPLE_FMT_U8P ] = { AV_CODEC_ID_PCM_U8,    AV_CODEC_ID_PCM_U8    },
        [AV_SAMPLE_FMT_S16P] = { AV_CODEC_ID_PCM_S16LE, AV_CODEC_ID_PCM_S16BE },
        [AV_SAMPLE_FMT_S32P] = { AV_CODEC_ID_PCM_S32LE, AV_CODEC_ID_PCM_S32BE },
        [AV_SAMPLE_FMT_FLTP] = { AV_CODEC_ID_PCM_F32LE, AV_CODEC_ID_PCM_F32BE },
        [AV_SAMPLE_FMT_DBLP] = { AV_CODEC_ID_PCM_F64LE, AV_CODEC_ID_PCM_F64BE },
    };
    if (fmt < 0 || fmt >= AV_SAMPLE_FMT_NB)
        return AV_CODEC_ID_NONE;
    if (be < 0 || be > 1)
        be = AV_NE(1, 0);
    return map[fmt][be];
}
|
36ef5369 |
int av_get_bits_per_sample(enum AVCodecID codec_id) |
6699d074 |
{
switch (codec_id) { |
36ef5369 |
case AV_CODEC_ID_ADPCM_SBPRO_2: |
6699d074 |
return 2; |
36ef5369 |
case AV_CODEC_ID_ADPCM_SBPRO_3: |
6699d074 |
return 3; |
36ef5369 |
case AV_CODEC_ID_ADPCM_SBPRO_4:
case AV_CODEC_ID_ADPCM_IMA_WAV:
case AV_CODEC_ID_ADPCM_IMA_QT:
case AV_CODEC_ID_ADPCM_SWF:
case AV_CODEC_ID_ADPCM_MS: |
6699d074 |
return 4;
default:
return av_get_exact_bits_per_sample(codec_id);
}
}
|
9524cf79 |
/**
 * Return the number of audio samples per channel ("frame duration")
 * contained in one packet of frame_bytes bytes for the codec in avctx.
 *
 * The duration is derived, in order of preference, from: an exact constant
 * bits-per-sample value, a fixed per-codec packet duration, the sample
 * rate, the block alignment, and finally per-codec formulas combining the
 * packet size with channels, codec_tag and bits_per_coded_sample.
 *
 * @param avctx       codec context; codec_id, sample_rate, channels,
 *                    block_align, codec_tag and bits_per_coded_sample are read
 * @param frame_bytes size of the packet in bytes
 * @return number of samples per channel, or 0 if it cannot be determined
 */
int av_get_audio_frame_duration(AVCodecContext *avctx, int frame_bytes)
{
    int id, sr, ch, ba, tag, bps;
    id = avctx->codec_id;
    sr = avctx->sample_rate;
    ch = avctx->channels;
    ba = avctx->block_align;
    tag = avctx->codec_tag;
    bps = av_get_exact_bits_per_sample(avctx->codec_id);
    /* codecs with an exact constant bits per sample */
    /* ch/bps < 32768 keeps bps * ch inside int range; 8LL widens the
     * numerator so frame_bytes * 8 cannot overflow int */
    if (bps > 0 && ch > 0 && frame_bytes > 0 && ch < 32768 && bps < 32768)
        return (frame_bytes * 8LL) / (bps * ch);
    bps = avctx->bits_per_coded_sample;
    /* codecs with a fixed packet duration */
    switch (id) {
    case AV_CODEC_ID_ADPCM_ADX: return 32;
    case AV_CODEC_ID_ADPCM_IMA_QT: return 64;
    case AV_CODEC_ID_ADPCM_EA_XAS: return 128;
    case AV_CODEC_ID_AMR_NB:
    case AV_CODEC_ID_EVRC:
    case AV_CODEC_ID_GSM:
    case AV_CODEC_ID_QCELP:
    case AV_CODEC_ID_RA_288: return 160;
    case AV_CODEC_ID_AMR_WB:
    case AV_CODEC_ID_GSM_MS: return 320;
    case AV_CODEC_ID_MP1: return 384;
    case AV_CODEC_ID_ATRAC1: return 512;
    case AV_CODEC_ID_ATRAC3: return 1024;
    case AV_CODEC_ID_MP2:
    case AV_CODEC_ID_MUSEPACK7: return 1152;
    case AV_CODEC_ID_AC3: return 1536;
    }
    if (sr > 0) {
        /* calc from sample rate */
        if (id == AV_CODEC_ID_TTA)
            return 256 * sr / 245;
        if (ch > 0) {
            /* calc from sample rate and channels */
            if (id == AV_CODEC_ID_BINKAUDIO_DCT)
                return (480 << (sr / 22050)) / ch;
        }
    }
    if (ba > 0) {
        /* calc from block_align */
        if (id == AV_CODEC_ID_SIPR) {
            switch (ba) {
            case 20: return 160;
            case 19: return 144;
            case 29: return 288;
            case 37: return 480;
            }
        } else if (id == AV_CODEC_ID_ILBC) {
            switch (ba) {
            case 38: return 160;
            case 50: return 240;
            }
        }
    }
    if (frame_bytes > 0) {
        /* calc from frame_bytes only */
        if (id == AV_CODEC_ID_TRUESPEECH)
            return 240 * (frame_bytes / 32);
        if (id == AV_CODEC_ID_NELLYMOSER)
            return 256 * (frame_bytes / 64);
        if (id == AV_CODEC_ID_RA_144)
            return 160 * (frame_bytes / 20);
        if (id == AV_CODEC_ID_G723_1)
            return 240 * (frame_bytes / 24);
        if (bps > 0) {
            /* calc from frame_bytes and bits_per_coded_sample */
            if (id == AV_CODEC_ID_ADPCM_G726)
                return frame_bytes * 8 / bps;
        }
        if (ch > 0) {
            /* calc from frame_bytes and channels */
            switch (id) {
            case AV_CODEC_ID_ADPCM_AFC:
                return frame_bytes / (9 * ch) * 16;
            case AV_CODEC_ID_ADPCM_DTK:
                return frame_bytes / (16 * ch) * 28;
            case AV_CODEC_ID_ADPCM_4XM:
            case AV_CODEC_ID_ADPCM_IMA_ISS:
                return (frame_bytes - 4 * ch) * 2 / ch;
            case AV_CODEC_ID_ADPCM_IMA_SMJPEG:
                return (frame_bytes - 4) * 2 / ch;
            case AV_CODEC_ID_ADPCM_IMA_AMV:
                return (frame_bytes - 8) * 2 / ch;
            case AV_CODEC_ID_ADPCM_XA:
                return (frame_bytes / 128) * 224 / ch;
            case AV_CODEC_ID_INTERPLAY_DPCM:
                return (frame_bytes - 6 - ch) / ch;
            case AV_CODEC_ID_ROQ_DPCM:
                return (frame_bytes - 8) / ch;
            case AV_CODEC_ID_XAN_DPCM:
                return (frame_bytes - 2 * ch) / ch;
            case AV_CODEC_ID_MACE3:
                return 3 * frame_bytes / ch;
            case AV_CODEC_ID_MACE6:
                return 6 * frame_bytes / ch;
            case AV_CODEC_ID_PCM_LXF:
                return 2 * (frame_bytes / (5 * ch));
            case AV_CODEC_ID_IAC:
            case AV_CODEC_ID_IMC:
                return 4 * frame_bytes / ch;
            }
            if (tag) {
                /* calc from frame_bytes, channels, and codec_tag */
                if (id == AV_CODEC_ID_SOL_DPCM) {
                    if (tag == 3)
                        return frame_bytes / ch;
                    else
                        return frame_bytes * 2 / ch;
                }
            }
            if (ba > 0) {
                /* calc from frame_bytes, channels, and block_align */
                /* whole blocks only; a trailing partial block contributes
                 * no samples here */
                int blocks = frame_bytes / ba;
                switch (avctx->codec_id) {
                case AV_CODEC_ID_ADPCM_IMA_WAV:
                    /* bps outside 2..5 would make the formula meaningless */
                    if (bps < 2 || bps > 5)
                        return 0;
                    return blocks * (1 + (ba - 4 * ch) / (bps * ch) * 8);
                case AV_CODEC_ID_ADPCM_IMA_DK3:
                    return blocks * (((ba - 16) * 2 / 3 * 4) / ch);
                case AV_CODEC_ID_ADPCM_IMA_DK4:
                    return blocks * (1 + (ba - 4 * ch) * 2 / ch);
                case AV_CODEC_ID_ADPCM_IMA_RAD:
                    return blocks * ((ba - 4 * ch) * 2 / ch);
                case AV_CODEC_ID_ADPCM_MS:
                    return blocks * (2 + (ba - 7 * ch) * 2 / ch);
                }
            }
            if (bps > 0) {
                /* calc from frame_bytes, channels, and bits_per_coded_sample */
                switch (avctx->codec_id) {
                case AV_CODEC_ID_PCM_DVD:
                    /* bps < 4 would make the divisor zero below */
                    if(bps<4)
                        return 0;
                    return 2 * (frame_bytes / ((bps * 2 / 8) * ch));
                case AV_CODEC_ID_PCM_BLURAY:
                    if(bps<4)
                        return 0;
                    return frame_bytes / ((FFALIGN(ch, 2) * bps) / 8);
                case AV_CODEC_ID_S302M:
                    return 2 * (frame_bytes / ((bps + 4) / 4)) / ch;
                }
            }
        }
    }
    /* unknown: caller must obtain the duration some other way */
    return 0;
}
|
b250f9c6 |
#if !HAVE_THREADS
/* Stub used when FFmpeg is built without threading support:
 * initializing frame/slice threading always reports failure. */
int ff_thread_init(AVCodecContext *s)
{
    return -1;
}

#endif
ad2b531d |
/**
 * Encode an unsigned value using Xiph-style lacing: a run of 0xff bytes
 * followed by one terminating byte < 0xff, whose sum equals the value.
 *
 * @param s destination buffer; must have room for v / 255 + 1 bytes
 * @param v value to encode
 * @return number of bytes written to s
 */
unsigned int av_xiphlacing(unsigned char *s, unsigned int v)
{
    unsigned int count = 1; /* the terminating byte is always written */

    while (v >= 0xff) {
        *s++ = 0xff;
        v   -= 0xff;
        count++;
    }
    *s = v;
    return count;
}
1005f542 |
|
419ffb23 |
/**
 * Find the first entry of tab whose two values equal (a, b).
 *
 * @param tab  table of value pairs
 * @param size number of entries in tab
 * @return the index of the matching entry, or size if none matches
 */
int ff_match_2uint16(const uint16_t(*tab)[2], int size, int a, int b)
{
    int idx = 0;

    while (idx < size) {
        if (tab[idx][0] == a && tab[idx][1] == b)
            break;
        idx++;
    }
    return idx;
}
|
f099d3d1 |
#if FF_API_MISSING_SAMPLE |
7950e519 |
FF_DISABLE_DEPRECATION_WARNINGS |
ce863d7f |
void av_log_missing_feature(void *avc, const char *feature, int want_sample) |
ea779d91 |
{ |
ef9fe5be |
av_log(avc, AV_LOG_WARNING, "%s is not implemented. Update your FFmpeg " |
db323491 |
"version to the newest one from Git. If the problem still " |
ea779d91 |
"occurs, it means that your file has a feature which has not " |
3fd3632f |
"been implemented.\n", feature); |
ea779d91 |
if(want_sample) |
ce863d7f |
av_log_ask_for_sample(avc, NULL); |
0ba39dd1 |
}
|
44f566b7 |
void av_log_ask_for_sample(void *avc, const char *msg, ...) |
0ba39dd1 |
{ |
44f566b7 |
va_list argument_list;
va_start(argument_list, msg);
|
0ba39dd1 |
if (msg) |
44f566b7 |
av_vlog(avc, AV_LOG_WARNING, msg, argument_list); |
0ba39dd1 |
av_log(avc, AV_LOG_WARNING, "If you want to help, upload a sample "
"of this file to ftp://upload.ffmpeg.org/MPlayer/incoming/ "
"and contact the ffmpeg-devel mailing list.\n"); |
44f566b7 |
va_end(argument_list); |
ea779d91 |
} |
7950e519 |
FF_ENABLE_DEPRECATION_WARNINGS |
f099d3d1 |
#endif /* FF_API_MISSING_SAMPLE */ |
c895618b |
/* Head of the singly linked list of registered hardware accelerators. */
static AVHWAccel *first_hwaccel = NULL;
/* Cached tail pointer so registration does not rescan the whole list.
 * NOTE(review): this cache itself is updated non-atomically, so under
 * concurrent registration it is only a hint — the CAS loop below still
 * finds the real tail; confirm this is the intended trade-off. */
static AVHWAccel **last_hwaccel = &first_hwaccel;

/* Append a hardware accelerator to the global list. The append itself is
 * done with a compare-and-swap so concurrent registrations cannot lose
 * entries: the CAS stores hwaccel into *p only if *p is still NULL, and
 * the loop advances past any node another thread slipped in first. */
void av_register_hwaccel(AVHWAccel *hwaccel)
{
    AVHWAccel **p = last_hwaccel;
    hwaccel->next = NULL;
    /* loop exits when the CAS succeeds (it returns the old value, NULL) */
    while(*p || avpriv_atomic_ptr_cas((void * volatile *)p, NULL, hwaccel))
        p = &(*p)->next;
    last_hwaccel = &hwaccel->next;
}
414d9d7f |
/* Iterate over the registered hardware accelerators: pass NULL to obtain
 * the first entry, or a previous result to obtain the one after it. */
AVHWAccel *av_hwaccel_next(AVHWAccel *hwaccel)
{
    if (hwaccel)
        return hwaccel->next;
    return first_hwaccel;
}
6059f13c |
|
08303d77 |
/**
 * Look up a registered hardware accelerator matching the context's codec id
 * and pixel format.
 *
 * @param avctx codec context; avctx->codec->id and avctx->pix_fmt are read
 * @return the matching accelerator, or NULL if none is registered
 */
AVHWAccel *ff_find_hwaccel(AVCodecContext *avctx)
{
    AVHWAccel *cand;

    for (cand = av_hwaccel_next(NULL); cand; cand = av_hwaccel_next(cand)) {
        if (cand->id      == avctx->codec->id &&
            cand->pix_fmt == avctx->pix_fmt)
            return cand;
    }
    return NULL;
}
f988ce6c |
int av_lockmgr_register(int (*cb)(void **mutex, enum AVLockOp op))
{ |
088f38a4 |
if (lockmgr_cb) {
if (lockmgr_cb(&codec_mutex, AV_LOCK_DESTROY)) |
f988ce6c |
return -1; |
088f38a4 |
if (lockmgr_cb(&avformat_mutex, AV_LOCK_DESTROY)) |
2d1b6fb7 |
return -1; |
f988ce6c |
}
|
088f38a4 |
lockmgr_cb = cb; |
f988ce6c |
|
088f38a4 |
if (lockmgr_cb) {
if (lockmgr_cb(&codec_mutex, AV_LOCK_CREATE)) |
f988ce6c |
return -1; |
088f38a4 |
if (lockmgr_cb(&avformat_mutex, AV_LOCK_CREATE)) |
2d1b6fb7 |
return -1;
}
return 0;
}
|
25fec859 |
/**
 * Serialize avcodec_open2()/avcodec_close() through the user-registered
 * lock manager and detect unserialized concurrent calls.
 *
 * @param log_ctx context used only for logging diagnostics
 * @return 0 on success, -1 if the lock manager fails,
 *         AVERROR(EINVAL) if concurrent open/close calls are detected
 */
int ff_lock_avcodec(AVCodecContext *log_ctx)
{
    if (lockmgr_cb) {
        if ((*lockmgr_cb)(&codec_mutex, AV_LOCK_OBTAIN))
            return -1;
    }
    /* counts threads inside the open/close critical section; with correct
     * locking it must be exactly 1 here */
    entangled_thread_counter++;
    if (entangled_thread_counter != 1) {
        av_log(log_ctx, AV_LOG_ERROR, "Insufficient thread locking around avcodec_open/close()\n");
        if (!lockmgr_cb)
            av_log(log_ctx, AV_LOG_ERROR, "No lock manager is set, please see av_lockmgr_register()\n");
        /* undo our entry (counter and mutex) via the unlock path before
         * failing; ff_unlock_avcodec() asserts ff_avcodec_locked, so set it */
        ff_avcodec_locked = 1;
        ff_unlock_avcodec();
        return AVERROR(EINVAL);
    }
    av_assert0(!ff_avcodec_locked);
    ff_avcodec_locked = 1;
    return 0;
}
/**
 * Release the avcodec open/close lock taken by ff_lock_avcodec().
 *
 * @return 0 on success, -1 if the lock manager fails to release the mutex
 */
int ff_unlock_avcodec(void)
{
    /* must only be called while the lock is held */
    av_assert0(ff_avcodec_locked);
    ff_avcodec_locked = 0;
    entangled_thread_counter--;
    if (lockmgr_cb) {
        if ((*lockmgr_cb)(&codec_mutex, AV_LOCK_RELEASE))
            return -1;
    }
    return 0;
}
|
2d1b6fb7 |
int avpriv_lock_avformat(void)
{ |
088f38a4 |
if (lockmgr_cb) {
if ((*lockmgr_cb)(&avformat_mutex, AV_LOCK_OBTAIN)) |
2d1b6fb7 |
return -1;
}
return 0;
}
int avpriv_unlock_avformat(void)
{ |
088f38a4 |
if (lockmgr_cb) {
if ((*lockmgr_cb)(&avformat_mutex, AV_LOCK_RELEASE)) |
2d1b6fb7 |
return -1; |
f988ce6c |
}
return 0;
} |
603a5f04 |
|
0842d589 |
/**
 * Upper-case each of the four bytes packed into a 32-bit value (used for
 * case-insensitive codec tag / FourCC comparison).
 *
 * Fix: the top byte is now accumulated in unsigned arithmetic; previously
 * av_toupper(...) << 24 left-shifted a signed int, which is undefined
 * behavior whenever that byte value is >= 0x80 (C11 6.5.7p4).
 *
 * @param x four packed bytes
 * @return x with each byte passed through av_toupper()
 */
unsigned int avpriv_toupper4(unsigned int x)
{
    return av_toupper(x & 0xFF) +
           (av_toupper((x >> 8) & 0xFF) << 8) +
           (av_toupper((x >> 16) & 0xFF) << 16) +
           ((unsigned)av_toupper((x >> 24) & 0xFF) << 24);
}
b38f008e |
|
759001c5 |
/**
 * Create a new reference to the frame held by src in dst: the underlying
 * AVFrame buffers and the decode-progress buffer are reference-counted,
 * not copied.
 *
 * @param dst destination ThreadFrame; dst->f must be a valid AVFrame
 * @param src source ThreadFrame to reference
 * @return 0 on success, a negative AVERROR code on failure (dst is left
 *         unreferenced on failure)
 */
int ff_thread_ref_frame(ThreadFrame *dst, ThreadFrame *src)
{
    int ret;
    dst->owner = src->owner;
    ret = av_frame_ref(dst->f, src->f);
    if (ret < 0)
        return ret;
    /* the progress buffer is optional; if referencing it fails, drop the
     * frame reference taken above so dst is not left half-initialized */
    if (src->progress &&
        !(dst->progress = av_buffer_ref(src->progress))) {
        ff_thread_release_buffer(dst->owner, dst);
        return AVERROR(ENOMEM);
    }
    return 0;
}
|
27237d52 |
#if !HAVE_THREADS
/* Fallback implementations of the ff_thread_* API for builds without
 * threading support: everything degenerates to direct, single-threaded
 * behavior. */

/* No worker threads: negotiate the pixel format immediately. */
enum AVPixelFormat ff_thread_get_format(AVCodecContext *avctx, const enum AVPixelFormat *fmt)
{
    return avctx->get_format(avctx, fmt);
}

/* Allocate the buffer directly; the calling context is the sole owner. */
int ff_thread_get_buffer(AVCodecContext *avctx, ThreadFrame *f, int flags)
{
    f->owner = avctx;
    return ff_get_buffer(avctx, f->f, flags);
}

void ff_thread_release_buffer(AVCodecContext *avctx, ThreadFrame *f)
{
    av_frame_unref(f->f);
}

/* Nothing to hand over to other threads. */
void ff_thread_finish_setup(AVCodecContext *avctx)
{
}

/* Progress reporting/waiting is a no-op: decoding is strictly sequential. */
void ff_thread_report_progress(ThreadFrame *f, int progress, int field)
{
}

void ff_thread_await_progress(ThreadFrame *f, int progress, int field)
{
}

/* A new frame can always be started when there is only one thread. */
int ff_thread_can_start_frame(AVCodecContext *avctx)
{
    return 1;
}

/* Slice-threading progress entries are not needed without threads. */
int ff_alloc_entries(AVCodecContext *avctx, int count)
{
    return 0;
}

void ff_reset_entries(AVCodecContext *avctx)
{
}

void ff_thread_await_progress2(AVCodecContext *avctx, int field, int thread, int shift)
{
}

void ff_thread_report_progress2(AVCodecContext *avctx, int field, int thread, int n)
{
}

#endif
65af48b5 |
|
36ef5369 |
enum AVMediaType avcodec_get_type(enum AVCodecID codec_id) |
bca06e77 |
{ |
913bc799 |
AVCodec *c= avcodec_find_decoder(codec_id);
if(!c)
c= avcodec_find_encoder(codec_id);
if(c)
return c->type;
|
36ef5369 |
if (codec_id <= AV_CODEC_ID_NONE) |
bca06e77 |
return AVMEDIA_TYPE_UNKNOWN; |
36ef5369 |
else if (codec_id < AV_CODEC_ID_FIRST_AUDIO) |
bca06e77 |
return AVMEDIA_TYPE_VIDEO; |
36ef5369 |
else if (codec_id < AV_CODEC_ID_FIRST_SUBTITLE) |
bca06e77 |
return AVMEDIA_TYPE_AUDIO; |
36ef5369 |
else if (codec_id < AV_CODEC_ID_FIRST_UNKNOWN) |
bca06e77 |
return AVMEDIA_TYPE_SUBTITLE;
return AVMEDIA_TYPE_UNKNOWN;
} |
af08d9ae |
/* A codec context counts as "open" once its private internal state has
 * been allocated. */
int avcodec_is_open(AVCodecContext *s)
{
    return s->internal != NULL;
}
36e61e24 |
|
67286fa9 |
/**
 * Finalize an AVBPrint buffer and install its contents as the codec
 * context's extradata.
 *
 * Fix: a truncated AVBPrint (an allocation failed while the buffer was
 * being appended to) was previously installed silently as incomplete
 * extradata; it is now detected with av_bprint_is_complete() and rejected.
 *
 * @param avctx codec context receiving ownership of the finalized string
 * @param buf   bprint buffer; it is finalized (and thus no longer usable
 *              for appending) by this call
 * @return 0 on success, a negative AVERROR code on failure
 */
int avpriv_bprint_to_extradata(AVCodecContext *avctx, struct AVBPrint *buf)
{
    int ret;
    char *str;

    ret = av_bprint_finalize(buf, &str);
    if (ret < 0)
        return ret;
    if (!av_bprint_is_complete(buf)) {
        av_free(str);
        return AVERROR(ENOMEM);
    }

    avctx->extradata = str;
    /* Note: the string is NUL terminated (so extradata can be read as a
     * string), but the ending character is not accounted in the size (in
     * binary formats you are likely not supposed to mux that character). When
     * extradata is copied, it is also padded with FF_INPUT_BUFFER_PADDING_SIZE
     * zeros. */
    avctx->extradata_size = buf->len;
    return 0;
}
2cdedcbc |
|
b19604cc |
/**
 * Scan [p, end) for an MPEG-style startcode (the byte sequence 00 00 01)
 * using *state as a rolling 32-bit history of the last bytes seen, so a
 * startcode split across consecutive calls/buffers is still found.
 *
 * @param p     start of the buffer to scan
 * @param end   one past the last byte to scan (must be >= p)
 * @param state in/out rolling history, oldest byte in the most significant
 *              position; NOTE(review): callers appear to seed this before
 *              the first call (commonly with a non-zero sentinel) — confirm
 *              against call sites
 * @return pointer one past the byte that completed a startcode, or end if
 *         no startcode ends within this buffer
 */
const uint8_t *avpriv_find_start_code(const uint8_t *av_restrict p,
                                      const uint8_t *end,
                                      uint32_t *av_restrict state)
{
    int i;

    av_assert0(p <= end);
    if (p >= end)
        return end;

    /* Feed up to three bytes one at a time through the carried-over state:
     * a startcode may straddle the previous buffer. tmp == 0x100 means the
     * three bytes preceding the one just consumed were 00 00 01. */
    for (i = 0; i < 3; i++) {
        uint32_t tmp = *state << 8;
        *state = tmp + *(p++);
        if (tmp == 0x100 || p == end)
            return p;
    }

    /* Fast in-buffer scan: inspect p[-1]/p[-2]/p[-3] and advance by as many
     * positions as those bytes rule out; the else branch is reached exactly
     * when p[-3..-1] == 00 00 01. */
    while (p < end) {
        if (p[-1] > 1 ) p += 3;
        else if (p[-2] ) p += 2;
        else if (p[-3]|(p[-1]-1)) p++;
        else {
            p++;
            break;
        }
    }

    /* Reload the history with the last four bytes consumed (clamped to the
     * buffer end) and return the position just past them. */
    p = FFMIN(p, end) - 4;
    *state = AV_RB32(p);
    return p + 4;
}