Compare commits
45 Commits
SHA1
acf511de34
fd2951bb53
fa004f4854
1155bdb754
01838c5732
f09f33031b
b2a9f64e1b
15ea618ef6
ec33423273
d5dd54df69
e7a4c34e7c
2881bfbfd6
80fb38153e
b79f337f8a
f593ac1c21
baf92305a6
d6d168e87b
50f9c4acc3
211374e52a
1bf2461765
64444cd578
0047a31090
d73ce6cb56
9a6d3eee59
8b221d60fa
9da9b36435
09b33f9a82
fa6b6dad3d
e0d88cfd18
18043e3d22
ccf470fdb6
8f9bc6f2ce
fcab45f39b
bc44d06c3d
7740e36a89
6127f792f9
fd2cf9c45d
fc3dec8b62
a7315116dd
37268dcc86
ea28e74205
c1c84f0a55
56bf38859b
1cda4aa1e0
9711b52739
MAINTAINERS (43 changed lines)
@@ -46,7 +46,7 @@ Miscellaneous Areas
documentation Mike Melanson
website Robert Swain, Lou Logan
build system (configure,Makefiles) Diego Biurrun, Mans Rullgard
project server Árpád Gereöffy, Michael Niedermayer, Reimar Döffinger
project server Árpád Gereöffy, Michael Niedermayer, Reimar Döffinger, Alexander Strasser
mailinglists Michael Niedermayer, Baptiste Coudurier, Lou Logan
presets Robert Swain
metadata subsystem Aurelien Jacobs
@@ -62,13 +62,20 @@ Internal Interfaces:
libavutil/common.h Michael Niedermayer

Other:
intfloat* Michael Niedermayer
rational.c, rational.h Michael Niedermayer
mathematics.c, mathematics.h Michael Niedermayer
integer.c, integer.h Michael Niedermayer
bprint Nicolas George
bswap.h
des Reimar Doeffinger
float_dsp Loren Merritt
hash Reimar Doeffinger
intfloat* Michael Niedermayer
integer.c, integer.h Michael Niedermayer
lzo Reimar Doeffinger
mathematics.c, mathematics.h Michael Niedermayer
opencl.c, opencl.h Wei Gao
rational.c, rational.h Michael Niedermayer
rc4 Reimar Doeffinger
ripemd.c, ripemd.h James Almer
timecode Clément Bœsch


libavcodec
@@ -131,8 +138,8 @@ Codecs:
binkaudio.c Peter Ross
bmp.c Mans Rullgard, Kostya Shishkov
cavs* Stefan Gehrer
celp_filters.* Vitor Sessak
cdxl.c Paul B Mahol
celp_filters.* Vitor Sessak
cinepak.c Roberto Togni
cljr Alex Beregszaszi
cllc.c Derek Buitenhuis
@@ -143,8 +150,8 @@ Codecs:
dca.c Kostya Shishkov, Benjamin Larsson
dnxhd* Baptiste Coudurier
dpcm.c Mike Melanson
dxa.c Kostya Shishkov
dv.c Roman Shaposhnik
dxa.c Kostya Shishkov
eacmv*, eaidct*, eat* Peter Ross
ffv1.c Michael Niedermayer
ffwavesynth.c Nicolas George
@@ -154,9 +161,9 @@ Codecs:
g722.c Martin Storsjo
g726.c Roman Shaposhnik
gifdec.c Baptiste Coudurier
h264* Loren Merritt, Michael Niedermayer
h261* Michael Niedermayer
h263* Michael Niedermayer
h264* Loren Merritt, Michael Niedermayer
huffyuv.c Michael Niedermayer
idcinvideo.c Mike Melanson
imc* Benjamin Larsson
@@ -171,8 +178,8 @@ Codecs:
kmvc.c Kostya Shishkov
lcl*.c Roberto Togni, Reimar Doeffinger
libcelt_dec.c Nicolas George
libgsm.c Michel Bardiaux
libdirac* David Conrad
libgsm.c Michel Bardiaux
libopenjpeg.c Jaikrishnan Menon
libopenjpegenc.c Michael Bradshaw
libschroedinger* David Conrad
@@ -180,8 +187,8 @@ Codecs:
libtheoraenc.c David Conrad
libutvideo* Derek Buitenhuis
libvorbis.c David Conrad
libxavs.c Stefan Gehrer
libx264.c Mans Rullgard, Jason Garrett-Glaser
libxavs.c Stefan Gehrer
loco.c Kostya Shishkov
lzo.h, lzo.c Reimar Doeffinger
mdec.c Michael Niedermayer
@@ -243,8 +250,8 @@ Codecs:
vda_h264_dec.c Xidorn Quan
vima.c Paul B Mahol
vmnc.c Kostya Shishkov
vorbis_enc.c Oded Shimon
vorbis_dec.c Denes Balatoni, David Conrad
vorbis_enc.c Oded Shimon
vp3* Mike Melanson
vp5 Aurelien Jacobs
vp6 Aurelien Jacobs
@@ -278,11 +285,11 @@ libavdevice
libavdevice/avdevice.h


dshow.c Roger Pack
iec61883.c Georg Lippitsch
libdc1394.c Roman Shaposhnik
v4l2.c Luca Abeni
vfwcap.c Ramiro Polla
dshow.c Roger Pack

libavfilter
===========
@@ -292,11 +299,13 @@ Generic parts:

Filters:
af_amerge.c Nicolas George
af_aresample.c Michael Niedermayer
af_astreamsync.c Nicolas George
af_atempo.c Pavel Koshevoy
af_pan.c Nicolas George
vf_delogo.c Jean Delvare (CC <khali@linux-fr.org>)
vf_drawbox.c/drawgrid Andrey Utkin
vf_scale.c Michael Niedermayer
vf_yadif.c Michael Niedermayer

Sources:
@@ -316,7 +325,8 @@ Muxers/Demuxers:
4xm.c Mike Melanson
adtsenc.c Robert Swain
afc.c Paul B Mahol
aiff.c Baptiste Coudurier
aiffdec.c Baptiste Coudurier, Matthieu Bouron
aiffenc.c Baptiste Coudurier, Matthieu Bouron
ape.c Kostya Shishkov
ass* Aurelien Jacobs
astdec.c Paul B Mahol
@@ -344,8 +354,8 @@ Muxers/Demuxers:
idcin.c Mike Melanson
idroqdec.c Mike Melanson
iff.c Jaikrishnan Menon
ipmovie.c Mike Melanson
img2*.c Michael Niedermayer
ipmovie.c Mike Melanson
ircam* Paul B Mahol
iss.c Stefan Gehrer
jacosub* Clément Bœsch
@@ -359,11 +369,11 @@ Muxers/Demuxers:
matroskadec.c Aurelien Jacobs
matroskaenc.c David Conrad
metadata* Aurelien Jacobs
microdvd* Aurelien Jacobs
mgsts.c Paul B Mahol
microdvd* Aurelien Jacobs
mm.c Peter Ross
mov.c Michael Niedermayer, Baptiste Coudurier
movenc.c Michael Niedermayer, Baptiste Coudurier
movenc.c Baptiste Coudurier, Matthieu Bouron
mpc.c Kostya Shishkov
mpeg.c Michael Niedermayer
mpegenc.c Michael Niedermayer
@@ -458,7 +468,6 @@ Releases

2.0 Michael Niedermayer
1.2 Michael Niedermayer
1.1 Michael Niedermayer

If you want to maintain an older release, please contact us

@@ -31,7 +31,7 @@ PROJECT_NAME = FFmpeg
# This could be handy for archiving the generated documentation or
# if some version control system is used.

PROJECT_NUMBER = 2.0
PROJECT_NUMBER = 2.0.1

# With the PROJECT_LOGO tag one can specify an logo or icon that is included
# in the documentation. The maximum height of the logo should not exceed 55

@@ -389,7 +389,7 @@ static int kempf_decode_tile(G2MContext *c, int tile_x, int tile_y,
return 0;
zsize = (src[0] << 8) | src[1]; src += 2;

if (src_end - src < zsize)
if (src_end - src < zsize + (sub_type != 2))
return AVERROR_INVALIDDATA;

ret = uncompress(c->kempf_buf, &dlen, src, zsize);
@@ -411,6 +411,8 @@ static int kempf_decode_tile(G2MContext *c, int tile_x, int tile_y,
for (i = 0; i < (FFALIGN(height, 16) >> 4); i++) {
for (j = 0; j < (FFALIGN(width, 16) >> 4); j++) {
if (!bits) {
if (src >= src_end)
return AVERROR_INVALIDDATA;
bitbuf = *src++;
bits = 8;
}

@@ -802,7 +802,7 @@ static void truncpasses(Jpeg2000EncoderContext *s, Jpeg2000Tile *tile)
Jpeg2000Cblk *cblk = prec->cblk + cblkno;

cblk->ninclpasses = getcut(cblk, s->lambda,
(int64_t)dwt_norms[codsty->transform == FF_DWT53][bandpos][lev] * (int64_t)band->i_stepsize >> 16);
(int64_t)dwt_norms[codsty->transform == FF_DWT53][bandpos][lev] * (int64_t)band->i_stepsize >> 15);
}
}
}
@@ -863,7 +863,7 @@ static int encode_tile(Jpeg2000EncoderContext *s, Jpeg2000Tile *tile, int tileno
int *ptr = t1.data[y-yy0];
for (x = xx0; x < xx1; x++){
*ptr = (comp->i_data[(comp->coord[0][1] - comp->coord[0][0]) * y + x]);
*ptr = (int64_t)*ptr * (int64_t)(16384 * 65536 / band->i_stepsize) >> 14 - NMSEDEC_FRACBITS;
*ptr = (int64_t)*ptr * (int64_t)(16384 * 65536 / band->i_stepsize) >> 15 - NMSEDEC_FRACBITS;
ptr++;
}
}
@@ -1016,7 +1016,7 @@ static av_cold int j2kenc_init(AVCodecContext *avctx)
}

ff_jpeg2000_init_tier1_luts();

ff_mqc_init_context_tables();
init_luts();

init_quantization(s);

@@ -320,7 +320,7 @@ int ff_jpeg2000_init_component(Jpeg2000Component *comp,
if (!av_codec_is_encoder(avctx->codec))
band->f_stepsize *= 0.5;

band->i_stepsize = band->f_stepsize * (1 << 16);
band->i_stepsize = band->f_stepsize * (1 << 15);

/* computation of tbx_0, tbx_1, tby_0, tby_1
* see ISO/IEC 15444-1:2002 B.5 eq. B-15 and tbl B.1

@@ -28,6 +28,7 @@
|
||||
#include "libavutil/avassert.h"
|
||||
#include "libavutil/common.h"
|
||||
#include "libavutil/opt.h"
|
||||
#include "libavutil/pixdesc.h"
|
||||
#include "avcodec.h"
|
||||
#include "bytestream.h"
|
||||
#include "internal.h"
|
||||
@@ -37,6 +38,7 @@
|
||||
#define JP2_SIG_TYPE 0x6A502020
|
||||
#define JP2_SIG_VALUE 0x0D0A870A
|
||||
#define JP2_CODESTREAM 0x6A703263
|
||||
#define JP2_HEADER 0x6A703268
|
||||
|
||||
#define HAD_COC 0x01
|
||||
#define HAD_QCC 0x02
|
||||
@@ -72,6 +74,10 @@ typedef struct Jpeg2000DecoderContext {
|
||||
int cdx[4], cdy[4];
|
||||
int precision;
|
||||
int ncomponents;
|
||||
int colour_space;
|
||||
uint32_t palette[256];
|
||||
int8_t pal8;
|
||||
int cdef[4];
|
||||
int tile_width, tile_height;
|
||||
unsigned numXtiles, numYtiles;
|
||||
int maxtilelen;
|
||||
@@ -154,12 +160,74 @@ static int tag_tree_decode(Jpeg2000DecoderContext *s, Jpeg2000TgtNode *node,
|
||||
return curval;
|
||||
}
|
||||
|
||||
static int pix_fmt_match(enum AVPixelFormat pix_fmt, int components,
|
||||
int bpc, uint32_t log2_chroma_wh, int pal8)
|
||||
{
|
||||
int match = 1;
|
||||
const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(pix_fmt);
|
||||
|
||||
if (desc->nb_components != components) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
switch (components) {
|
||||
case 4:
|
||||
match = match && desc->comp[3].depth_minus1 + 1 >= bpc &&
|
||||
(log2_chroma_wh >> 14 & 3) == 0 &&
|
||||
(log2_chroma_wh >> 12 & 3) == 0;
|
||||
case 3:
|
||||
match = match && desc->comp[2].depth_minus1 + 1 >= bpc &&
|
||||
(log2_chroma_wh >> 10 & 3) == desc->log2_chroma_w &&
|
||||
(log2_chroma_wh >> 8 & 3) == desc->log2_chroma_h;
|
||||
case 2:
|
||||
match = match && desc->comp[1].depth_minus1 + 1 >= bpc &&
|
||||
(log2_chroma_wh >> 6 & 3) == desc->log2_chroma_w &&
|
||||
(log2_chroma_wh >> 4 & 3) == desc->log2_chroma_h;
|
||||
|
||||
case 1:
|
||||
match = match && desc->comp[0].depth_minus1 + 1 >= bpc &&
|
||||
(log2_chroma_wh >> 2 & 3) == 0 &&
|
||||
(log2_chroma_wh & 3) == 0 &&
|
||||
(desc->flags & AV_PIX_FMT_FLAG_PAL) == pal8 * AV_PIX_FMT_FLAG_PAL;
|
||||
}
|
||||
return match;
|
||||
}
|
||||
|
||||
// pix_fmts with lower bpp have to be listed before
|
||||
// similar pix_fmts with higher bpp.
|
||||
#define RGB_PIXEL_FORMATS AV_PIX_FMT_PAL8,AV_PIX_FMT_RGB24,AV_PIX_FMT_RGBA,AV_PIX_FMT_RGB48,AV_PIX_FMT_RGBA64
|
||||
#define GRAY_PIXEL_FORMATS AV_PIX_FMT_GRAY8,AV_PIX_FMT_GRAY8A,AV_PIX_FMT_GRAY16
|
||||
#define YUV_PIXEL_FORMATS AV_PIX_FMT_YUV410P,AV_PIX_FMT_YUV411P,AV_PIX_FMT_YUVA420P, \
|
||||
AV_PIX_FMT_YUV420P,AV_PIX_FMT_YUV422P,AV_PIX_FMT_YUVA422P, \
|
||||
AV_PIX_FMT_YUV440P,AV_PIX_FMT_YUV444P,AV_PIX_FMT_YUVA444P, \
|
||||
AV_PIX_FMT_YUV420P9,AV_PIX_FMT_YUV422P9,AV_PIX_FMT_YUV444P9, \
|
||||
AV_PIX_FMT_YUVA420P9,AV_PIX_FMT_YUVA422P9,AV_PIX_FMT_YUVA444P9, \
|
||||
AV_PIX_FMT_YUV420P10,AV_PIX_FMT_YUV422P10,AV_PIX_FMT_YUV444P10, \
|
||||
AV_PIX_FMT_YUVA420P10,AV_PIX_FMT_YUVA422P10,AV_PIX_FMT_YUVA444P10, \
|
||||
AV_PIX_FMT_YUV420P12,AV_PIX_FMT_YUV422P12,AV_PIX_FMT_YUV444P12, \
|
||||
AV_PIX_FMT_YUV420P14,AV_PIX_FMT_YUV422P14,AV_PIX_FMT_YUV444P14, \
|
||||
AV_PIX_FMT_YUV420P16,AV_PIX_FMT_YUV422P16,AV_PIX_FMT_YUV444P16, \
|
||||
AV_PIX_FMT_YUVA420P16,AV_PIX_FMT_YUVA422P16,AV_PIX_FMT_YUVA444P16
|
||||
#define XYZ_PIXEL_FORMATS AV_PIX_FMT_XYZ12
|
||||
|
||||
static const enum AVPixelFormat rgb_pix_fmts[] = {RGB_PIXEL_FORMATS};
|
||||
static const enum AVPixelFormat gray_pix_fmts[] = {GRAY_PIXEL_FORMATS};
|
||||
static const enum AVPixelFormat yuv_pix_fmts[] = {YUV_PIXEL_FORMATS};
|
||||
static const enum AVPixelFormat xyz_pix_fmts[] = {XYZ_PIXEL_FORMATS};
|
||||
static const enum AVPixelFormat all_pix_fmts[] = {RGB_PIXEL_FORMATS,
|
||||
GRAY_PIXEL_FORMATS,
|
||||
YUV_PIXEL_FORMATS,
|
||||
XYZ_PIXEL_FORMATS};
|
||||
|
||||
/* marker segments */
|
||||
/* get sizes and offsets of image, tiles; number of components */
|
||||
static int get_siz(Jpeg2000DecoderContext *s)
|
||||
{
|
||||
int i;
|
||||
int ncomponents;
|
||||
uint32_t log2_chroma_wh = 0;
|
||||
const enum AVPixelFormat *possible_fmts = NULL;
|
||||
int possible_fmts_nb = 0;
|
||||
|
||||
if (bytestream2_get_bytes_left(&s->g) < 36)
|
||||
return AVERROR_INVALIDDATA;
|
||||
@@ -205,13 +273,7 @@ static int get_siz(Jpeg2000DecoderContext *s)
|
||||
s->sgnd[i] = !!(x & 0x80);
|
||||
s->cdx[i] = bytestream2_get_byteu(&s->g);
|
||||
s->cdy[i] = bytestream2_get_byteu(&s->g);
|
||||
if (s->cdx[i] != 1 || s->cdy[i] != 1) {
|
||||
avpriv_request_sample(s->avctx,
|
||||
"CDxy values %d %d for component %d",
|
||||
s->cdx[i], s->cdy[i], i);
|
||||
if (!s->cdx[i] || !s->cdy[i])
|
||||
return AVERROR_INVALIDDATA;
|
||||
}
|
||||
log2_chroma_wh |= s->cdy[i] >> 1 << i * 4 | s->cdx[i] >> 1 << i * 4 + 2;
|
||||
}
|
||||
|
||||
s->numXtiles = ff_jpeg2000_ceildiv(s->width - s->tile_offset_x, s->tile_width);
|
||||
@@ -242,35 +304,46 @@ static int get_siz(Jpeg2000DecoderContext *s)
|
||||
s->avctx->height = ff_jpeg2000_ceildivpow2(s->height - s->image_offset_y,
|
||||
s->reduction_factor);
|
||||
|
||||
switch (s->ncomponents) {
|
||||
case 1:
|
||||
if (s->precision > 8)
|
||||
s->avctx->pix_fmt = AV_PIX_FMT_GRAY16;
|
||||
else
|
||||
s->avctx->pix_fmt = AV_PIX_FMT_GRAY8;
|
||||
break;
|
||||
case 3:
|
||||
switch (s->avctx->profile) {
|
||||
case FF_PROFILE_JPEG2000_DCINEMA_2K:
|
||||
case FF_PROFILE_JPEG2000_DCINEMA_4K:
|
||||
/* XYZ color-space for digital cinema profiles */
|
||||
s->avctx->pix_fmt = AV_PIX_FMT_XYZ12;
|
||||
if (s->avctx->profile == FF_PROFILE_JPEG2000_DCINEMA_2K ||
|
||||
s->avctx->profile == FF_PROFILE_JPEG2000_DCINEMA_4K) {
|
||||
possible_fmts = xyz_pix_fmts;
|
||||
possible_fmts_nb = FF_ARRAY_ELEMS(xyz_pix_fmts);
|
||||
} else {
|
||||
switch (s->colour_space) {
|
||||
case 16:
|
||||
possible_fmts = rgb_pix_fmts;
|
||||
possible_fmts_nb = FF_ARRAY_ELEMS(rgb_pix_fmts);
|
||||
break;
|
||||
case 17:
|
||||
possible_fmts = gray_pix_fmts;
|
||||
possible_fmts_nb = FF_ARRAY_ELEMS(gray_pix_fmts);
|
||||
break;
|
||||
case 18:
|
||||
possible_fmts = yuv_pix_fmts;
|
||||
possible_fmts_nb = FF_ARRAY_ELEMS(yuv_pix_fmts);
|
||||
break;
|
||||
default:
|
||||
if (s->precision > 8)
|
||||
s->avctx->pix_fmt = AV_PIX_FMT_RGB48;
|
||||
else
|
||||
s->avctx->pix_fmt = AV_PIX_FMT_RGB24;
|
||||
possible_fmts = all_pix_fmts;
|
||||
possible_fmts_nb = FF_ARRAY_ELEMS(all_pix_fmts);
|
||||
break;
|
||||
}
|
||||
break;
|
||||
case 4:
|
||||
s->avctx->pix_fmt = AV_PIX_FMT_RGBA;
|
||||
break;
|
||||
default:
|
||||
/* pixel format can not be identified */
|
||||
s->avctx->pix_fmt = AV_PIX_FMT_NONE;
|
||||
break;
|
||||
}
|
||||
for (i = 0; i < possible_fmts_nb; ++i) {
|
||||
if (pix_fmt_match(possible_fmts[i], ncomponents, s->precision, log2_chroma_wh, s->pal8)) {
|
||||
s->avctx->pix_fmt = possible_fmts[i];
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (s->avctx->pix_fmt == AV_PIX_FMT_NONE) {
|
||||
av_log(s->avctx, AV_LOG_ERROR,
|
||||
"Unknown pix_fmt, profile: %d, colour_space: %d, "
|
||||
"components: %d, precision: %d, "
|
||||
"cdx[1]: %d, cdy[1]: %d, cdx[2]: %d, cdy[2]: %d\n",
|
||||
s->avctx->profile, s->colour_space, ncomponents, s->precision,
|
||||
ncomponents > 2 ? s->cdx[1] : 0,
|
||||
ncomponents > 2 ? s->cdy[1] : 0,
|
||||
ncomponents > 2 ? s->cdx[2] : 0,
|
||||
ncomponents > 2 ? s->cdy[2] : 0);
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
@@ -1024,7 +1097,7 @@ static void dequantization_int(int x, int y, Jpeg2000Cblk *cblk,
|
||||
int32_t *datap = &comp->i_data[(comp->coord[0][1] - comp->coord[0][0]) * (y + j) + x];
|
||||
int *src = t1->data[j];
|
||||
for (i = 0; i < w; ++i)
|
||||
datap[i] = (src[i] * band->i_stepsize + (1 << 15)) >> 16;
|
||||
datap[i] = (src[i] * band->i_stepsize + (1 << 14)) >> 15;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1157,6 +1230,13 @@ static int jpeg2000_decode_tile(Jpeg2000DecoderContext *s, Jpeg2000Tile *tile,
|
||||
if (tile->codsty[0].mct)
|
||||
mct_decode(s, tile);
|
||||
|
||||
if (s->cdef[0] < 0) {
|
||||
for (x = 0; x < s->ncomponents; x++)
|
||||
s->cdef[x] = x + 1;
|
||||
if ((s->ncomponents & 1) == 0)
|
||||
s->cdef[s->ncomponents-1] = 0;
|
||||
}
|
||||
|
||||
if (s->precision <= 8) {
|
||||
for (compno = 0; compno < s->ncomponents; compno++) {
|
||||
Jpeg2000Component *comp = tile->comp + compno;
|
||||
@@ -1165,14 +1245,21 @@ static int jpeg2000_decode_tile(Jpeg2000DecoderContext *s, Jpeg2000Tile *tile,
|
||||
int32_t *i_datap = comp->i_data;
|
||||
int cbps = s->cbps[compno];
|
||||
int w = tile->comp[compno].coord[0][1] - s->image_offset_x;
|
||||
int planar = !!picture->data[2];
|
||||
int pixelsize = planar ? 1 : s->ncomponents;
|
||||
int plane = 0;
|
||||
|
||||
if (planar)
|
||||
plane = s->cdef[compno] ? s->cdef[compno]-1 : (s->ncomponents-1);
|
||||
|
||||
|
||||
y = tile->comp[compno].coord[1][0] - s->image_offset_y;
|
||||
line = picture->data[0] + y * picture->linesize[0];
|
||||
line = picture->data[plane] + y * picture->linesize[plane];
|
||||
for (; y < tile->comp[compno].coord[1][1] - s->image_offset_y; y += s->cdy[compno]) {
|
||||
uint8_t *dst;
|
||||
|
||||
x = tile->comp[compno].coord[0][0] - s->image_offset_x;
|
||||
dst = line + x * s->ncomponents + compno;
|
||||
dst = line + x * pixelsize + compno*!planar;
|
||||
|
||||
if (codsty->transform == FF_DWT97) {
|
||||
for (; x < w; x += s->cdx[compno]) {
|
||||
@@ -1181,7 +1268,7 @@ static int jpeg2000_decode_tile(Jpeg2000DecoderContext *s, Jpeg2000Tile *tile,
|
||||
val = av_clip(val, 0, (1 << cbps) - 1);
|
||||
*dst = val << (8 - cbps);
|
||||
datap++;
|
||||
dst += s->ncomponents;
|
||||
dst += pixelsize;
|
||||
}
|
||||
} else {
|
||||
for (; x < w; x += s->cdx[compno]) {
|
||||
@@ -1190,10 +1277,10 @@ static int jpeg2000_decode_tile(Jpeg2000DecoderContext *s, Jpeg2000Tile *tile,
|
||||
val = av_clip(val, 0, (1 << cbps) - 1);
|
||||
*dst = val << (8 - cbps);
|
||||
i_datap++;
|
||||
dst += s->ncomponents;
|
||||
dst += pixelsize;
|
||||
}
|
||||
}
|
||||
line += picture->linesize[0];
|
||||
line += picture->linesize[plane];
|
||||
}
|
||||
}
|
||||
} else {
|
||||
@@ -1205,14 +1292,20 @@ static int jpeg2000_decode_tile(Jpeg2000DecoderContext *s, Jpeg2000Tile *tile,
|
||||
uint16_t *linel;
|
||||
int cbps = s->cbps[compno];
|
||||
int w = tile->comp[compno].coord[0][1] - s->image_offset_x;
|
||||
int planar = !!picture->data[2];
|
||||
int pixelsize = planar ? 1 : s->ncomponents;
|
||||
int plane = 0;
|
||||
|
||||
if (planar)
|
||||
plane = s->cdef[compno] ? s->cdef[compno]-1 : (s->ncomponents-1);
|
||||
|
||||
y = tile->comp[compno].coord[1][0] - s->image_offset_y;
|
||||
linel = (uint16_t *)picture->data[0] + y * (picture->linesize[0] >> 1);
|
||||
linel = (uint16_t *)picture->data[plane] + y * (picture->linesize[plane] >> 1);
|
||||
for (; y < tile->comp[compno].coord[1][1] - s->image_offset_y; y += s->cdy[compno]) {
|
||||
uint16_t *dst;
|
||||
|
||||
x = tile->comp[compno].coord[0][0] - s->image_offset_x;
|
||||
dst = linel + (x * s->ncomponents + compno);
|
||||
dst = linel + (x * pixelsize + compno*!planar);
|
||||
if (codsty->transform == FF_DWT97) {
|
||||
for (; x < w; x += s-> cdx[compno]) {
|
||||
int val = lrintf(*datap) + (1 << (cbps - 1));
|
||||
@@ -1221,7 +1314,7 @@ static int jpeg2000_decode_tile(Jpeg2000DecoderContext *s, Jpeg2000Tile *tile,
|
||||
/* align 12 bit values in little-endian mode */
|
||||
*dst = val << (16 - cbps);
|
||||
datap++;
|
||||
dst += s->ncomponents;
|
||||
dst += pixelsize;
|
||||
}
|
||||
} else {
|
||||
for (; x < w; x += s-> cdx[compno]) {
|
||||
@@ -1231,10 +1324,10 @@ static int jpeg2000_decode_tile(Jpeg2000DecoderContext *s, Jpeg2000Tile *tile,
|
||||
/* align 12 bit values in little-endian mode */
|
||||
*dst = val << (16 - cbps);
|
||||
i_datap++;
|
||||
dst += s->ncomponents;
|
||||
dst += pixelsize;
|
||||
}
|
||||
}
|
||||
linel += picture->linesize[0] >> 1;
|
||||
linel += picture->linesize[plane] >> 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1386,6 +1479,104 @@ static int jp2_find_codestream(Jpeg2000DecoderContext *s)
|
||||
atom = bytestream2_get_be32u(&s->g);
|
||||
if (atom == JP2_CODESTREAM) {
|
||||
found_codestream = 1;
|
||||
} else if (atom == JP2_HEADER &&
|
||||
bytestream2_get_bytes_left(&s->g) >= atom_size &&
|
||||
atom_size >= 16) {
|
||||
uint32_t atom2_size, atom2;
|
||||
atom_size -= 8;
|
||||
do {
|
||||
atom2_size = bytestream2_get_be32u(&s->g);
|
||||
atom2 = bytestream2_get_be32u(&s->g);
|
||||
atom_size -= 8;
|
||||
if (atom2_size < 8 || atom2_size - 8 > atom_size)
|
||||
break;
|
||||
atom2_size -= 8;
|
||||
if (atom2 == JP2_CODESTREAM) {
|
||||
return 1;
|
||||
} else if (atom2 == MKBETAG('c','o','l','r') && atom2_size >= 7) {
|
||||
int method = bytestream2_get_byteu(&s->g);
|
||||
bytestream2_skipu(&s->g, 2);
|
||||
atom_size -= 3;
|
||||
atom2_size -= 3;
|
||||
if (method == 1) {
|
||||
s->colour_space = bytestream2_get_be32u(&s->g);
|
||||
atom_size -= 4;
|
||||
atom2_size -= 4;
|
||||
}
|
||||
bytestream2_skipu(&s->g, atom2_size);
|
||||
atom_size -= atom2_size;
|
||||
} else if (atom2 == MKBETAG('p','c','l','r') && atom2_size >= 6) {
|
||||
int i, size, colour_count, colour_channels, colour_depth[3];
|
||||
uint32_t r, g, b;
|
||||
colour_count = bytestream2_get_be16u(&s->g);
|
||||
colour_channels = bytestream2_get_byteu(&s->g);
|
||||
// FIXME: Do not ignore channel_sign
|
||||
colour_depth[0] = (bytestream2_get_byteu(&s->g) & 0x7f) + 1;
|
||||
colour_depth[1] = (bytestream2_get_byteu(&s->g) & 0x7f) + 1;
|
||||
colour_depth[2] = (bytestream2_get_byteu(&s->g) & 0x7f) + 1;
|
||||
atom_size -= 6;
|
||||
atom2_size -= 6;
|
||||
size = (colour_depth[0] + 7 >> 3) * colour_count +
|
||||
(colour_depth[1] + 7 >> 3) * colour_count +
|
||||
(colour_depth[2] + 7 >> 3) * colour_count;
|
||||
if (colour_count > 256 ||
|
||||
colour_channels != 3 ||
|
||||
colour_depth[0] > 16 ||
|
||||
colour_depth[1] > 16 ||
|
||||
colour_depth[2] > 16 ||
|
||||
atom2_size < size) {
|
||||
avpriv_request_sample(s->avctx, "Unknown palette");
|
||||
bytestream2_skipu(&s->g, atom2_size);
|
||||
atom_size -= atom2_size;
|
||||
continue;
|
||||
}
|
||||
s->pal8 = 1;
|
||||
for (i = 0; i < colour_count; i++) {
|
||||
if (colour_depth[0] <= 8) {
|
||||
r = bytestream2_get_byteu(&s->g) << 8 - colour_depth[0];
|
||||
r |= r >> colour_depth[0];
|
||||
} else {
|
||||
r = bytestream2_get_be16u(&s->g) >> colour_depth[0] - 8;
|
||||
}
|
||||
if (colour_depth[1] <= 8) {
|
||||
g = bytestream2_get_byteu(&s->g) << 8 - colour_depth[1];
|
||||
r |= r >> colour_depth[1];
|
||||
} else {
|
||||
g = bytestream2_get_be16u(&s->g) >> colour_depth[1] - 8;
|
||||
}
|
||||
if (colour_depth[2] <= 8) {
|
||||
b = bytestream2_get_byteu(&s->g) << 8 - colour_depth[2];
|
||||
r |= r >> colour_depth[2];
|
||||
} else {
|
||||
b = bytestream2_get_be16u(&s->g) >> colour_depth[2] - 8;
|
||||
}
|
||||
s->palette[i] = 0xffu << 24 | r << 16 | g << 8 | b;
|
||||
}
|
||||
atom_size -= size;
|
||||
atom2_size -= size;
|
||||
bytestream2_skipu(&s->g, atom2_size);
|
||||
atom_size -= atom2_size;
|
||||
} else if (atom2 == MKBETAG('c','d','e','f') && atom2_size >= 2 &&
|
||||
bytestream2_get_bytes_left(&s->g) >= atom2_size) {
|
||||
int n = bytestream2_get_be16u(&s->g);
|
||||
atom_size -= 2;
|
||||
atom2_size -= 2;
|
||||
for (; n>0; n--) {
|
||||
int cn = bytestream2_get_be16(&s->g);
|
||||
int av_unused typ = bytestream2_get_be16(&s->g);
|
||||
int asoc = bytestream2_get_be16(&s->g);
|
||||
if (cn < 4 || asoc < 4)
|
||||
s->cdef[cn] = asoc;
|
||||
atom_size -= 6;
|
||||
atom2_size -= 6;
|
||||
}
|
||||
bytestream2_skipu(&s->g, atom2_size);
|
||||
} else {
|
||||
bytestream2_skipu(&s->g, atom2_size);
|
||||
atom_size -= atom2_size;
|
||||
}
|
||||
} while (atom_size >= 8);
|
||||
bytestream2_skipu(&s->g, atom_size);
|
||||
} else {
|
||||
if (bytestream2_get_bytes_left(&s->g) < atom_size - 8)
|
||||
return 0;
|
||||
@@ -1410,6 +1601,7 @@ static int jpeg2000_decode_frame(AVCodecContext *avctx, void *data,
|
||||
s->avctx = avctx;
|
||||
bytestream2_init(&s->g, avpkt->data, avpkt->size);
|
||||
s->curtileno = -1;
|
||||
memset(s->cdef, -1, sizeof(s->cdef));
|
||||
|
||||
if (bytestream2_get_bytes_left(&s->g) < 2) {
|
||||
ret = AVERROR_INVALIDDATA;
|
||||
@@ -1456,6 +1648,9 @@ static int jpeg2000_decode_frame(AVCodecContext *avctx, void *data,
|
||||
|
||||
*got_frame = 1;
|
||||
|
||||
if (s->avctx->pix_fmt == AV_PIX_FMT_PAL8)
|
||||
memcpy(picture->data[1], s->palette, 256 * sizeof(uint32_t));
|
||||
|
||||
return bytestream2_tell(&s->g);
|
||||
|
||||
end:
|
||||
@@ -1466,6 +1661,7 @@ end:
|
||||
static void jpeg2000_init_static_data(AVCodec *codec)
|
||||
{
|
||||
ff_jpeg2000_init_tier1_luts();
|
||||
ff_mqc_init_context_tables();
|
||||
}
|
||||
|
||||
#define OFFSET(x) offsetof(Jpeg2000DecoderContext, x)
|
||||
|
@@ -107,7 +107,7 @@ static int kmvc_decode_intra_8x8(KmvcContext * ctx, int w, int h)
val = bytestream2_get_byte(&ctx->g);
mx = val & 0xF;
my = val >> 4;
if ((l0x-mx) + 320*(l0y-my) < 0 || (l0x-mx) + 320*(l0y-my) > 316*196) {
if ((l0x-mx) + 320*(l0y-my) < 0 || (l0x-mx) + 320*(l0y-my) > 320*197 - 4) {
av_log(ctx->avctx, AV_LOG_ERROR, "Invalid MV\n");
return AVERROR_INVALIDDATA;
}
@@ -132,7 +132,7 @@ static int kmvc_decode_intra_8x8(KmvcContext * ctx, int w, int h)
val = bytestream2_get_byte(&ctx->g);
mx = val & 0xF;
my = val >> 4;
if ((l1x-mx) + 320*(l1y-my) < 0 || (l1x-mx) + 320*(l1y-my) > 318*198) {
if ((l1x-mx) + 320*(l1y-my) < 0 || (l1x-mx) + 320*(l1y-my) > 320*199 - 2) {
av_log(ctx->avctx, AV_LOG_ERROR, "Invalid MV\n");
return AVERROR_INVALIDDATA;
}
@@ -207,7 +207,7 @@ static int kmvc_decode_inter_8x8(KmvcContext * ctx, int w, int h)
val = bytestream2_get_byte(&ctx->g);
mx = (val & 0xF) - 8;
my = (val >> 4) - 8;
if ((l0x+mx) + 320*(l0y+my) < 0 || (l0x+mx) + 320*(l0y+my) > 318*198) {
if ((l0x+mx) + 320*(l0y+my) < 0 || (l0x+mx) + 320*(l0y+my) > 320*197 - 4) {
av_log(ctx->avctx, AV_LOG_ERROR, "Invalid MV\n");
return AVERROR_INVALIDDATA;
}
@@ -232,7 +232,7 @@ static int kmvc_decode_inter_8x8(KmvcContext * ctx, int w, int h)
val = bytestream2_get_byte(&ctx->g);
mx = (val & 0xF) - 8;
my = (val >> 4) - 8;
if ((l1x+mx) + 320*(l1y+my) < 0 || (l1x+mx) + 320*(l1y+my) > 318*198) {
if ((l1x+mx) + 320*(l1y+my) < 0 || (l1x+mx) + 320*(l1y+my) > 320*199 - 2) {
av_log(ctx->avctx, AV_LOG_ERROR, "Invalid MV\n");
return AVERROR_INVALIDDATA;
}

@@ -1088,7 +1088,7 @@ static int mjpeg_decode_scan(MJpegDecodeContext *s, int nb_components, int Ah,
s->dsp.clear_block(s->block);
if (decode_block(s, s->block, i,
s->dc_index[i], s->ac_index[i],
s->quant_matrixes[s->quant_index[c]]) < 0) {
s->quant_matrixes[s->quant_sindex[i]]) < 0) {
av_log(s->avctx, AV_LOG_ERROR,
"error y=%d x=%d\n", mb_y, mb_x);
return AVERROR_INVALIDDATA;
@@ -1101,9 +1101,9 @@ static int mjpeg_decode_scan(MJpegDecodeContext *s, int nb_components, int Ah,
int16_t *block = s->blocks[c][block_idx];
if (Ah)
block[0] += get_bits1(&s->gb) *
s->quant_matrixes[s->quant_index[c]][0] << Al;
s->quant_matrixes[s->quant_sindex[i]][0] << Al;
else if (decode_dc_progressive(s, block, i, s->dc_index[i],
s->quant_matrixes[s->quant_index[c]],
s->quant_matrixes[s->quant_sindex[i]],
Al) < 0) {
av_log(s->avctx, AV_LOG_ERROR,
"error y=%d x=%d\n", mb_y, mb_x);
@@ -1136,7 +1136,7 @@ static int mjpeg_decode_scan_progressive_ac(MJpegDecodeContext *s, int ss,
uint8_t *data = s->picture.data[c];
int linesize = s->linesize[c];
int last_scan = 0;
int16_t *quant_matrix = s->quant_matrixes[s->quant_index[c]];
int16_t *quant_matrix = s->quant_matrixes[s->quant_sindex[0]];

av_assert0(ss>=0 && Ah>=0 && Al>=0);
if (se < ss || se > 63) {
@@ -1231,6 +1231,11 @@ int ff_mjpeg_decode_sos(MJpegDecodeContext *s, const uint8_t *mb_bitmask,
&& nb_components == 3 && s->nb_components == 3 && i)
index = 3 - i;

s->quant_sindex[i] = s->quant_index[index];
s->nb_blocks[i] = s->h_count[index] * s->v_count[index];
s->h_scount[i] = s->h_count[index];
s->v_scount[i] = s->v_count[index];

if(nb_components == 3 && s->nb_components == 3 && s->avctx->pix_fmt == AV_PIX_FMT_GBR24P)
index = (i+2)%3;
if(nb_components == 1 && s->nb_components == 3 && s->avctx->pix_fmt == AV_PIX_FMT_GBR24P)
@@ -1238,10 +1243,6 @@ int ff_mjpeg_decode_sos(MJpegDecodeContext *s, const uint8_t *mb_bitmask,

s->comp_index[i] = index;

s->nb_blocks[i] = s->h_count[index] * s->v_count[index];
s->h_scount[i] = s->h_count[index];
s->v_scount[i] = s->v_count[index];

s->dc_index[i] = get_bits(&s->gb, 4);
s->ac_index[i] = get_bits(&s->gb, 4);

@@ -84,6 +84,7 @@ typedef struct MJpegDecodeContext {
int nb_blocks[MAX_COMPONENTS];
int h_scount[MAX_COMPONENTS];
int v_scount[MAX_COMPONENTS];
int quant_sindex[MAX_COMPONENTS];
int h_max, v_max; /* maximum h and v counts */
int quant_index[4]; /* quant table index for each component */
int last_dc[MAX_COMPONENTS]; /* last DEQUANTIZED dc (XXX: am I right to do that ?) */

@@ -2130,7 +2130,8 @@ static int decode_chunks(AVCodecContext *avctx,
buf_ptr = avpriv_find_start_code(buf_ptr, buf_end, &start_code);
if (start_code > 0x1ff) {
if (!skip_frame) {
if (HAVE_THREADS && (avctx->active_thread_type & FF_THREAD_SLICE)) {
if (HAVE_THREADS && (avctx->active_thread_type & FF_THREAD_SLICE) &&
!avctx->hwaccel) {
int i;
av_assert0(avctx->thread_count > 1);

@@ -2194,7 +2195,8 @@ static int decode_chunks(AVCodecContext *avctx,
s2->intra_dc_precision= 3;
s2->intra_matrix[0]= 1;
}
if (HAVE_THREADS && (avctx->active_thread_type & FF_THREAD_SLICE) && s->slice_count) {
if (HAVE_THREADS && (avctx->active_thread_type & FF_THREAD_SLICE) &&
!avctx->hwaccel && s->slice_count) {
int i;

avctx->execute(avctx, slice_decode_thread,
@@ -2369,7 +2371,8 @@ static int decode_chunks(AVCodecContext *avctx,
break;
}

if (HAVE_THREADS && (avctx->active_thread_type & FF_THREAD_SLICE)) {
if (HAVE_THREADS && (avctx->active_thread_type & FF_THREAD_SLICE) &&
!avctx->hwaccel) {
int threshold = (s2->mb_height * s->slice_count +
s2->slice_context_count / 2) /
s2->slice_context_count;

@@ -92,14 +92,9 @@ uint16_t ff_mqc_qe [2 * 47];
uint8_t ff_mqc_nlps[2 * 47];
uint8_t ff_mqc_nmps[2 * 47];

void ff_mqc_init_contexts(MqcState *mqc)
void ff_mqc_init_context_tables(void)
{
int i;
memset(mqc->cx_states, 0, sizeof(mqc->cx_states));
mqc->cx_states[MQC_CX_UNI] = 2 * 46;
mqc->cx_states[MQC_CX_RL] = 2 * 3;
mqc->cx_states[0] = 2 * 4;

for (i = 0; i < 47; i++) {
ff_mqc_qe[2 * i] =
ff_mqc_qe[2 * i + 1] = cx_states[i].qe;
@@ -110,3 +105,11 @@ void ff_mqc_init_contexts(MqcState *mqc)
ff_mqc_nmps[2 * i + 1] = 2 * cx_states[i].nmps + 1;
}
}

void ff_mqc_init_contexts(MqcState *mqc)
{
memset(mqc->cx_states, 0, sizeof(mqc->cx_states));
mqc->cx_states[MQC_CX_UNI] = 2 * 46;
mqc->cx_states[MQC_CX_RL] = 2 * 3;
mqc->cx_states[0] = 2 * 4;
}

@@ -78,6 +78,11 @@ int ff_mqc_decode(MqcState *mqc, uint8_t *cxstate);

/* common */

/**
 * MQ-coder Initialize context tables (QE, NLPS, NMPS)
 */
void ff_mqc_init_context_tables(void);

/**
 * MQ-coder context initialisations.
 * @param mqc MQ-coder context

@@ -163,6 +163,8 @@ int ff_pnm_decode_header(AVCodecContext *avctx, PNMContext * const s)
if (s->maxval >= 256) {
if (avctx->pix_fmt == AV_PIX_FMT_GRAY8) {
avctx->pix_fmt = AV_PIX_FMT_GRAY16BE;
if (s->maxval != 65535)
avctx->pix_fmt = AV_PIX_FMT_GRAY16;
} else if (avctx->pix_fmt == AV_PIX_FMT_RGB24) {
avctx->pix_fmt = AV_PIX_FMT_RGB48BE;
} else if (avctx->pix_fmt == AV_PIX_FMT_YUV420P && s->maxval < 65536) {

@@ -819,7 +819,8 @@ static int synthfilt_build_sb_samples(QDM2Context *q, GetBitContext *gb,
int type34_first;
float type34_div = 0;
float type34_predictor;
float samples[10], sign_bits[16];
float samples[10];
int sign_bits[16] = {0};

if (length == 0) {
// If no data use noise

@@ -58,7 +58,8 @@ static int expand_rle_row(SgiState *s, uint8_t *out_buf,
}

/* Check for buffer overflow. */
if(out_buf + pixelstride * (count-1) >= out_end) return -1;
if (out_end - out_buf <= pixelstride * (count - 1))
return -1;

if (pixel & 0x80) {
while (count--) {

@@ -116,7 +116,7 @@ static int tta_encode_frame(AVCodecContext *avctx, AVPacket *avpkt,
PutBitContext pb;
int ret, i, out_bytes, cur_chan = 0, res = 0, samples = 0;

if ((ret = ff_alloc_packet2(avctx, avpkt, frame->nb_samples * 2 * s->bps)) < 0)
if ((ret = ff_alloc_packet2(avctx, avpkt, frame->nb_samples * 2 * avctx->channels * s->bps)) < 0)
return ret;
init_put_bits(&pb, avpkt->data, avpkt->size);

@@ -1045,9 +1045,10 @@ static void aw_parse_coords(WMAVoiceContext *s, GetBitContext *gb,
|
||||
* @param gb bit I/O context
|
||||
* @param block_idx block index in frame [0, 1]
|
||||
* @param fcb structure containing fixed codebook vector info
|
||||
* @return -1 on error, 0 otherwise
|
||||
*/
|
||||
static void aw_pulse_set2(WMAVoiceContext *s, GetBitContext *gb,
|
||||
int block_idx, AMRFixed *fcb)
|
||||
static int aw_pulse_set2(WMAVoiceContext *s, GetBitContext *gb,
|
||||
int block_idx, AMRFixed *fcb)
|
||||
{
|
||||
uint16_t use_mask_mem[9]; // only 5 are used, rest is padding
|
||||
uint16_t *use_mask = use_mask_mem + 2;
|
||||
@@ -1109,7 +1110,7 @@ static void aw_pulse_set2(WMAVoiceContext *s, GetBitContext *gb,
|
||||
else if (use_mask[2]) idx = 0x2F;
|
||||
else if (use_mask[3]) idx = 0x3F;
|
||||
else if (use_mask[4]) idx = 0x4F;
|
||||
else return;
|
||||
else return -1;
|
||||
idx -= av_log2_16bit(use_mask[idx >> 4]);
|
||||
}
|
||||
if (use_mask[idx >> 4] & (0x8000 >> (idx & 15))) {
|
||||
@@ -1126,6 +1127,7 @@ static void aw_pulse_set2(WMAVoiceContext *s, GetBitContext *gb,
|
||||
/* set offset for next block, relative to start of that block */
|
||||
n = (MAX_FRAMESIZE / 2 - start_off) % fcb->pitch_lag;
|
||||
s->aw_next_pulse_off_cache = n ? fcb->pitch_lag - n : 0;
|
||||
return 0;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1288,7 +1290,18 @@ static void synth_block_fcb_acb(WMAVoiceContext *s, GetBitContext *gb,
|
||||
* (fixed) codebook pulses of the speech signal. */
|
||||
if (frame_desc->fcb_type == FCB_TYPE_AW_PULSES) {
|
||||
aw_pulse_set1(s, gb, block_idx, &fcb);
|
||||
aw_pulse_set2(s, gb, block_idx, &fcb);
|
||||
if (aw_pulse_set2(s, gb, block_idx, &fcb)) {
|
||||
/* Conceal the block with silence and return.
|
||||
* Skip the correct amount of bits to read the next
|
||||
* block from the correct offset. */
|
||||
int r_idx = pRNG(s->frame_cntr, block_idx, size);
|
||||
|
||||
for (n = 0; n < size; n++)
|
||||
excitation[n] =
|
||||
wmavoice_std_codebook[r_idx + n] * s->silence_gain;
|
||||
skip_bits(gb, 7 + 1);
|
||||
return;
|
||||
}
|
||||
} else /* FCB_TYPE_EXC_PULSES */ {
|
||||
int offset_nbits = 5 - frame_desc->log_n_blocks;
|
||||
|
||||
|
@@ -25,42 +25,48 @@
|
||||
* sample format and channel layout conversion audio filter
|
||||
*/
|
||||
|
||||
#include "libavutil/avstring.h"
|
||||
#include "libavutil/channel_layout.h"
|
||||
#include "libavutil/opt.h"
|
||||
#include "libswresample/swresample.h"
|
||||
#include "avfilter.h"
|
||||
#include "audio.h"
|
||||
#include "internal.h"
|
||||
|
||||
typedef struct {
|
||||
const AVClass *class;
|
||||
enum AVSampleFormat out_sample_fmt;
|
||||
int64_t out_chlayout;
|
||||
struct SwrContext *swr;
|
||||
char *format_str;
|
||||
char *channel_layout_str;
|
||||
} AConvertContext;
|
||||
|
||||
#define OFFSET(x) offsetof(AConvertContext, x)
|
||||
#define A AV_OPT_FLAG_AUDIO_PARAM
|
||||
#define F AV_OPT_FLAG_FILTERING_PARAM
|
||||
static const AVOption aconvert_options[] = {
|
||||
{ "sample_fmt", "", OFFSET(format_str), AV_OPT_TYPE_STRING, .flags = A|F },
|
||||
{ "channel_layout", "", OFFSET(channel_layout_str), AV_OPT_TYPE_STRING, .flags = A|F },
|
||||
{ NULL },
|
||||
};
|
||||
|
||||
AVFILTER_DEFINE_CLASS(aconvert);
|
||||
|
||||
static av_cold int init(AVFilterContext *ctx)
|
||||
{
|
||||
AConvertContext *aconvert = ctx->priv;
|
||||
char *arg, *ptr = NULL;
|
||||
int ret = 0;
|
||||
char *args = av_strdup(NULL);
|
||||
|
||||
av_log(ctx, AV_LOG_WARNING, "This filter is deprecated, use aformat instead\n");
|
||||
|
||||
aconvert->out_sample_fmt = AV_SAMPLE_FMT_NONE;
|
||||
aconvert->out_chlayout = 0;
|
||||
|
||||
if ((arg = av_strtok(args, ":", &ptr)) && strcmp(arg, "auto")) {
|
||||
if ((ret = ff_parse_sample_format(&aconvert->out_sample_fmt, arg, ctx)) < 0)
|
||||
goto end;
|
||||
}
|
||||
if ((arg = av_strtok(NULL, ":", &ptr)) && strcmp(arg, "auto")) {
|
||||
if ((ret = ff_parse_channel_layout(&aconvert->out_chlayout, arg, ctx)) < 0)
|
||||
goto end;
|
||||
}
|
||||
|
||||
end:
|
||||
av_freep(&args);
|
||||
if (aconvert->format_str && strcmp(aconvert->format_str, "auto") &&
|
||||
(ret = ff_parse_sample_format(&aconvert->out_sample_fmt, aconvert->format_str, ctx)) < 0)
|
||||
return ret;
|
||||
if (aconvert->channel_layout_str && strcmp(aconvert->channel_layout_str, "auto"))
|
||||
return ff_parse_channel_layout(&aconvert->out_chlayout, aconvert->channel_layout_str, ctx);
|
||||
return ret;
|
||||
}
|
||||
|
||||
@@ -181,6 +187,7 @@ AVFilter avfilter_af_aconvert = {
|
||||
.name = "aconvert",
|
||||
.description = NULL_IF_CONFIG_SMALL("Convert the input audio to sample_fmt:channel_layout."),
|
||||
.priv_size = sizeof(AConvertContext),
|
||||
.priv_class = &aconvert_class,
|
||||
.init = init,
|
||||
.uninit = uninit,
|
||||
.query_formats = query_formats,
|
||||
|
@@ -114,6 +114,7 @@ static int filter_frame(AVFilterLink *inlink, AVFrame *insamples)
|
||||
AVFilterLink *outlink = inlink->dst->outputs[0];
|
||||
int16_t *taps, *endin, *in, *out;
|
||||
AVFrame *outsamples = ff_get_audio_buffer(inlink, insamples->nb_samples);
|
||||
int len;
|
||||
|
||||
if (!outsamples) {
|
||||
av_frame_free(&insamples);
|
||||
@@ -125,16 +126,20 @@ static int filter_frame(AVFilterLink *inlink, AVFrame *insamples)
|
||||
out = (int16_t *)outsamples->data[0];
|
||||
in = (int16_t *)insamples ->data[0];
|
||||
|
||||
len = FFMIN(NUMTAPS, 2*insamples->nb_samples);
|
||||
// copy part of new input and process with saved input
|
||||
memcpy(taps+NUMTAPS, in, NUMTAPS * sizeof(*taps));
|
||||
out = scalarproduct(taps, taps + NUMTAPS, out);
|
||||
memcpy(taps+NUMTAPS, in, len * sizeof(*taps));
|
||||
out = scalarproduct(taps, taps + len, out);
|
||||
|
||||
// process current input
|
||||
endin = in + insamples->nb_samples * 2 - NUMTAPS;
|
||||
scalarproduct(in, endin, out);
|
||||
if (2*insamples->nb_samples >= NUMTAPS ){
|
||||
endin = in + insamples->nb_samples * 2 - NUMTAPS;
|
||||
scalarproduct(in, endin, out);
|
||||
|
||||
// save part of input for next round
|
||||
memcpy(taps, endin, NUMTAPS * sizeof(*taps));
|
||||
// save part of input for next round
|
||||
memcpy(taps, endin, NUMTAPS * sizeof(*taps));
|
||||
} else
|
||||
memmove(taps, taps + 2*insamples->nb_samples, NUMTAPS * sizeof(*taps));
|
||||
|
||||
av_frame_free(&insamples);
|
||||
return ff_filter_frame(outlink, outsamples);
|
||||
|
@@ -313,13 +313,13 @@ static int filter_frame(AVFilterLink *inlink, AVFrame *in)
|
||||
}
|
||||
av_frame_copy_props(out, in);
|
||||
|
||||
for (plane = 0; in->data[plane] && plane < 4; plane++)
|
||||
for (plane = 0; plane < 4 && in->data[plane] && in->linesize[plane]; plane++)
|
||||
hblur(out->data[plane], out->linesize[plane],
|
||||
in ->data[plane], in ->linesize[plane],
|
||||
w[plane], h[plane], s->radius[plane], s->power[plane],
|
||||
s->temp);
|
||||
|
||||
for (plane = 0; in->data[plane] && plane < 4; plane++)
|
||||
for (plane = 0; plane < 4 && in->data[plane] && in->linesize[plane]; plane++)
|
||||
vblur(out->data[plane], out->linesize[plane],
|
||||
out->data[plane], out->linesize[plane],
|
||||
w[plane], h[plane], s->radius[plane], s->power[plane],
|
||||
|
@@ -236,7 +236,7 @@ static int filter_frame(AVFilterLink *inlink, AVFrame *in)
|
||||
if (!sar.num)
|
||||
sar.num = sar.den = 1;
|
||||
|
||||
for (plane = 0; plane < 4 && in->data[plane]; plane++) {
|
||||
for (plane = 0; plane < 4 && in->data[plane] && in->linesize[plane]; plane++) {
|
||||
int hsub = plane == 1 || plane == 2 ? hsub0 : 0;
|
||||
int vsub = plane == 1 || plane == 2 ? vsub0 : 0;
|
||||
|
||||
|
@@ -608,7 +608,7 @@ static void copy_fields(const FieldMatchContext *fm, AVFrame *dst,
|
||||
const AVFrame *src, int field)
|
||||
{
|
||||
int plane;
|
||||
for (plane = 0; plane < 4 && src->data[plane]; plane++)
|
||||
for (plane = 0; plane < 4 && src->data[plane] && src->linesize[plane]; plane++)
|
||||
av_image_copy_plane(dst->data[plane] + field*dst->linesize[plane], dst->linesize[plane] << 1,
|
||||
src->data[plane] + field*src->linesize[plane], src->linesize[plane] << 1,
|
||||
get_width(fm, src, plane), get_height(fm, src, plane) / 2);
|
||||
|
@@ -106,7 +106,7 @@ static int filter_frame(AVFilterLink *inlink, AVFrame *frame)
|
||||
"picture will move %s one line\n",
|
||||
s->dst_tff ? "up" : "down");
|
||||
h = frame->height;
|
||||
for (plane = 0; plane < 4 && frame->data[plane]; plane++) {
|
||||
for (plane = 0; plane < 4 && frame->data[plane] && frame->linesize[plane]; plane++) {
|
||||
line_step = frame->linesize[plane];
|
||||
line_size = s->line_size[plane];
|
||||
data = frame->data[plane];
|
||||
|
@@ -200,7 +200,7 @@ static int filter_frame(AVFilterLink *inlink, AVFrame *in)
|
||||
av_frame_copy_props(out, in);
|
||||
}
|
||||
|
||||
for (p = 0; p < 4 && in->data[p]; p++) {
|
||||
for (p = 0; p < 4 && in->data[p] && in->linesize[p]; p++) {
|
||||
int w = inlink->w;
|
||||
int h = inlink->h;
|
||||
int r = s->radius;
|
||||
|
@@ -90,7 +90,7 @@ static int filter_frame(AVFilterLink *inlink, AVFrame *in)
|
||||
if (av_pix_fmt_desc_get(inlink->format)->flags & AV_PIX_FMT_FLAG_PAL)
|
||||
memcpy(out->data[1], in->data[1], AVPALETTE_SIZE);
|
||||
|
||||
for (plane = 0; plane < 4 && in->data[plane]; plane++) {
|
||||
for (plane = 0; plane < 4 && in->data[plane] && in->linesize[plane]; plane++) {
|
||||
const int width = (plane == 1 || plane == 2) ? FF_CEIL_RSHIFT(inlink->w, s->hsub) : inlink->w;
|
||||
const int height = (plane == 1 || plane == 2) ? FF_CEIL_RSHIFT(inlink->h, s->vsub) : inlink->h;
|
||||
step = s->max_step[plane];
|
||||
|
@@ -150,7 +150,7 @@ static int filter_frame(AVFilterLink *inlink, AVFrame *inpic)
|
||||
av_frame_copy_props(outpic, inpic);
|
||||
outpic->interlaced_frame = 0;
|
||||
|
||||
for (plane = 0; inpic->data[plane] && plane < 4; plane++) {
|
||||
for (plane = 0; plane < 4 && inpic->data[plane] && inpic->linesize[plane]; plane++) {
|
||||
h = plane == 0 ? inlink->h : FF_CEIL_RSHIFT(inlink->h, kerndeint->vsub);
|
||||
bwidth = kerndeint->tmp_bwidth[plane];
|
||||
|
||||
|
@@ -304,7 +304,7 @@ static int filter_frame(AVFilterLink *inlink, AVFrame *in)
|
||||
}
|
||||
} else {
|
||||
/* planar */
|
||||
for (plane = 0; plane < 4 && in->data[plane]; plane++) {
|
||||
for (plane = 0; plane < 4 && in->data[plane] && in->linesize[plane]; plane++) {
|
||||
int vsub = plane == 1 || plane == 2 ? s->vsub : 0;
|
||||
int hsub = plane == 1 || plane == 2 ? s->hsub : 0;
|
||||
int h = FF_CEIL_RSHIFT(inlink->h, vsub);
|
||||
|
@@ -214,7 +214,7 @@ static AVFrame *get_video_buffer(AVFilterLink *inlink, int w, int h)
|
||||
frame->width = w;
|
||||
frame->height = h;
|
||||
|
||||
for (plane = 0; plane < 4 && frame->data[plane]; plane++) {
|
||||
for (plane = 0; plane < 4 && frame->data[plane] && frame->linesize[plane]; plane++) {
|
||||
int hsub = s->draw.hsub[plane];
|
||||
int vsub = s->draw.vsub[plane];
|
||||
frame->data[plane] += (s->x >> hsub) * s->draw.pixelstep[plane] +
|
||||
@@ -311,7 +311,7 @@ static int filter_frame(AVFilterLink *inlink, AVFrame *in)
|
||||
int i;
|
||||
|
||||
out = in;
|
||||
for (i = 0; i < 4 && out->data[i]; i++) {
|
||||
for (i = 0; i < 4 && out->data[i] && out->linesize[i]; i++) {
|
||||
int hsub = s->draw.hsub[i];
|
||||
int vsub = s->draw.vsub[i];
|
||||
out->data[i] -= (s->x >> hsub) * s->draw.pixelstep[i] +
|
||||
|
@@ -33,7 +33,7 @@ static int config_props_output(AVFilterLink *outlink)
SeparateFieldsContext *sf = ctx->priv;
AVFilterLink *inlink = ctx->inputs[0];

sf->nb_planes = av_pix_fmt_count_planes(inlink->format);;
sf->nb_planes = av_pix_fmt_count_planes(inlink->format);

if (inlink->h & 1) {
av_log(ctx, AV_LOG_ERROR, "height must be even\n");

@@ -38,7 +38,7 @@ static int filter_frame(AVFilterLink *inlink, AVFrame *frame)
|
||||
uint32_t plane_checksum[4] = {0}, checksum = 0;
|
||||
int i, plane, vsub = desc->log2_chroma_h;
|
||||
|
||||
for (plane = 0; plane < 4 && frame->data[plane]; plane++) {
|
||||
for (plane = 0; plane < 4 && frame->data[plane] && frame->linesize[plane]; plane++) {
|
||||
int64_t linesize = av_image_get_linesize(frame->format, frame->width, plane);
|
||||
uint8_t *data = frame->data[plane];
|
||||
int h = plane == 1 || plane == 2 ? FF_CEIL_RSHIFT(inlink->h, vsub) : inlink->h;
|
||||
@@ -68,7 +68,7 @@ static int filter_frame(AVFilterLink *inlink, AVFrame *frame)
|
||||
av_get_picture_type_char(frame->pict_type),
|
||||
checksum, plane_checksum[0]);
|
||||
|
||||
for (plane = 1; plane < 4 && frame->data[plane]; plane++)
|
||||
for (plane = 1; plane < 4 && frame->data[plane] && frame->linesize[plane]; plane++)
|
||||
av_log(ctx, AV_LOG_INFO, " %08X", plane_checksum[plane]);
|
||||
av_log(ctx, AV_LOG_INFO, "]\n");
|
||||
|
||||
|
@@ -239,7 +239,7 @@ static int filter_frame(AVFilterLink *inlink, AVFrame *in)
|
||||
} else {
|
||||
int plane;
|
||||
|
||||
for (plane = 0; plane < 4 && in->data[plane]; plane++) {
|
||||
for (plane = 0; plane < 4 && in->data[plane] && in->linesize[plane]; plane++) {
|
||||
uint8_t *dst = out->data[plane];
|
||||
const uint8_t *src = in ->data[plane];
|
||||
const float *fmap = s->fmap;
|
||||
|
@@ -241,6 +241,8 @@ int ffurl_alloc(URLContext **puc, const char *filename, int flags,
return url_alloc_for_protocol (puc, up, filename, flags, int_cb);
}
*puc = NULL;
if (!strcmp("https", proto_str))
av_log(NULL, AV_LOG_WARNING, "https protocol not found, recompile with openssl or gnutls enabled.\n");
return AVERROR_PROTOCOL_NOT_FOUND;
}

@@ -62,19 +62,20 @@
|
||||
typedef struct {
|
||||
void *library;
|
||||
#define AVSC_DECLARE_FUNC(name) name##_func name
|
||||
AVSC_DECLARE_FUNC(avs_bit_blt);
|
||||
AVSC_DECLARE_FUNC(avs_clip_get_error);
|
||||
AVSC_DECLARE_FUNC(avs_create_script_environment);
|
||||
AVSC_DECLARE_FUNC(avs_delete_script_environment);
|
||||
AVSC_DECLARE_FUNC(avs_get_error);
|
||||
AVSC_DECLARE_FUNC(avs_clip_get_error);
|
||||
AVSC_DECLARE_FUNC(avs_invoke);
|
||||
AVSC_DECLARE_FUNC(avs_release_value);
|
||||
AVSC_DECLARE_FUNC(avs_get_video_info);
|
||||
AVSC_DECLARE_FUNC(avs_take_clip);
|
||||
AVSC_DECLARE_FUNC(avs_release_clip);
|
||||
AVSC_DECLARE_FUNC(avs_bit_blt);
|
||||
AVSC_DECLARE_FUNC(avs_get_audio);
|
||||
AVSC_DECLARE_FUNC(avs_get_error);
|
||||
AVSC_DECLARE_FUNC(avs_get_frame);
|
||||
AVSC_DECLARE_FUNC(avs_get_version);
|
||||
AVSC_DECLARE_FUNC(avs_get_video_info);
|
||||
AVSC_DECLARE_FUNC(avs_invoke);
|
||||
AVSC_DECLARE_FUNC(avs_release_clip);
|
||||
AVSC_DECLARE_FUNC(avs_release_value);
|
||||
AVSC_DECLARE_FUNC(avs_release_video_frame);
|
||||
AVSC_DECLARE_FUNC(avs_take_clip);
|
||||
#undef AVSC_DECLARE_FUNC
|
||||
} AviSynthLibrary;
|
||||
|
||||
@@ -127,19 +128,20 @@ static av_cold int avisynth_load_library(void) {
|
||||
if(!continue_on_fail && !avs_library->name) \
|
||||
goto fail; \
|
||||
}
|
||||
LOAD_AVS_FUNC(avs_bit_blt, 0);
|
||||
LOAD_AVS_FUNC(avs_clip_get_error, 0);
|
||||
LOAD_AVS_FUNC(avs_create_script_environment, 0);
|
||||
LOAD_AVS_FUNC(avs_delete_script_environment, 0);
|
||||
LOAD_AVS_FUNC(avs_get_error, 1); // New to AviSynth 2.6
|
||||
LOAD_AVS_FUNC(avs_clip_get_error, 0);
|
||||
LOAD_AVS_FUNC(avs_invoke, 0);
|
||||
LOAD_AVS_FUNC(avs_release_value, 0);
|
||||
LOAD_AVS_FUNC(avs_get_video_info, 0);
|
||||
LOAD_AVS_FUNC(avs_take_clip, 0);
|
||||
LOAD_AVS_FUNC(avs_release_clip, 0);
|
||||
LOAD_AVS_FUNC(avs_bit_blt, 0);
|
||||
LOAD_AVS_FUNC(avs_get_audio, 0);
|
||||
LOAD_AVS_FUNC(avs_get_error, 1); // New to AviSynth 2.6
|
||||
LOAD_AVS_FUNC(avs_get_frame, 0);
|
||||
LOAD_AVS_FUNC(avs_get_version, 0);
|
||||
LOAD_AVS_FUNC(avs_get_video_info, 0);
|
||||
LOAD_AVS_FUNC(avs_invoke, 0);
|
||||
LOAD_AVS_FUNC(avs_release_clip, 0);
|
||||
LOAD_AVS_FUNC(avs_release_value, 0);
|
||||
LOAD_AVS_FUNC(avs_release_video_frame, 0);
|
||||
LOAD_AVS_FUNC(avs_take_clip, 0);
|
||||
#undef LOAD_AVS_FUNC
|
||||
|
||||
atexit(avisynth_atexit_handler);
|
||||
@@ -479,7 +481,26 @@ static int avisynth_read_packet_video(AVFormatContext *s, AVPacket *pkt, int dis
|
||||
pitch = -pitch;
|
||||
}
|
||||
|
||||
// An issue with avs_bit_blt on 2.5.8 prevents video from working correctly.
|
||||
// This problem doesn't exist for 2.6 and AvxSynth, so enable the workaround
|
||||
// for 2.5.8 only. This only displays the warning and exits if the script has
|
||||
// video. 2.5.8's internal interface version is 3, so avs_get_version allows
|
||||
// it to work only in the circumstance that the interface is 5 or higher (4 is
|
||||
// unused). There's a strong chance that AvxSynth, having been based on 2.5.8,
|
||||
// would also be identified as interface version 3, but since AvxSynth doesn't
|
||||
// suffer from this problem, special-case it.
|
||||
#ifdef _WIN32
|
||||
if (avs_library->avs_get_version(avs->clip) > 3) {
|
||||
avs_library->avs_bit_blt(avs->env, dst_p, rowsize, src_p, pitch, rowsize, planeheight);
|
||||
} else {
|
||||
av_log(s, AV_LOG_ERROR, "Video input from AviSynth 2.5.8 is not supported. Please upgrade to 2.6.\n");
|
||||
avs->error = 1;
|
||||
av_freep(&pkt->data);
|
||||
return AVERROR_UNKNOWN;
|
||||
}
|
||||
#else
|
||||
avs_library->avs_bit_blt(avs->env, dst_p, rowsize, src_p, pitch, rowsize, planeheight);
|
||||
#endif
|
||||
dst_p += rowsize * planeheight;
|
||||
}
|
||||
|
||||
|
@@ -34,6 +34,7 @@ static int dts_probe(AVProbeData *p)
uint32_t state = -1;
int markers[3] = {0};
int sum, max;
int64_t diff = 0;

buf = p->buf;

@@ -54,12 +55,16 @@ static int dts_probe(AVProbeData *p)
if (state == DCA_MARKER_14B_LE)
if ((bytestream_get_be16(&bufp) & 0xF0FF) == 0xF007)
markers[2]++;

if (buf - p->buf >= 4)
diff += FFABS(AV_RL16(buf) - AV_RL16(buf-4));
}
sum = markers[0] + markers[1] + markers[2];
max = markers[1] > markers[0];
max = markers[2] > markers[max] ? 2 : max;
if (markers[max] > 3 && p->buf_size / markers[max] < 32*1024 &&
markers[max] * 4 > sum * 3)
markers[max] * 4 > sum * 3 &&
diff / p->buf_size > 200)
return AVPROBE_SCORE_EXTENSION + 1;

return 0;

@@ -27,6 +27,7 @@
|
#include "os_support.h"
#include "url.h"
#include "libavutil/opt.h"
#include "libavutil/bprint.h"

#define CONTROL_BUFFER_SIZE 1024
#define CREDENTIALS_BUFFER_SIZE 128

@@ -42,8 +43,6 @@ typedef enum {
typedef struct {
const AVClass *class;
URLContext *conn_control; /**< Control connection */
int conn_control_block_flag; /**< Controls block/unblock mode of data connection */
AVIOInterruptCB conn_control_interrupt_cb; /**< Controls block/unblock mode of data connection */
URLContext *conn_data; /**< Data connection, NULL when not connected */
uint8_t control_buffer[CONTROL_BUFFER_SIZE]; /**< Control connection buffer */
uint8_t *control_buf_ptr, *control_buf_end;

@@ -77,18 +76,10 @@ static const AVClass ftp_context_class = {
.version = LIBAVUTIL_VERSION_INT,
};

static int ftp_conn_control_block_control(void *data)
{
FTPContext *s = data;
return s->conn_control_block_flag;
}

static int ftp_getc(FTPContext *s)
{
int len;
if (s->control_buf_ptr >= s->control_buf_end) {
if (s->conn_control_block_flag)
return AVERROR_EXIT;
len = ffurl_read(s->conn_control, s->control_buffer, CONTROL_BUFFER_SIZE);
if (len < 0) {
return len;

@@ -106,12 +97,10 @@ static int ftp_get_line(FTPContext *s, char *line, int line_size)
{
int ch;
char *q = line;
int ori_block_flag = s->conn_control_block_flag;

for (;;) {
ch = ftp_getc(s);
if (ch < 0) {
s->conn_control_block_flag = ori_block_flag;
return ch;
}
if (ch == '\n') {

@@ -119,35 +108,14 @@ static int ftp_get_line(FTPContext *s, char *line, int line_size)
if (q > line && q[-1] == '\r')
q--;
*q = '\0';

s->conn_control_block_flag = ori_block_flag;
return 0;
} else {
s->conn_control_block_flag = 0; /* line need to be finished */
if ((q - line) < line_size - 1)
*q++ = ch;
}
}
}

static int ftp_flush_control_input(FTPContext *s)
{
char buf[CONTROL_BUFFER_SIZE];
int err, ori_block_flag = s->conn_control_block_flag;

s->conn_control_block_flag = 1;
do {
err = ftp_get_line(s, buf, sizeof(buf));
} while (!err);

s->conn_control_block_flag = ori_block_flag;

if (err < 0 && err != AVERROR_EXIT)
return err;

return 0;
}

/*
* This routine returns ftp server response code.
* Server may send more than one response for a certain command, following priorities are used:

@@ -156,49 +124,46 @@ static int ftp_flush_control_input(FTPContext *s)
*/
static int ftp_status(FTPContext *s, char **line, const int response_codes[])
{
int err, i, result = 0, pref_code_found = 0, wait_count = 100;
int err, i, dash = 0, result = 0, code_found = 0;
char buf[CONTROL_BUFFER_SIZE];
AVBPrint line_buffer;

/* Set blocking mode */
s->conn_control_block_flag = 0;
for (;;) {
if (line)
av_bprint_init(&line_buffer, 0, AV_BPRINT_SIZE_AUTOMATIC);

while (!code_found || dash) {
if ((err = ftp_get_line(s, buf, sizeof(buf))) < 0) {
if (err == AVERROR_EXIT) {
if (!pref_code_found && wait_count--) {
av_usleep(10000);
continue;
}
}
return result;
av_bprint_finalize(&line_buffer, NULL);
return err;
}

av_log(s, AV_LOG_DEBUG, "%s\n", buf);

if (!pref_code_found) {
if (strlen(buf) < 3)
if (strlen(buf) < 4)
continue;

err = 0;
for (i = 0; i < 3; ++i) {
if (buf[i] < '0' || buf[i] > '9')
continue;
err *= 10;
err += buf[i] - '0';
}
dash = !!(buf[3] == '-');

err = 0;
for (i = 0; i < 3; ++i) {
if (buf[i] < '0' || buf[i] > '9')
continue;
err *= 10;
err += buf[i] - '0';
}

for (i = 0; response_codes[i]; ++i) {
if (err == response_codes[i]) {
/* first code received. Now get all lines in non blocking mode */
s->conn_control_block_flag = 1;
pref_code_found = 1;
result = err;
if (line)
*line = av_strdup(buf);
break;
}
for (i = 0; response_codes[i]; ++i) {
if (err == response_codes[i]) {
if (line)
av_bprintf(&line_buffer, "%s", buf);
code_found = 1;
result = err;
break;
}
}
}

if (line)
av_bprint_finalize(&line_buffer, line);
return result;
}

@@ -207,12 +172,6 @@ static int ftp_send_command(FTPContext *s, const char *command,
{
int err;

/* Flush control connection input to get rid of non relevant responses if any */
if ((err = ftp_flush_control_input(s)) < 0)
return err;

/* send command in blocking mode */
s->conn_control_block_flag = 0;
if ((err = ffurl_write(s->conn_control, command, strlen(command))) < 0)
return err;
if (!err)

@@ -434,8 +393,6 @@ static int ftp_connect_control_connection(URLContext *h)
FTPContext *s = h->priv_data;
const int connect_codes[] = {220, 0};

s->conn_control_block_flag = 0;

if (!s->conn_control) {
ff_url_join(buf, sizeof(buf), "tcp", NULL,
s->hostname, s->server_control_port, NULL);

@@ -444,7 +401,7 @@ static int ftp_connect_control_connection(URLContext *h)
av_dict_set(&opts, "timeout", opts_format, 0);
} /* if option is not given, don't pass it and let tcp use its own default */
err = ffurl_open(&s->conn_control, buf, AVIO_FLAG_READ_WRITE,
&s->conn_control_interrupt_cb, &opts);
&h->interrupt_callback, &opts);
av_dict_free(&opts);
if (err < 0) {
av_log(h, AV_LOG_ERROR, "Cannot open control connection\n");

@@ -489,7 +446,7 @@ static int ftp_connect_data_connection(URLContext *h)
snprintf(opts_format, sizeof(opts_format), "%d", s->rw_timeout);
av_dict_set(&opts, "timeout", opts_format, 0);
} /* if option is not given, don't pass it and let tcp use its own default */
err = ffurl_open(&s->conn_data, buf, AVIO_FLAG_READ_WRITE,
err = ffurl_open(&s->conn_data, buf, h->flags,
&h->interrupt_callback, &opts);
av_dict_free(&opts);
if (err < 0)

@@ -553,8 +510,6 @@ static int ftp_open(URLContext *h, const char *url, int flags)
s->state = DISCONNECTED;
s->filesize = -1;
s->position = 0;
s->conn_control_interrupt_cb.opaque = s;
s->conn_control_interrupt_cb.callback = ftp_conn_control_block_control;

av_url_split(proto, sizeof(proto),
s->credencials, sizeof(s->credencials),

@@ -2364,6 +2364,7 @@ static int matroska_parse_block(MatroskaDemuxContext *matroska, uint8_t *data,
uint32_t *lace_size = NULL;
int n, flags, laces = 0;
uint64_t num;
int trust_default_duration = 1;

if ((n = matroska_ebmlnum_uint(matroska, data, size, &num)) < 0) {
av_log(matroska->ctx, AV_LOG_ERROR, "EBML block data error\n");

@@ -2418,7 +2419,15 @@ static int matroska_parse_block(MatroskaDemuxContext *matroska, uint8_t *data,
if (res)
goto end;

if (!block_duration)
if (track->audio.samplerate == 8000) {
// If this is needed for more codecs, then add them here
if (st->codec->codec_id == AV_CODEC_ID_AC3) {
if(track->audio.samplerate != st->codec->sample_rate || !st->codec->frame_size)
trust_default_duration = 0;
}
}

if (!block_duration && trust_default_duration)
block_duration = track->default_duration * laces / matroska->time_scale;

if (cluster_time != (uint64_t)-1 && (block_time >= 0 || cluster_time >= -block_time))

@@ -872,6 +872,17 @@ static int mkv_write_tag(AVFormatContext *s, AVDictionary *m, unsigned int eleme
return 0;
}

static int mkv_check_tag(AVDictionary *m)
{
AVDictionaryEntry *t = NULL;

while ((t = av_dict_get(m, "", t, AV_DICT_IGNORE_SUFFIX)))
if (av_strcasecmp(t->key, "title") && av_strcasecmp(t->key, "stereo_mode"))
return 1;

return 0;
}

static int mkv_write_tags(AVFormatContext *s)
{
ebml_master tags = {0};

@@ -879,7 +890,7 @@ static int mkv_write_tags(AVFormatContext *s)

ff_metadata_conv_ctx(s, ff_mkv_metadata_conv, NULL);

if (av_dict_get(s->metadata, "", NULL, AV_DICT_IGNORE_SUFFIX)) {
if (mkv_check_tag(s->metadata)) {
ret = mkv_write_tag(s, s->metadata, 0, 0, &tags);
if (ret < 0) return ret;
}

@@ -887,7 +898,7 @@ static int mkv_write_tags(AVFormatContext *s)
for (i = 0; i < s->nb_streams; i++) {
AVStream *st = s->streams[i];

if (!av_dict_get(st->metadata, "", 0, AV_DICT_IGNORE_SUFFIX))
if (!mkv_check_tag(st->metadata))
continue;

ret = mkv_write_tag(s, st->metadata, MATROSKA_ID_TAGTARGETS_TRACKUID, i + 1, &tags);

@@ -897,7 +908,7 @@ static int mkv_write_tags(AVFormatContext *s)
for (i = 0; i < s->nb_chapters; i++) {
AVChapter *ch = s->chapters[i];

if (!av_dict_get(ch->metadata, "", NULL, AV_DICT_IGNORE_SUFFIX))
if (!mkv_check_tag(ch->metadata))
continue;

ret = mkv_write_tag(s, ch->metadata, MATROSKA_ID_TAGTARGETS_CHAPTERUID, ch->id, &tags);

@@ -233,10 +233,11 @@ static int read_packet(AVFormatContext *s, AVPacket *pkt)
p->current_frame_block++;
}

size = p->video_size - p->frames_offset_table[p->current_frame];
if (size < 1)
if (p->frames_offset_table[p->current_frame] >= p->video_size)
return AVERROR_INVALIDDATA;

size = p->video_size - p->frames_offset_table[p->current_frame];

if (av_new_packet(pkt, size) < 0)
return AVERROR(ENOMEM);

@@ -2783,6 +2783,10 @@ int avformat_find_stream_info(AVFormatContext *ic, AVDictionary **options)
} else {
pkt = add_to_pktbuf(&ic->packet_buffer, &pkt1,
&ic->packet_buffer_end);
if (!pkt) {
ret = AVERROR(ENOMEM);
goto find_stream_info_err;
}
if ((ret = av_dup_packet(pkt)) < 0)
goto find_stream_info_err;
}

@@ -2855,6 +2859,8 @@ int avformat_find_stream_info(AVFormatContext *ic, AVDictionary **options)

if (!st->info->duration_error)
st->info->duration_error = av_mallocz(sizeof(st->info->duration_error[0])*2);
if (!st->info->duration_error)
return AVERROR(ENOMEM);

// if(st->codec->codec_type == AVMEDIA_TYPE_VIDEO)
// av_log(NULL, AV_LOG_ERROR, "%f\n", dts);

@@ -191,7 +191,7 @@ int av_reallocp_array(void *ptr, size_t nmemb, size_t size)
{
void **ptrptr = ptr;
*ptrptr = av_realloc_f(*ptrptr, nmemb, size);
if (!*ptrptr && !(nmemb && size))
if (!*ptrptr && nmemb && size)
return AVERROR(ENOMEM);
return 0;
}

@@ -754,12 +754,13 @@ static av_always_inline void planar_rgb16_to_y(uint8_t *_dst, const uint8_t *_sr
const uint16_t **src = (const uint16_t **)_src;
uint16_t *dst = (uint16_t *)_dst;
int32_t ry = rgb2yuv[RY_IDX], gy = rgb2yuv[GY_IDX], by = rgb2yuv[BY_IDX];
int shift = bpc < 16 ? bpc : 14;
for (i = 0; i < width; i++) {
int g = rdpx(src[0] + i);
int b = rdpx(src[1] + i);
int r = rdpx(src[2] + i);

dst[i] = ((ry*r + gy*g + by*b + (33 << (RGB2YUV_SHIFT + bpc - 9))) >> (RGB2YUV_SHIFT + bpc - 14));
dst[i] = ((ry*r + gy*g + by*b + (33 << (RGB2YUV_SHIFT + bpc - 9))) >> (RGB2YUV_SHIFT + shift - 14));
}
}

@@ -773,13 +774,14 @@ static av_always_inline void planar_rgb16_to_uv(uint8_t *_dstU, uint8_t *_dstV,
uint16_t *dstV = (uint16_t *)_dstV;
int32_t ru = rgb2yuv[RU_IDX], gu = rgb2yuv[GU_IDX], bu = rgb2yuv[BU_IDX];
int32_t rv = rgb2yuv[RV_IDX], gv = rgb2yuv[GV_IDX], bv = rgb2yuv[BV_IDX];
int shift = bpc < 16 ? bpc : 14;
for (i = 0; i < width; i++) {
int g = rdpx(src[0] + i);
int b = rdpx(src[1] + i);
int r = rdpx(src[2] + i);

dstU[i] = (ru*r + gu*g + bu*b + (257 << (RGB2YUV_SHIFT + bpc - 9))) >> (RGB2YUV_SHIFT + bpc - 14);
dstV[i] = (rv*r + gv*g + bv*b + (257 << (RGB2YUV_SHIFT + bpc - 9))) >> (RGB2YUV_SHIFT + bpc - 14);
dstU[i] = (ru*r + gu*g + bu*b + (257 << (RGB2YUV_SHIFT + bpc - 9))) >> (RGB2YUV_SHIFT + shift - 14);
dstV[i] = (rv*r + gv*g + bv*b + (257 << (RGB2YUV_SHIFT + bpc - 9))) >> (RGB2YUV_SHIFT + shift - 14);
}
}
#undef rdpx

@@ -1,6 +1,6 @@
b53f31e572394f225aff0bc82b5d1cc9 *./tests/data/lavf/lavf.mkv
472553 ./tests/data/lavf/lavf.mkv
1748c0b3221977509c62a158236d2492 *./tests/data/lavf/lavf.mkv
472533 ./tests/data/lavf/lavf.mkv
./tests/data/lavf/lavf.mkv CRC=0x4780846b
84dcb326fe85aeeb5768beb44372f248 *./tests/data/lavf/lavf.mkv
320297 ./tests/data/lavf/lavf.mkv
0f78dd9299210a51b18faafc971e71f2 *./tests/data/lavf/lavf.mkv
320265 ./tests/data/lavf/lavf.mkv
./tests/data/lavf/lavf.mkv CRC=0x4780846b

@@ -1,48 +1,48 @@
ret: 0 st: 1 flags:1 dts: 0.000000 pts: 0.000000 pos: 555 size: 208
ret: 0 st: 1 flags:1 dts: 0.000000 pts: 0.000000 pos: 523 size: 208
ret: 0 st:-1 flags:0 ts:-1.000000
ret: 0 st: 0 flags:1 dts: 0.011000 pts: 0.011000 pos: 771 size: 27837
ret: 0 st: 0 flags:1 dts: 0.011000 pts: 0.011000 pos: 739 size: 27837
ret: 0 st:-1 flags:1 ts: 1.894167
ret: 0 st: 0 flags:1 dts: 0.971000 pts: 0.971000 pos: 292185 size: 27834
ret: 0 st: 0 flags:1 dts: 0.971000 pts: 0.971000 pos: 292153 size: 27834
ret: 0 st: 0 flags:0 ts: 0.788000
ret: 0 st: 0 flags:1 dts: 0.971000 pts: 0.971000 pos: 292185 size: 27834
ret: 0 st: 0 flags:1 dts: 0.971000 pts: 0.971000 pos: 292153 size: 27834
ret: 0 st: 0 flags:1 ts:-0.317000
ret: 0 st: 0 flags:1 dts: 0.011000 pts: 0.011000 pos: 771 size: 27837
ret: 0 st: 0 flags:1 dts: 0.011000 pts: 0.011000 pos: 739 size: 27837
ret:-1 st: 1 flags:0 ts: 2.577000
ret: 0 st: 1 flags:1 ts: 1.471000
ret: 0 st: 1 flags:1 dts: 0.993000 pts: 0.993000 pos: 320026 size: 209
ret: 0 st: 1 flags:1 dts: 0.993000 pts: 0.993000 pos: 319994 size: 209
ret: 0 st:-1 flags:0 ts: 0.365002
ret: 0 st: 0 flags:1 dts: 0.491000 pts: 0.491000 pos: 146738 size: 27925
ret: 0 st: 0 flags:1 dts: 0.491000 pts: 0.491000 pos: 146706 size: 27925
ret: 0 st:-1 flags:1 ts:-0.740831
ret: 0 st: 0 flags:1 dts: 0.011000 pts: 0.011000 pos: 771 size: 27837
ret: 0 st: 0 flags:1 dts: 0.011000 pts: 0.011000 pos: 739 size: 27837
ret:-1 st: 0 flags:0 ts: 2.153000
ret: 0 st: 0 flags:1 ts: 1.048000
ret: 0 st: 0 flags:1 dts: 0.971000 pts: 0.971000 pos: 292185 size: 27834
ret: 0 st: 0 flags:1 dts: 0.971000 pts: 0.971000 pos: 292153 size: 27834
ret: 0 st: 1 flags:0 ts:-0.058000
ret: 0 st: 1 flags:1 dts: 0.000000 pts: 0.000000 pos: 555 size: 208
ret: 0 st: 1 flags:1 dts: 0.000000 pts: 0.000000 pos: 523 size: 208
ret: 0 st: 1 flags:1 ts: 2.836000
ret: 0 st: 1 flags:1 dts: 0.993000 pts: 0.993000 pos: 320026 size: 209
ret: 0 st: 1 flags:1 dts: 0.993000 pts: 0.993000 pos: 319994 size: 209
ret:-1 st:-1 flags:0 ts: 1.730004
ret: 0 st:-1 flags:1 ts: 0.624171
ret: 0 st: 0 flags:1 dts: 0.491000 pts: 0.491000 pos: 146738 size: 27925
ret: 0 st: 0 flags:1 dts: 0.491000 pts: 0.491000 pos: 146706 size: 27925
ret: 0 st: 0 flags:0 ts:-0.482000
ret: 0 st: 0 flags:1 dts: 0.011000 pts: 0.011000 pos: 771 size: 27837
ret: 0 st: 0 flags:1 dts: 0.011000 pts: 0.011000 pos: 739 size: 27837
ret: 0 st: 0 flags:1 ts: 2.413000
ret: 0 st: 0 flags:1 dts: 0.971000 pts: 0.971000 pos: 292185 size: 27834
ret: 0 st: 0 flags:1 dts: 0.971000 pts: 0.971000 pos: 292153 size: 27834
ret:-1 st: 1 flags:0 ts: 1.307000
ret: 0 st: 1 flags:1 ts: 0.201000
ret: 0 st: 1 flags:1 dts: 0.000000 pts: 0.000000 pos: 555 size: 208
ret: 0 st: 1 flags:1 dts: 0.000000 pts: 0.000000 pos: 523 size: 208
ret: 0 st:-1 flags:0 ts:-0.904994
ret: 0 st: 0 flags:1 dts: 0.011000 pts: 0.011000 pos: 771 size: 27837
ret: 0 st: 0 flags:1 dts: 0.011000 pts: 0.011000 pos: 739 size: 27837
ret: 0 st:-1 flags:1 ts: 1.989173
ret: 0 st: 0 flags:1 dts: 0.971000 pts: 0.971000 pos: 292185 size: 27834
ret: 0 st: 0 flags:1 dts: 0.971000 pts: 0.971000 pos: 292153 size: 27834
ret: 0 st: 0 flags:0 ts: 0.883000
ret: 0 st: 0 flags:1 dts: 0.971000 pts: 0.971000 pos: 292185 size: 27834
ret: 0 st: 0 flags:1 dts: 0.971000 pts: 0.971000 pos: 292153 size: 27834
ret: 0 st: 0 flags:1 ts:-0.222000
ret: 0 st: 0 flags:1 dts: 0.011000 pts: 0.011000 pos: 771 size: 27837
ret: 0 st: 0 flags:1 dts: 0.011000 pts: 0.011000 pos: 739 size: 27837
ret:-1 st: 1 flags:0 ts: 2.672000
ret: 0 st: 1 flags:1 ts: 1.566000
ret: 0 st: 1 flags:1 dts: 0.993000 pts: 0.993000 pos: 320026 size: 209
ret: 0 st: 1 flags:1 dts: 0.993000 pts: 0.993000 pos: 319994 size: 209
ret: 0 st:-1 flags:0 ts: 0.460008
ret: 0 st: 0 flags:1 dts: 0.491000 pts: 0.491000 pos: 146738 size: 27925
ret: 0 st: 0 flags:1 dts: 0.491000 pts: 0.491000 pos: 146706 size: 27925
ret: 0 st:-1 flags:1 ts:-0.645825
ret: 0 st: 0 flags:1 dts: 0.011000 pts: 0.011000 pos: 771 size: 27837
ret: 0 st: 0 flags:1 dts: 0.011000 pts: 0.011000 pos: 739 size: 27837