Merge remote-tracking branch 'qatar/master'

* qatar/master:
  fate: split off DPCM codec FATE tests into their own file
  fate: split off PCM codec FATE tests into their own file
  libvorbis: K&R reformatting cosmetics
  libmp3lame: K&R formatting cosmetics
  fate: Add a video test for xxan decoder
  mpegvideo_enc: K&R cosmetics (line 1000-2000).
  avconv: K&R cosmetics
  qt-faststart: Fix up indentation
  indeo4: remove two unused variables
  doxygen: cleanup style to support older doxy
  fate: add more tests for VC-1 decoder
  applehttpproto: Apply the same reload interval changes as for the demuxer
  applehttp: Use half the target duration as interval if the playlist didn't update
  applehttp: Use the last segment duration as reload interval
  lagarith: add decode support for arith rgb24 mode

Conflicts:
	avconv.c
	libavcodec/libmp3lame.c
	libavcodec/mpegvideo_enc.c

Merged-by: Michael Niedermayer <michaelni@gmx.at>
Commit: 00c0465dbc
Author: Michael Niedermayer
Date: 2011-12-30 03:46:24 +01:00
21 changed files with 1544 additions and 1223 deletions


@@ -868,7 +868,8 @@ static void choose_pixel_fmt(AVStream *st, AVCodec *codec)
        if (st->codec->codec_id == CODEC_ID_MJPEG) {
            p = (const enum PixelFormat[]) { PIX_FMT_YUVJ420P, PIX_FMT_YUVJ422P, PIX_FMT_YUV420P, PIX_FMT_YUV422P, PIX_FMT_NONE };
        } else if (st->codec->codec_id == CODEC_ID_LJPEG) {
            p = (const enum PixelFormat[]) { PIX_FMT_YUVJ420P, PIX_FMT_YUVJ422P, PIX_FMT_YUVJ444P, PIX_FMT_YUV420P,
                                             PIX_FMT_YUV422P, PIX_FMT_YUV444P, PIX_FMT_BGRA, PIX_FMT_NONE };
        }
    }
    for (; *p != PIX_FMT_NONE; p++) {

@@ -895,7 +896,8 @@ get_sync_ipts(const OutputStream *ost)
    return (double)(ist->pts - of->start_time) / AV_TIME_BASE;
}

static void write_frame(AVFormatContext *s, AVPacket *pkt, AVCodecContext *avctx, AVBitStreamFilterContext *bsfc)
{
    int ret;
    while (bsfc) {

@@ -1034,8 +1036,8 @@ need_realloc:
    }
    if (audio_sync_method) {
        double delta = get_sync_ipts(ost) * enc->sample_rate - ost->sync_opts -
                       av_fifo_size(ost->fifo) / (enc->channels * osize);
        int idelta = delta * dec->sample_rate / enc->sample_rate;
        int byte_delta = idelta * isize * dec->channels;

@@ -1077,8 +1079,8 @@ need_realloc:
            }
        }
    } else
        ost->sync_opts = lrintf(get_sync_ipts(ost) * enc->sample_rate) -
                         av_fifo_size(ost->fifo) / (enc->channels * osize); // FIXME wrong
    if (ost->audio_resample) {
        buftmp = audio_buf;

@@ -1491,7 +1493,8 @@ static void do_video_out(AVFormatContext *s,
    }
}

static double psnr(double d)
{
    return -10.0 * log(d) / log(10.0);
}

@@ -1612,7 +1615,8 @@ static void print_report(OutputFile *output_files,
                    error = enc->coded_frame->error[j];
                    scale = enc->width * enc->height * 255.0 * 255.0;
                }
                if (j)
                    scale /= 4;
                error_sum += error;
                scale_sum += scale;
                snprintf(buf + strlen(buf), sizeof(buf) - strlen(buf), "%c:%2.2f ", type[j], psnr(error / scale));

@@ -2723,7 +2727,8 @@ static int transcode(OutputFile *output_files,
        if (!input_files[ist->file_index].eof_reached) {
            if (ipts < ipts_min) {
                ipts_min = ipts;
                if (input_sync)
                    file_index = ist->file_index;
            }
            if (opts < opts_min) {
                opts_min = opts;

@@ -2790,14 +2795,18 @@ static int transcode(OutputFile *output_files,
        if (pkt.dts != AV_NOPTS_VALUE)
            pkt.dts *= ist->ts_scale;
        //fprintf(stderr, "next:%"PRId64" dts:%"PRId64" off:%"PRId64" %d\n",
        //        ist->next_pts,
        //        pkt.dts, input_files[ist->file_index].ts_offset,
        //        ist->st->codec->codec_type);
        if (pkt.dts != AV_NOPTS_VALUE && ist->next_pts != AV_NOPTS_VALUE
            && (is->iformat->flags & AVFMT_TS_DISCONT)) {
            int64_t pkt_dts = av_rescale_q(pkt.dts, ist->st->time_base, AV_TIME_BASE_Q);
            int64_t delta = pkt_dts - ist->next_pts;
            if ((FFABS(delta) > 1LL * dts_delta_threshold * AV_TIME_BASE || pkt_dts + 1 < ist->pts) && !copy_ts) {
                input_files[ist->file_index].ts_offset -= delta;
                av_log(NULL, AV_LOG_DEBUG,
                       "timestamp discontinuity %"PRId64", new offset= %"PRId64"\n",
                       delta, input_files[ist->file_index].ts_offset);
                pkt.dts-= av_rescale_q(delta, AV_TIME_BASE_Q, ist->st->time_base);
                if (pkt.pts != AV_NOPTS_VALUE)
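For reference, the psnr() helper in the hunks above converts a normalized mean squared error d = error / scale into the usual decibel scale, psnr(d) = -10 * log10(d). A minimal stand-alone check, with a made-up error value that is not taken from this commit:

#include <math.h>
#include <stdio.h>

static double psnr(double d)
{
    return -10.0 * log(d) / log(10.0);
}

int main(void)
{
    /* a mean squared error of 1/10000 of full scale corresponds to 40 dB */
    printf("%f\n", psnr(1.0 / 10000.0)); /* prints 40.000000 */
    return 0;
}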


@@ -235,6 +235,8 @@ div.center img {
 #footer {
     margin: -10px 1em 0;
     padding-top: 20px;
+    text-align: center;
+    font-size: small;
 }
 address.footer {
@@ -962,7 +964,7 @@ dl.citelist dd {
 }
 .tabs3 .tablist a {
-    padding: 0 10px;
+    padding-left: 10px;
 }
@@ -981,19 +983,6 @@ h1 a, h2 a, h3 a {
     color: inherit;
 }
-#banner, #top {
-    background-color: #BBC9D8;
-    border-bottom: 1px solid #7A96B3;
-    border-top: 1px solid #7A96B3;
-    position: relative;
-    text-align: center;
-}
-#banner img, #top img {
-    padding-bottom: 1px;
-    padding-top: 5px;
-}
 #body {
     margin: 0 1em;
 }
@@ -1041,12 +1030,12 @@ img {
     border: 0;
 }
-#navrow1 {
+.tabs {
     margin-top: 12px;
     border-top: 1px solid #5C665C;
 }
-#navrow1, #navrow2, #navrow3, #navrow4 {
+.tabs, .tabs2, .tabs3, .tabs4 {
     background-color: #738073;
     border-bottom: 1px solid #5C665C;
     border-left: 1px solid #5C665C;
@@ -1055,24 +1044,36 @@ img {
     text-align: center;
 }
-#navrow1 a, #navrow2 a, #navrow3 a, #navrow4 a {
+.tabs a,
+.tabs2 a,
+.tabs3 a,
+.tabs4 a {
     color: white;
     padding: 0.3em;
     text-decoration: none;
 }
-#navrow1 ul, #navrow2 ul, #navrow3 ul, #navrow4 ul {
+.tabs ul,
+.tabs2 ul,
+.tabs3 ul,
+.tabs4 ul {
     padding: 0;
 }
-#navrow1 li.current a, #navrow2 li.current a, #navrow3 li.current a, #navrow4 li.current a {
+.tabs li.current a,
+.tabs2 li.current a,
+.tabs3 li.current a,
+.tabs4 li.current a {
     background-color: #414141;
     color: white;
     text-decoration: none;
 }
-#navrow1 a:hover, #navrow2 a:hover, #navrow3 a:hover, #navrow4 a:hover {
+.tabs a:hover,
+.tabs2 a:hover,
+.tabs3 a:hover,
+.tabs4 a:hover {
     background-color: #313131 !important;
     color: white;
     text-decoration: none;
@@ -1094,37 +1095,3 @@ pre {
 #proj_desc {
     font-size: 1.2em;
 }
-#repos {
-    margin-left: 1em;
-    margin-right: 1em;
-    border-collapse: collapse;
-    border: solid 1px #6A996A;
-}
-#repos th {
-    background-color: #7BB37B;
-    border: solid 1px #6A996A;
-}
-#repos td {
-    padding: 0.2em;
-    border: solid 1px #6A996A;
-}
-#distro_status {
-    margin-left: 1em;
-    margin-right: 1em;
-    border-collapse: collapse;
-    border: solid 1px #6A996A;
-}
-#distro_status th {
-    background-color: #7BB37B;
-    border: solid 1px #6A996A;
-}
-#distro_status td {
-    padding: 0.2em;
-    border: solid 1px #6A996A;
-}


@@ -930,7 +930,8 @@ static void choose_pixel_fmt(AVStream *st, AVCodec *codec)
        if (st->codec->codec_id == CODEC_ID_MJPEG) {
            p = (const enum PixelFormat[]) { PIX_FMT_YUVJ420P, PIX_FMT_YUVJ422P, PIX_FMT_YUV420P, PIX_FMT_YUV422P, PIX_FMT_NONE };
        } else if (st->codec->codec_id == CODEC_ID_LJPEG) {
            p = (const enum PixelFormat[]) { PIX_FMT_YUVJ420P, PIX_FMT_YUVJ422P, PIX_FMT_YUVJ444P, PIX_FMT_YUV420P,
                                             PIX_FMT_YUV422P, PIX_FMT_YUV444P, PIX_FMT_BGRA, PIX_FMT_NONE };
        }
    }
    for (; *p != PIX_FMT_NONE; p++) {

@@ -1098,8 +1099,8 @@ need_realloc:
    av_assert0(ost->audio_resample || dec->sample_fmt==enc->sample_fmt);
    if (audio_sync_method) {
        double delta = get_sync_ipts(ost) * enc->sample_rate - ost->sync_opts -
                       av_fifo_size(ost->fifo) / (enc->channels * osize);
        int idelta = delta * dec->sample_rate / enc->sample_rate;
        int byte_delta = idelta * isize * dec->channels;

@@ -1140,8 +1141,8 @@ need_realloc:
            }
        }
    } else
        ost->sync_opts = lrintf(get_sync_ipts(ost) * enc->sample_rate) -
                         av_fifo_size(ost->fifo) / (enc->channels * osize); // FIXME wrong
    if (ost->audio_resample) {
        buftmp = audio_buf;

@@ -1656,7 +1657,8 @@ static void print_report(OutputFile *output_files,
                    error = enc->coded_frame->error[j];
                    scale = enc->width * enc->height * 255.0 * 255.0;
                }
                if (j)
                    scale /= 4;
                error_sum += error;
                scale_sum += scale;
                snprintf(buf + strlen(buf), sizeof(buf) - strlen(buf), "%c:%2.2f ", type[j], psnr(error / scale));

@@ -2892,7 +2894,8 @@ static int transcode(OutputFile *output_files, int nb_output_files,
        if (!input_files[ist->file_index].eof_reached) {
            if (ipts < ipts_min) {
                ipts_min = ipts;
                if (input_sync)
                    file_index = ist->file_index;
            }
            if (opts < opts_min) {
                opts_min = opts;

@@ -2959,7 +2962,10 @@ static int transcode(OutputFile *output_files, int nb_output_files,
        if (pkt.dts != AV_NOPTS_VALUE)
            pkt.dts *= ist->ts_scale;
        //fprintf(stderr, "next:%"PRId64" dts:%"PRId64" off:%"PRId64" %d\n",
        //        ist->next_pts,
        //        pkt.dts, input_files[ist->file_index].ts_offset,
        //        ist->st->codec->codec_type);
        if (pkt.dts != AV_NOPTS_VALUE && ist->next_pts != AV_NOPTS_VALUE
            && (is->iformat->flags & AVFMT_TS_DISCONT)) {
            int64_t pkt_dts = av_rescale_q(pkt.dts, ist->st->time_base, AV_TIME_BASE_Q);

@@ -2969,7 +2975,8 @@ static int transcode(OutputFile *output_files, int nb_output_files,
                 ist->st->codec->codec_type != AVMEDIA_TYPE_SUBTITLE) ||
                pkt_dts+1<ist->pts)&& !copy_ts){
                input_files[ist->file_index].ts_offset -= delta;
                av_log(NULL, AV_LOG_DEBUG,
                       "timestamp discontinuity %"PRId64", new offset= %"PRId64"\n",
                       delta, input_files[ist->file_index].ts_offset);
                pkt.dts-= av_rescale_q(delta, AV_TIME_BASE_Q, ist->st->time_base);
                if (pkt.pts != AV_NOPTS_VALUE)
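The discontinuity handling in the transcode() hunks above rescales the packet dts into AV_TIME_BASE units, compares it against the next expected pts, and, when the gap exceeds dts_delta_threshold seconds (and -copyts is not in use), folds the gap into the per-file ts_offset and shifts the packet back. A minimal numeric sketch of that adjustment, with hypothetical values and simplified variables standing in for the real InputStream/InputFile fields:

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

#define AV_TIME_BASE 1000000

int main(void)
{
    /* Hypothetical numbers: the stream was expected to continue at 10 s,
     * but the next packet carries a dts of 1000 s (e.g. after a splice). */
    int64_t next_pts            = 10   * (int64_t)AV_TIME_BASE;
    int64_t pkt_dts             = 1000 * (int64_t)AV_TIME_BASE;
    int     dts_delta_threshold = 10; /* seconds */
    int64_t ts_offset           = 0;

    int64_t delta = pkt_dts - next_pts;
    if (delta > 1LL * dts_delta_threshold * AV_TIME_BASE) {
        ts_offset -= delta; /* later packets get shifted back by 990 s */
        pkt_dts   -= delta; /* this packet now lines up at 10 s again  */
    }
    printf("offset %"PRId64", dts %"PRId64"\n", ts_offset, pkt_dts);
    return 0;
}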


@@ -160,7 +160,7 @@ static inline int scale_tile_size(int def_size, int size_factor)
  */
 static int decode_pic_hdr(IVI4DecContext *ctx, AVCodecContext *avctx)
 {
-    int pic_size_indx, val, i, p;
+    int pic_size_indx, i, p;
     IVIPicConfig pic_conf;
     if (get_bits(&ctx->gb, 18) != 0x3FFF8) {
@@ -301,7 +301,7 @@ static int decode_pic_hdr(IVI4DecContext *ctx, AVCodecContext *avctx)
     /* skip picture header extension if any */
     while (get_bits1(&ctx->gb)) {
         av_dlog(avctx, "Pic hdr extension encountered!\n");
-        val = get_bits(&ctx->gb, 8);
+        skip_bits(&ctx->gb, 8);
     }
     if (get_bits1(&ctx->gb)) {
@@ -325,7 +325,7 @@ static int decode_pic_hdr(IVI4DecContext *ctx, AVCodecContext *avctx)
 static int decode_band_hdr(IVI4DecContext *ctx, IVIBandDesc *band,
                            AVCodecContext *avctx)
 {
-    int plane, band_num, hdr_size, indx, transform_id, scan_indx;
+    int plane, band_num, indx, transform_id, scan_indx;
     int i;
     plane = get_bits(&ctx->gb, 2);
@@ -337,7 +337,10 @@ static int decode_band_hdr(IVI4DecContext *ctx, IVIBandDesc *band,
     band->is_empty = get_bits1(&ctx->gb);
     if (!band->is_empty) {
-        hdr_size = get_bits1(&ctx->gb) ? get_bits(&ctx->gb, 16) : 4;
+        /* skip header size
+         * If header size is not given, header size is 4 bytes. */
+        if (get_bits1(&ctx->gb))
+            skip_bits(&ctx->gb, 16);
         band->is_halfpel = get_bits(&ctx->gb, 2);
         if (band->is_halfpel >= 2) {


@@ -117,27 +117,24 @@ static const int sSampleRates[] = {
};

static const int sBitRates[2][3][15] = {
    {
        { 0, 32, 64, 96, 128, 160, 192, 224, 256, 288, 320, 352, 384, 416, 448 },
        { 0, 32, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 384 },
        { 0, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320 }
    },
    {
        { 0, 32, 48, 56, 64, 80, 96, 112, 128, 144, 160, 176, 192, 224, 256 },
        { 0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160 },
        { 0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160 }
    },
};

static const int sSamplesPerFrame[2][3] = {
    { 384, 1152, 1152 },
    { 384, 1152, 576 }
};

static const int sBitsPerSlot[3] = { 32, 8, 8 };

static int mp3len(void *data, int *samplesPerFrame, int *sampleRate)
{

@@ -152,25 +149,30 @@ static int mp3len(void *data, int *samplesPerFrame, int *sampleRate)
    int mpeg_id = mode > 0;
    int temp0, temp1, bitRate;

    if (((header >> 21) & 0x7ff) != 0x7ff || mode == 3 || layerID == 3 ||
        sampleRateID == 3) {
        return -1;
    }

    if (!samplesPerFrame)
        samplesPerFrame = &temp0;
    if (!sampleRate)
        sampleRate = &temp1;

    //*isMono = ((header >> 6) & 0x03) == 0x03;

    *sampleRate = sSampleRates[sampleRateID] >> mode;
    bitRate = sBitRates[mpeg_id][layerID][bitRateID] * 1000;
    *samplesPerFrame = sSamplesPerFrame[mpeg_id][layerID];
    //av_log(NULL, AV_LOG_DEBUG,
    //       "sr:%d br:%d spf:%d l:%d m:%d\n",
    //       *sampleRate, bitRate, *samplesPerFrame, layerID, mode);

    return *samplesPerFrame * bitRate / (bitsPerSlot * *sampleRate) + isPadded;
}

static int MP3lame_encode_frame(AVCodecContext *avctx, unsigned char *frame,
                                int buf_size, void *data)
{
    Mp3AudioContext *s = avctx->priv_data;
    int len;

@@ -240,7 +242,9 @@ static int MP3lame_encode_frame(AVCodecContext *avctx,
    if (lame_result < 0) {
        if (lame_result == -1) {
            /* output buffer too small */
            av_log(avctx, AV_LOG_ERROR,
                   "lame: output buffer too small (buffer index: %d, free bytes: %d)\n",
                   s->buffer_index, BUFFER_SIZE - s->buffer_index);
        }
        return -1;
    }

@@ -251,7 +255,8 @@ static int MP3lame_encode_frame(AVCodecContext *avctx,
        return 0;

    len = mp3len(s->buffer, NULL, NULL);
    //av_log(avctx, AV_LOG_DEBUG, "in:%d packet-len:%d index:%d\n",
    //       avctx->frame_size, len, s->buffer_index);

    if (len <= s->buffer_index) {
        memcpy(frame, s->buffer, len);
        s->buffer_index -= len;
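The return value of mp3len() above is the standard MPEG audio frame size in bytes, samplesPerFrame * bitRate / (bitsPerSlot * sampleRate), plus one slot when the padding bit is set. A quick stand-alone check of that arithmetic with assumed parameters (MPEG-1 Layer III, 128 kbit/s, 44100 Hz; not values taken from this commit):

#include <stdio.h>

int main(void)
{
    int samplesPerFrame = 1152;   /* MPEG-1 Layer III */
    int bitRate         = 128000; /* bit/s */
    int bitsPerSlot     = 8;      /* Layer II/III slots are one byte */
    int sampleRate      = 44100;
    int isPadded        = 0;

    /* integer division as in mp3len(): 1152 * 128000 / (8 * 44100) = 417 */
    printf("%d\n", samplesPerFrame * bitRate / (bitsPerSlot * sampleRate) + isPadded);
    return 0;
}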


@@ -61,7 +61,8 @@ static const AVOption options[]={
};
static const AVClass class = { "libvorbis", av_default_item_name, options, LIBAVUTIL_VERSION_INT };

static av_cold int oggvorbis_init_encoder(vorbis_info *vi, AVCodecContext *avccontext)
{
    OggVorbisContext *context = avccontext->priv_data;
    double cfreq;

@@ -77,7 +78,8 @@ static av_cold int oggvorbis_init_encoder(vorbis_info *vi, AVCodecContext *avcco
        /* constant bitrate */
        if (vorbis_encode_setup_managed(vi, avccontext->channels,
                                        avccontext->sample_rate, minrate,
                                        avccontext->bit_rate, maxrate))
            return -1;
        /* variable bitrate by estimate, disable slow rate management */

@@ -130,9 +132,13 @@ static av_cold int oggvorbis_init_encoder(vorbis_info *vi, AVCodecContext *avcco
}

/* How many bytes are needed for a buffer of length 'l' */
static int xiph_len(int l)
{
    return (1 + l / 255 + l);
}

static av_cold int oggvorbis_encode_init(AVCodecContext *avccontext)
{
    OggVorbisContext *context = avccontext->priv_data;
    ogg_packet header, header_comm, header_code;
    uint8_t *p;

@@ -169,9 +175,11 @@ static av_cold int oggvorbis_encode_init(AVCodecContext *avccontext) {
    offset += header_code.bytes;
    assert(offset == avccontext->extradata_size);

#if 0
    vorbis_block_clear(&context->vb);
    vorbis_dsp_clear(&context->vd);
    vorbis_info_clear(&context->vi);
#endif
    vorbis_comment_clear(&context->vc);

    avccontext->frame_size = OGGVORBIS_FRAME_SIZE;

@@ -182,7 +190,6 @@ static av_cold int oggvorbis_encode_init(AVCodecContext *avccontext) {
    return 0;
}

static int oggvorbis_encode_frame(AVCodecContext *avccontext,
                                  unsigned char *packets,
                                  int buf_size, void *data)

@@ -255,8 +262,8 @@ static int oggvorbis_encode_frame(AVCodecContext *avccontext,
    return l;
}

static av_cold int oggvorbis_encode_close(AVCodecContext *avccontext)
{
    OggVorbisContext *context = avccontext->priv_data;
    /* ogg_packet op ; */

@@ -272,7 +279,6 @@ static av_cold int oggvorbis_encode_close(AVCodecContext *avccontext) {
    return 0;
}

AVCodec ff_libvorbis_encoder = {
    .name = "libvorbis",
    .type = AVMEDIA_TYPE_AUDIO,
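The xiph_len() helper above sizes one header packet for Xiph-style extradata lacing: a packet of length l is stored as l/255 bytes of value 255, one final length byte, and then the l payload bytes, hence 1 + l/255 + l. A small illustration with an assumed packet size (not from this commit):

#include <stdio.h>

static int xiph_len(int l)
{
    return (1 + l / 255 + l);
}

int main(void)
{
    /* a 600-byte header packet: two 255-valued lacing bytes, one byte of
     * value 90, then the 600 payload bytes -> 603 bytes total */
    printf("%d\n", xiph_len(600)); /* prints 603 */
    return 0;
}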


@@ -1147,7 +1147,8 @@ static int estimate_best_b_count(MpegEncContext *s)
    for (i = 0; i < s->max_b_frames + 2; i++) {
        int ysize = c->width * c->height;
        int csize = (c->width / 2) * (c->height / 2);
        Picture pre_input, *pre_input_ptr = i ? s->input_picture[i - 1] :
                                                s->next_picture_ptr;
        avcodec_get_frame_defaults(&input[i]);
        input[i].data[0] = av_malloc(ysize + 2 * csize);

@@ -1166,9 +1167,15 @@ static int estimate_best_b_count(MpegEncContext *s)
                pre_input.f.data[2] += INPLACE_OFFSET;
            }
            s->dsp.shrink[scale](input[i].data[0], input[i].linesize[0],
                                 pre_input.f.data[0], pre_input.f.linesize[0],
                                 c->width, c->height);
            s->dsp.shrink[scale](input[i].data[1], input[i].linesize[1],
                                 pre_input.f.data[1], pre_input.f.linesize[1],
                                 c->width >> 1, c->height >> 1);
            s->dsp.shrink[scale](input[i].data[2], input[i].linesize[2],
                                 pre_input.f.data[2], pre_input.f.linesize[2],
                                 c->width >> 1, c->height >> 1);
        }
    }

@@ -1182,15 +1189,18 @@ static int estimate_best_b_count(MpegEncContext *s)
        input[0].pict_type = AV_PICTURE_TYPE_I;
        input[0].quality = 1 * FF_QP2LAMBDA;
        out_size = avcodec_encode_video(c, outbuf,
                                        outbuf_size, &input[0]);
        //rd += (out_size * lambda2) >> FF_LAMBDA_SHIFT;
        for (i = 0; i < s->max_b_frames + 1; i++) {
            int is_p = i % (j + 1) == j || i == s->max_b_frames;
            input[i + 1].pict_type = is_p ?
                                     AV_PICTURE_TYPE_P : AV_PICTURE_TYPE_B;
            input[i + 1].quality = is_p ? p_lambda : b_lambda;
            out_size = avcodec_encode_video(c, outbuf, outbuf_size,
                                            &input[i + 1]);
            rd += (out_size * lambda2) >> (FF_LAMBDA_SHIFT - 3);
        }

@@ -1219,7 +1229,8 @@ static int estimate_best_b_count(MpegEncContext *s)
    return best_b_count;
}

static int select_input_picture(MpegEncContext *s)
{
    int i;
    for (i = 1; i < MAX_PICTURE_COUNT; i++)

@@ -1228,27 +1239,33 @@ static int select_input_picture(MpegEncContext *s){
    /* set next picture type & ordering */
    if (s->reordered_input_picture[0] == NULL && s->input_picture[0]) {
        if (/*s->picture_in_gop_number >= s->gop_size ||*/
            s->next_picture_ptr == NULL || s->intra_only) {
            s->reordered_input_picture[0] = s->input_picture[0];
            s->reordered_input_picture[0]->f.pict_type = AV_PICTURE_TYPE_I;
            s->reordered_input_picture[0]->f.coded_picture_number =
                s->coded_picture_number++;
        } else {
            int b_frames;
            if (s->avctx->frame_skip_threshold || s->avctx->frame_skip_factor) {
                if (s->picture_in_gop_number < s->gop_size &&
                    skip_check(s, s->input_picture[0], s->next_picture_ptr)) {
                    // FIXME check that te gop check above is +-1 correct
                    //av_log(NULL, AV_LOG_DEBUG, "skip %p %"PRId64"\n",
                    //       s->input_picture[0]->f.data[0],
                    //       s->input_picture[0]->pts);
                    if (s->input_picture[0]->f.type == FF_BUFFER_TYPE_SHARED) {
                        for (i = 0; i < 4; i++)
                            s->input_picture[0]->f.data[i] = NULL;
                        s->input_picture[0]->f.type = 0;
                    } else {
                        assert(s->input_picture[0]->f.type == FF_BUFFER_TYPE_USER ||
                               s->input_picture[0]->f.type == FF_BUFFER_TYPE_INTERNAL);
                        s->avctx->release_buffer(s->avctx,
                                                 (AVFrame *) s->input_picture[0]);
                    }
                    emms_c();

@@ -1276,17 +1293,24 @@ static int select_input_picture(MpegEncContext *s){
            if (s->avctx->b_frame_strategy == 0) {
                b_frames = s->max_b_frames;
                while (b_frames && !s->input_picture[b_frames])
                    b_frames--;
            } else if (s->avctx->b_frame_strategy == 1) {
                for (i = 1; i < s->max_b_frames + 1; i++) {
                    if (s->input_picture[i] &&
                        s->input_picture[i]->b_frame_score == 0) {
                        s->input_picture[i]->b_frame_score =
                            get_intra_count(s,
                                            s->input_picture[i    ]->f.data[0],
                                            s->input_picture[i - 1]->f.data[0],
                                            s->linesize) + 1;
                    }
                }
                for (i = 0; i < s->max_b_frames + 1; i++) {
                    if (s->input_picture[i] == NULL ||
                        s->input_picture[i]->b_frame_score - 1 >
                            s->mb_num / s->avctx->b_sensitivity)
                        break;
                }
                b_frames = FFMAX(0, i - 1);

@@ -1312,12 +1336,15 @@ static int select_input_picture(MpegEncContext *s){
                if (type && type != AV_PICTURE_TYPE_B)
                    b_frames = i;
            }
            if (s->input_picture[b_frames]->f.pict_type == AV_PICTURE_TYPE_B &&
                b_frames == s->max_b_frames) {
                av_log(s->avctx, AV_LOG_ERROR,
                       "warning, too many b frames in a row\n");
            }
            if (s->picture_in_gop_number + b_frames >= s->gop_size) {
                if ((s->flags2 & CODEC_FLAG2_STRICT_GOP) &&
                    s->gop_size > s->picture_in_gop_number) {
                    b_frames = s->gop_size - s->picture_in_gop_number - 1;
                } else {
                    if (s->flags & CODEC_FLAG_CLOSED_GOP)

@@ -1326,30 +1353,36 @@ static int select_input_picture(MpegEncContext *s){
                }
            }
            if ((s->flags & CODEC_FLAG_CLOSED_GOP) && b_frames &&
                s->input_picture[b_frames]->f.pict_type == AV_PICTURE_TYPE_I)
                b_frames--;
            s->reordered_input_picture[0] = s->input_picture[b_frames];
            if (s->reordered_input_picture[0]->f.pict_type != AV_PICTURE_TYPE_I)
                s->reordered_input_picture[0]->f.pict_type = AV_PICTURE_TYPE_P;
            s->reordered_input_picture[0]->f.coded_picture_number =
                s->coded_picture_number++;
            for (i = 0; i < b_frames; i++) {
                s->reordered_input_picture[i + 1] = s->input_picture[i];
                s->reordered_input_picture[i + 1]->f.pict_type =
                    AV_PICTURE_TYPE_B;
                s->reordered_input_picture[i + 1]->f.coded_picture_number =
                    s->coded_picture_number++;
            }
        }
    }
no_output_pic:
    if (s->reordered_input_picture[0]) {
        s->reordered_input_picture[0]->f.reference =
            s->reordered_input_picture[0]->f.pict_type !=
                AV_PICTURE_TYPE_B ? 3 : 0;
        ff_copy_picture(&s->new_picture, s->reordered_input_picture[0]);
        if (s->reordered_input_picture[0]->f.type == FF_BUFFER_TYPE_SHARED ||
            s->avctx->rc_buffer_size) {
            // input is a shared pix, so we can't modifiy it -> alloc a new
            // one & ensure that the shared one is reuseable
            Picture *pic;
            int i = ff_find_unused_picture(s, 0);

@@ -1364,19 +1397,23 @@ no_output_pic:
            /* mark us unused / free shared pic */
            if (s->reordered_input_picture[0]->f.type == FF_BUFFER_TYPE_INTERNAL)
                s->avctx->release_buffer(s->avctx,
                                         (AVFrame *) s->reordered_input_picture[0]);
            for (i = 0; i < 4; i++)
                s->reordered_input_picture[0]->f.data[i] = NULL;
            s->reordered_input_picture[0]->f.type = 0;
            copy_picture_attributes(s, (AVFrame *) pic,
                                    (AVFrame *) s->reordered_input_picture[0]);
            s->current_picture_ptr = pic;
        } else {
            // input is not a shared pix -> reuse buffer for current_pix
            assert(s->reordered_input_picture[0]->f.type ==
                       FF_BUFFER_TYPE_USER ||
                   s->reordered_input_picture[0]->f.type ==
                       FF_BUFFER_TYPE_INTERNAL);
            s->current_picture_ptr = s->reordered_input_picture[0];
            for (i = 0; i < 4; i++) {

@@ -1423,7 +1460,8 @@ int MPV_encode_picture(AVCodecContext *avctx,
    if (s->new_picture.f.data[0]) {
        s->pict_type = s->new_picture.f.pict_type;
        //emms_c();
        //printf("qs:%f %f %d\n", s->new_picture.quality,
        //       s->current_picture.quality, s->qscale);
        MPV_frame_start(s, avctx);
vbv_retry:
        if (encode_picture(s, s->picture_number) < 0)

@@ -1435,7 +1473,8 @@ vbv_retry:
        avctx->i_tex_bits = s->i_tex_bits;
        avctx->p_tex_bits = s->p_tex_bits;
        avctx->i_count = s->i_count;
        // FIXME f/b_count in avctx
        avctx->p_count = s->mb_num - s->i_count - s->skip_count;
        avctx->skip_count = s->skip_count;
        MPV_frame_end(s);

@@ -1447,16 +1486,24 @@ vbv_retry:
            RateControlContext *rcc = &s->rc_context;
            int max_size = rcc->buffer_index * avctx->rc_max_available_vbv_use;
            if (put_bits_count(&s->pb) > max_size &&
                s->lambda < s->avctx->lmax) {
                s->next_lambda = FFMAX(s->lambda + 1, s->lambda *
                                       (s->qscale + 1) / s->qscale);
                if (s->adaptive_quant) {
                    int i;
                    for (i = 0; i < s->mb_height * s->mb_stride; i++)
                        s->lambda_table[i] =
                            FFMAX(s->lambda_table[i] + 1,
                                  s->lambda_table[i] * (s->qscale + 1) /
                                  s->qscale);
                }
                s->mb_skipped = 0; // done in MPV_frame_start()
                // done in encode_picture() so we must undo it
                if (s->pict_type == AV_PICTURE_TYPE_P) {
                    if (s->flipflop_rounding ||
                        s->codec_id == CODEC_ID_H263P ||
                        s->codec_id == CODEC_ID_MPEG4)
                        s->no_rounding ^= 1;
                }
                if (s->pict_type != AV_PICTURE_TYPE_B) {

@@ -1483,13 +1530,16 @@ vbv_retry:
        }
        if (s->flags & CODEC_FLAG_PASS1)
            assert(avctx->header_bits + avctx->mv_bits + avctx->misc_bits +
                   avctx->i_tex_bits + avctx->p_tex_bits ==
                       put_bits_count(&s->pb));
        flush_put_bits(&s->pb);
        s->frame_bits = put_bits_count(&s->pb);
        stuffing_count = ff_vbv_update(s, s->frame_bits);
        if (stuffing_count) {
            if (s->pb.buf_end - s->pb.buf - (put_bits_count(&s->pb) >> 3) <
                    stuffing_count + 50) {
                av_log(s->avctx, AV_LOG_ERROR, "stuffing too large\n");
                return -1;
            }

@@ -1517,20 +1567,27 @@ vbv_retry:
        }
        /* update mpeg1/2 vbv_delay for CBR */
        if (s->avctx->rc_max_rate &&
            s->avctx->rc_min_rate == s->avctx->rc_max_rate &&
            s->out_format == FMT_MPEG1 &&
            90000LL * (avctx->rc_buffer_size - 1) <=
                s->avctx->rc_max_rate * 0xFFFFLL) {
            int vbv_delay, min_delay;
            double inbits  = s->avctx->rc_max_rate *
                             av_q2d(s->avctx->time_base);
            int    minbits = s->frame_bits - 8 *
                             (s->vbv_delay_ptr - s->pb.buf - 1);
            double bits = s->rc_context.buffer_index + minbits - inbits;
            if (bits < 0)
                av_log(s->avctx, AV_LOG_ERROR,
                       "Internal error, negative bits\n");
            assert(s->repeat_first_field == 0);
            vbv_delay = bits * 90000 / s->avctx->rc_max_rate;
            min_delay = (minbits * 90000LL + s->avctx->rc_max_rate - 1) /
                        s->avctx->rc_max_rate;
            vbv_delay = FFMAX(vbv_delay, min_delay);

@@ -1554,17 +1611,19 @@ vbv_retry:
    return s->frame_bits / 8;
}

static inline void dct_single_coeff_elimination(MpegEncContext *s,
                                                int n, int threshold)
{
    static const char tab[64] = {
        3, 2, 2, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1,
        0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0
    };
    int score = 0;
    int run = 0;
    int i;

@@ -1579,13 +1638,15 @@ static inline void dct_single_coeff_elimination(MpegEncContext *s, int n, int th
        skip_dc = 1;

    /* Are all we could set to zero already zero? */
    if (last_index <= skip_dc - 1)
        return;

    for (i = 0; i <= last_index; i++) {
        const int j = s->intra_scantable.permutated[i];
        const int level = FFABS(block[j]);
        if (level == 1) {
            if (skip_dc && i == 0)
                continue;
            score += tab[run];
            run = 0;
        } else if (level > 1) {

@@ -1594,16 +1655,20 @@ static inline void dct_single_coeff_elimination(MpegEncContext *s, int n, int th
            run++;
        }
    }
    if (score >= threshold)
        return;
    for (i = skip_dc; i <= last_index; i++) {
        const int j = s->intra_scantable.permutated[i];
        block[j] = 0;
    }
    if (block[0])
        s->block_last_index[n] = 0;
    else
        s->block_last_index[n] = -1;
}

static inline void clip_coeffs(MpegEncContext *s, DCTELEM *block,
                               int last_index)
{
    int i;
    const int maxlevel = s->max_qcoeff;

@@ -1631,10 +1696,13 @@ static inline void clip_coeffs(MpegEncContext *s, DCTELEM *block, int last_index
    }
    if (overflow && s->avctx->mb_decision == FF_MB_DECISION_SIMPLE)
        av_log(s->avctx, AV_LOG_INFO,
               "warning, clipping %d dct coefficients to %d..%d\n",
               overflow, minlevel, maxlevel);
}

static void get_visual_weight(int16_t *weight, uint8_t *ptr, int stride)
{
    int x, y;
    // FIXME optimize
    for (y = 0; y < 8; y++) {

@@ -1657,7 +1725,10 @@ static void get_visual_weight(int16_t *weight, uint8_t *ptr, int stride){
    }
}

static av_always_inline void encode_mb_internal(MpegEncContext *s,
                                                int motion_x, int motion_y,
                                                int mb_block_height,
                                                int mb_block_count)
{
    int16_t weight[8][64];
    DCTELEM orig[8][64];

@@ -1669,7 +1740,8 @@ static av_always_inline void encode_mb_internal(MpegEncContext *s, int motion_x,
    uint8_t *ptr_y, *ptr_cb, *ptr_cr;
    int wrap_y, wrap_c;

    for (i = 0; i < mb_block_count; i++)
        skip_dct[i] = s->skipdct;

    if (s->adaptive_quant) {
        const int last_qp = s->qscale;

@@ -1703,17 +1775,25 @@ static av_always_inline void encode_mb_internal(MpegEncContext *s, int motion_x,
    wrap_y = s->linesize;
    wrap_c = s->uvlinesize;
    ptr_y  = s->new_picture.f.data[0] +
             (mb_y * 16 * wrap_y) + mb_x * 16;
    ptr_cb = s->new_picture.f.data[1] +
             (mb_y * mb_block_height * wrap_c) + mb_x * 8;
    ptr_cr = s->new_picture.f.data[2] +
             (mb_y * mb_block_height * wrap_c) + mb_x * 8;

    if((mb_x*16+16 > s->width || mb_y*16+16 > s->height) && s->codec_id != CODEC_ID_AMV){
        uint8_t *ebuf = s->edge_emu_buffer + 32;
        s->dsp.emulated_edge_mc(ebuf, ptr_y, wrap_y, 16, 16, mb_x * 16,
                                mb_y * 16, s->width, s->height);
        ptr_y = ebuf;
        s->dsp.emulated_edge_mc(ebuf + 18 * wrap_y, ptr_cb, wrap_c, 8,
                                mb_block_height, mb_x * 8, mb_y * 8,
                                s->width >> 1, s->height >> 1);
        ptr_cb = ebuf + 18 * wrap_y;
        s->dsp.emulated_edge_mc(ebuf + 18 * wrap_y + 8, ptr_cr, wrap_c, 8,
                                mb_block_height, mb_x * 8, mb_y * 8,
                                s->width >> 1, s->height >> 1);
        ptr_cr = ebuf + 18 * wrap_y + 8;
    }

@@ -1722,12 +1802,16 @@ static av_always_inline void encode_mb_internal(MpegEncContext *s, int motion_x,
            int progressive_score, interlaced_score;
            s->interlaced_dct = 0;
            progressive_score = s->dsp.ildct_cmp[4](s, ptr_y,
                                                    NULL, wrap_y, 8) +
                                s->dsp.ildct_cmp[4](s, ptr_y + wrap_y * 8,
                                                    NULL, wrap_y, 8) - 400;
            if (progressive_score > 0) {
                interlaced_score = s->dsp.ildct_cmp[4](s, ptr_y,
                                                       NULL, wrap_y * 2, 8) +
                                   s->dsp.ildct_cmp[4](s, ptr_y + wrap_y,
                                                       NULL, wrap_y * 2, 8);
                if (progressive_score > interlaced_score) {
                    s->interlaced_dct = 1;

@@ -1751,8 +1835,10 @@ static av_always_inline void encode_mb_internal(MpegEncContext *s, int motion_x,
            s->dsp.get_pixels(s->block[4], ptr_cb, wrap_c);
            s->dsp.get_pixels(s->block[5], ptr_cr, wrap_c);
            if (!s->chroma_y_shift) { /* 422 */
                s->dsp.get_pixels(s->block[6],
                                  ptr_cb + (dct_offset >> 1), wrap_c);
                s->dsp.get_pixels(s->block[7],
                                  ptr_cr + (dct_offset >> 1), wrap_c);
            }
        }
    } else {

@@ -1773,26 +1859,37 @@ static av_always_inline void encode_mb_internal(MpegEncContext *s, int motion_x,
        }
        if (s->mv_dir & MV_DIR_FORWARD) {
            MPV_motion(s, dest_y, dest_cb, dest_cr, 0, s->last_picture.f.data,
                       op_pix, op_qpix);
            op_pix = s->dsp.avg_pixels_tab;
            op_qpix = s->dsp.avg_qpel_pixels_tab;
        }
        if (s->mv_dir & MV_DIR_BACKWARD) {
            MPV_motion(s, dest_y, dest_cb, dest_cr, 1, s->next_picture.f.data,
                       op_pix, op_qpix);
        }
        if (s->flags & CODEC_FLAG_INTERLACED_DCT) {
            int progressive_score, interlaced_score;
            s->interlaced_dct = 0;
            progressive_score = s->dsp.ildct_cmp[0](s, dest_y,
                                                    ptr_y, wrap_y,
                                                    8) +
                                s->dsp.ildct_cmp[0](s, dest_y + wrap_y * 8,
                                                    ptr_y + wrap_y * 8, wrap_y,
                                                    8) - 400;
            if (s->avctx->ildct_cmp == FF_CMP_VSSE)
                progressive_score -= 400;
            if (progressive_score > 0) {
                interlaced_score = s->dsp.ildct_cmp[0](s, dest_y,
                                                       ptr_y,
                                                       wrap_y * 2, 8) +
                                   s->dsp.ildct_cmp[0](s, dest_y + wrap_y,
                                                       ptr_y + wrap_y,
                                                       wrap_y * 2, 8);
                if (progressive_score > interlaced_score) {
                    s->interlaced_dct = 1;

@@ -1807,8 +1904,10 @@ static av_always_inline void encode_mb_internal(MpegEncContext *s, int motion_x,
        s->dsp.diff_pixels(s->block[0], ptr_y, dest_y, wrap_y);
        s->dsp.diff_pixels(s->block[1], ptr_y + 8, dest_y + 8, wrap_y);
        s->dsp.diff_pixels(s->block[2], ptr_y + dct_offset,
                           dest_y + dct_offset, wrap_y);
        s->dsp.diff_pixels(s->block[3], ptr_y + dct_offset + 8,
                           dest_y + dct_offset + 8, wrap_y);
        if (s->flags & CODEC_FLAG_GRAY) {
            skip_dct[4] = 1;

@@ -1817,36 +1916,68 @@ static av_always_inline void encode_mb_internal(MpegEncContext *s, int motion_x,
            s->dsp.diff_pixels(s->block[4], ptr_cb, dest_cb, wrap_c);
            s->dsp.diff_pixels(s->block[5], ptr_cr, dest_cr, wrap_c);
            if (!s->chroma_y_shift) { /* 422 */
                s->dsp.diff_pixels(s->block[6], ptr_cb + (dct_offset >> 1),
                                   dest_cb + (dct_offset >> 1), wrap_c);
                s->dsp.diff_pixels(s->block[7], ptr_cr + (dct_offset >> 1),
                                   dest_cr + (dct_offset >> 1), wrap_c);
            }
        }
        /* pre quantization */
        if (s->current_picture.mc_mb_var[s->mb_stride * mb_y + mb_x] <
                2 * s->qscale * s->qscale) {
            // FIXME optimize
            if (s->dsp.sad[1](NULL, ptr_y, dest_y,
                              wrap_y, 8) < 20 * s->qscale)
                skip_dct[0] = 1;
            if (s->dsp.sad[1](NULL, ptr_y + 8,
                              dest_y + 8, wrap_y, 8) < 20 * s->qscale)
                skip_dct[1] = 1;
            if (s->dsp.sad[1](NULL, ptr_y + dct_offset,
                              dest_y + dct_offset, wrap_y, 8) < 20 * s->qscale)
                skip_dct[2] = 1;
            if (s->dsp.sad[1](NULL, ptr_y + dct_offset + 8,
                              dest_y + dct_offset + 8,
                              wrap_y, 8) < 20 * s->qscale)
                skip_dct[3] = 1;
            if (s->dsp.sad[1](NULL, ptr_cb, dest_cb,
                              wrap_c, 8) < 20 * s->qscale)
                skip_dct[4] = 1;
            if (s->dsp.sad[1](NULL, ptr_cr, dest_cr,
                              wrap_c, 8) < 20 * s->qscale)
                skip_dct[5] = 1;
            if (!s->chroma_y_shift) { /* 422 */
                if (s->dsp.sad[1](NULL, ptr_cb + (dct_offset >> 1),
                                  dest_cb + (dct_offset >> 1),
                                  wrap_c, 8) < 20 * s->qscale)
                    skip_dct[6] = 1;
                if (s->dsp.sad[1](NULL, ptr_cr + (dct_offset >> 1),
                                  dest_cr + (dct_offset >> 1),
                                  wrap_c, 8) < 20 * s->qscale)
                    skip_dct[7] = 1;
            }
        }
    }

    if (s->avctx->quantizer_noise_shaping) {
        if (!skip_dct[0])
            get_visual_weight(weight[0], ptr_y , wrap_y);
        if (!skip_dct[1])
            get_visual_weight(weight[1], ptr_y + 8, wrap_y);
        if (!skip_dct[2])
            get_visual_weight(weight[2], ptr_y + dct_offset , wrap_y);
        if (!skip_dct[3])
            get_visual_weight(weight[3], ptr_y + dct_offset + 8, wrap_y);
        if (!skip_dct[4])
            get_visual_weight(weight[4], ptr_cb , wrap_c);
        if (!skip_dct[5])
            get_visual_weight(weight[5], ptr_cr , wrap_c);
        if (!s->chroma_y_shift) { /* 422 */
            if (!skip_dct[6])
                get_visual_weight(weight[6], ptr_cb + (dct_offset >> 1),
                                  wrap_c);
            if (!skip_dct[7])
                get_visual_weight(weight[7], ptr_cr + (dct_offset >> 1),
                                  wrap_c);
        }
        memcpy(orig[0], s->block[0], sizeof(DCTELEM) * 64 * mb_block_count);
    }

@@ -1858,17 +1989,22 @@ static av_always_inline void encode_mb_internal(MpegEncContext *s, int motion_x,
        if (!skip_dct[i]) {
            int overflow;
            s->block_last_index[i] = s->dct_quantize(s, s->block[i], i, s->qscale, &overflow);
            // FIXME we could decide to change to quantizer instead of
            // clipping
            // JS: I don't think that would be a good idea it could lower
            //     quality instead of improve it. Just INTRADC clipping
            //     deserves changes in quantizer
            if (overflow)
                clip_coeffs(s, s->block[i], s->block_last_index[i]);
        } else
            s->block_last_index[i] = -1;
    }
    if (s->avctx->quantizer_noise_shaping) {
        for (i = 0; i < mb_block_count; i++) {
            if (!skip_dct[i]) {
                s->block_last_index[i] =
                    dct_quantize_refine(s, s->block[i], weight[i],
                                        orig[i], i, s->qscale);
            }
        }
    }

@@ -1901,7 +2037,8 @@ static av_always_inline void encode_mb_internal(MpegEncContext *s, int motion_x,
        int j;
        if (s->block_last_index[i] > 0) {
            for (j = 63; j > 0; j--) {
                if (s->block[i][s->intra_scantable.permutated[j]])
                    break;
            }
            s->block_last_index[i] = j;
        }


@@ -376,13 +376,23 @@ static int read_data(void *opaque, uint8_t *buf, int buf_size)
 restart:
     if (!v->input) {
-reload:
-        /* If this is a live stream and target_duration has elapsed since
+        /* If this is a live stream and the reload interval has elapsed since
          * the last playlist reload, reload the variant playlists now. */
+        int64_t reload_interval = v->n_segments > 0 ?
+                                  v->segments[v->n_segments - 1]->duration :
+                                  v->target_duration;
+        reload_interval *= 1000000;
+reload:
         if (!v->finished &&
-            av_gettime() - v->last_load_time >= v->target_duration*1000000 &&
-            (ret = parse_playlist(c, v->url, v, NULL)) < 0)
+            av_gettime() - v->last_load_time >= reload_interval) {
+            if ((ret = parse_playlist(c, v->url, v, NULL)) < 0)
                 return ret;
+            /* If we need to reload the playlist again below (if
+             * there's still no more segments), switch to a reload
+             * interval of half the target duration. */
+            reload_interval = v->target_duration * 500000;
+        }
         if (v->cur_seq_no < v->start_seq_no) {
             av_log(NULL, AV_LOG_WARNING,
                    "skipping %d segments ahead, expired from playlists\n",
@@ -392,8 +402,7 @@ reload:
         if (v->cur_seq_no >= v->start_seq_no + v->n_segments) {
             if (v->finished)
                 return AVERROR_EOF;
-            while (av_gettime() - v->last_load_time <
-                   v->target_duration*1000000) {
+            while (av_gettime() - v->last_load_time < reload_interval) {
                 if (ff_check_interrupt(c->interrupt_callback))
                     return AVERROR_EXIT;
                 usleep(100*1000);

@ -244,6 +244,7 @@ static int applehttp_read(URLContext *h, uint8_t *buf, int size)
AppleHTTPContext *s = h->priv_data; AppleHTTPContext *s = h->priv_data;
const char *url; const char *url;
int ret; int ret;
int64_t reload_interval;
start: start:
if (s->seg_hd) { if (s->seg_hd) {
@ -256,12 +257,21 @@ start:
s->seg_hd = NULL; s->seg_hd = NULL;
s->cur_seq_no++; s->cur_seq_no++;
} }
reload_interval = s->n_segments > 0 ?
s->segments[s->n_segments - 1]->duration :
s->target_duration;
reload_interval *= 1000000;
retry: retry:
if (!s->finished) { if (!s->finished) {
int64_t now = av_gettime(); int64_t now = av_gettime();
if (now - s->last_load_time >= s->target_duration*1000000) if (now - s->last_load_time >= reload_interval) {
if ((ret = parse_playlist(h, s->playlisturl)) < 0) if ((ret = parse_playlist(h, s->playlisturl)) < 0)
return ret; return ret;
/* If we need to reload the playlist again below (if
* there's still no more segments), switch to a reload
* interval of half the target duration. */
reload_interval = s->target_duration * 500000;
}
} }
if (s->cur_seq_no < s->start_seq_no) { if (s->cur_seq_no < s->start_seq_no) {
av_log(h, AV_LOG_WARNING, av_log(h, AV_LOG_WARNING,
@ -272,7 +282,7 @@ retry:
if (s->cur_seq_no - s->start_seq_no >= s->n_segments) { if (s->cur_seq_no - s->start_seq_no >= s->n_segments) {
if (s->finished) if (s->finished)
return AVERROR_EOF; return AVERROR_EOF;
while (av_gettime() - s->last_load_time < s->target_duration*1000000) { while (av_gettime() - s->last_load_time < reload_interval) {
if (ff_check_interrupt(&h->interrupt_callback)) if (ff_check_interrupt(&h->interrupt_callback))
return AVERROR_EXIT; return AVERROR_EXIT;
usleep(100*1000); usleep(100*1000);
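
Both hunks above (the applehttp demuxer and the applehttpproto URL handler) implement the same reload policy: wait for the duration of the last loaded segment, fall back to the target duration when no segments are known yet, and drop to half the target duration once a reload has happened but no new segments have appeared. A condensed sketch of just that interval arithmetic, with an invented struct standing in for the fields the real code keeps in its demuxer/protocol contexts:

#include <stdint.h>

/* Hypothetical container for the values the patch reads; the field names
 * mirror the diff, but the struct itself is made up for this sketch.
 * Durations are in seconds, times in microseconds (as from av_gettime()). */
struct reload_state {
    int     finished;               /* non-live playlist, never reloaded    */
    int     n_segments;
    int64_t last_segment_duration;
    int64_t target_duration;
    int64_t last_load_time;
};

/* Initial interval: last segment duration if known, else target duration,
 * converted to microseconds as in "reload_interval *= 1000000". */
static int64_t initial_reload_interval(const struct reload_state *s)
{
    int64_t d = s->n_segments > 0 ? s->last_segment_duration
                                  : s->target_duration;
    return d * 1000000;
}

/* After a reload that yields no new segments, the patch retries faster:
 * target_duration * 500000 microseconds, i.e. half the target duration. */
static int64_t retry_reload_interval(const struct reload_state *s)
{
    return s->target_duration * 500000;
}

/* A reload is due when the playlist is live and the interval has elapsed. */
static int reload_due(const struct reload_state *s, int64_t now, int64_t interval)
{
    return !s->finished && now - s->last_load_time >= interval;
}
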

@ -50,6 +50,7 @@ include $(SRC_PATH)/tests/fate/amrnb.mak
include $(SRC_PATH)/tests/fate/amrwb.mak include $(SRC_PATH)/tests/fate/amrwb.mak
include $(SRC_PATH)/tests/fate/atrac.mak include $(SRC_PATH)/tests/fate/atrac.mak
include $(SRC_PATH)/tests/fate/dct.mak include $(SRC_PATH)/tests/fate/dct.mak
include $(SRC_PATH)/tests/fate/dpcm.mak
include $(SRC_PATH)/tests/fate/fft.mak include $(SRC_PATH)/tests/fate/fft.mak
include $(SRC_PATH)/tests/fate/h264.mak include $(SRC_PATH)/tests/fate/h264.mak
include $(SRC_PATH)/tests/fate/image.mak include $(SRC_PATH)/tests/fate/image.mak
@ -62,6 +63,7 @@ include $(SRC_PATH)/tests/fate/lossless-video.mak
include $(SRC_PATH)/tests/fate/microsoft.mak include $(SRC_PATH)/tests/fate/microsoft.mak
include $(SRC_PATH)/tests/fate/mp3.mak include $(SRC_PATH)/tests/fate/mp3.mak
include $(SRC_PATH)/tests/fate/mpc.mak include $(SRC_PATH)/tests/fate/mpc.mak
include $(SRC_PATH)/tests/fate/pcm.mak
include $(SRC_PATH)/tests/fate/prores.mak include $(SRC_PATH)/tests/fate/prores.mak
include $(SRC_PATH)/tests/fate/qtrle.mak include $(SRC_PATH)/tests/fate/qtrle.mak
include $(SRC_PATH)/tests/fate/real.mak include $(SRC_PATH)/tests/fate/real.mak

@ -6,10 +6,6 @@ FATE_TESTS += fate-8bps
fate-8bps: CMD = framecrc -i $(SAMPLES)/8bps/full9iron-partial.mov -pix_fmt rgb24 fate-8bps: CMD = framecrc -i $(SAMPLES)/8bps/full9iron-partial.mov -pix_fmt rgb24
FATE_TESTS += fate-aasc FATE_TESTS += fate-aasc
fate-aasc: CMD = framecrc -i $(SAMPLES)/aasc/AASC-1.5MB.AVI -pix_fmt rgb24 fate-aasc: CMD = framecrc -i $(SAMPLES)/aasc/AASC-1.5MB.AVI -pix_fmt rgb24
FATE_TESTS += fate-adpcm-ea-r2
fate-adpcm-ea-r2: CMD = crc -i $(SAMPLES)/ea-mpc/THX_logo.mpc -vn
FATE_TESTS += fate-adpcm-ea-r3
fate-adpcm-ea-r3: CMD = crc -i $(SAMPLES)/ea-vp6/THX_logo.vp6 -vn
FATE_TESTS += fate-adts-demux FATE_TESTS += fate-adts-demux
fate-adts-demux: CMD = crc -i $(SAMPLES)/aac/ct_faac-adts.aac -acodec copy fate-adts-demux: CMD = crc -i $(SAMPLES)/aac/ct_faac-adts.aac -acodec copy
FATE_TESTS += fate-aea-demux FATE_TESTS += fate-aea-demux
@ -40,14 +36,6 @@ FATE_TESTS += fate-cljr
fate-cljr: CMD = framecrc -i $(SAMPLES)/cljr/testcljr-partial.avi fate-cljr: CMD = framecrc -i $(SAMPLES)/cljr/testcljr-partial.avi
FATE_TESTS += fate-corepng FATE_TESTS += fate-corepng
fate-corepng: CMD = framecrc -i $(SAMPLES)/png1/corepng-partial.avi fate-corepng: CMD = framecrc -i $(SAMPLES)/png1/corepng-partial.avi
FATE_TESTS += fate-creative-adpcm
fate-creative-adpcm: CMD = md5 -i $(SAMPLES)/creative/intro-partial.wav -f s16le
FATE_TESTS += fate-creative-adpcm-8-2.6bit
fate-creative-adpcm-8-2.6bit: CMD = md5 -i $(SAMPLES)/creative/BBC_3BIT.VOC -f s16le
FATE_TESTS += fate-creative-adpcm-8-2bit
fate-creative-adpcm-8-2bit: CMD = md5 -i $(SAMPLES)/creative/BBC_2BIT.VOC -f s16le
FATE_TESTS += fate-creative-adpcm-8-4bit
fate-creative-adpcm-8-4bit: CMD = md5 -i $(SAMPLES)/creative/BBC_4BIT.VOC -f s16le
FATE_TESTS += fate-creatureshock-avs FATE_TESTS += fate-creatureshock-avs
fate-creatureshock-avs: CMD = framecrc -i $(SAMPLES)/creatureshock-avs/OUTATIME.AVS -pix_fmt rgb24 fate-creatureshock-avs: CMD = framecrc -i $(SAMPLES)/creatureshock-avs/OUTATIME.AVS -pix_fmt rgb24
FATE_TESTS += fate-cryo-apc FATE_TESTS += fate-cryo-apc
@ -66,10 +54,6 @@ FATE_TESTS += fate-delphine-cin
fate-delphine-cin: CMD = framecrc -i $(SAMPLES)/delphine-cin/LOGO-partial.CIN -pix_fmt rgb24 -vsync 0 fate-delphine-cin: CMD = framecrc -i $(SAMPLES)/delphine-cin/LOGO-partial.CIN -pix_fmt rgb24 -vsync 0
FATE_TESTS += fate-deluxepaint-anm FATE_TESTS += fate-deluxepaint-anm
fate-deluxepaint-anm: CMD = framecrc -i $(SAMPLES)/deluxepaint-anm/INTRO1.ANM -pix_fmt rgb24 fate-deluxepaint-anm: CMD = framecrc -i $(SAMPLES)/deluxepaint-anm/INTRO1.ANM -pix_fmt rgb24
FATE_TESTS += fate-duck-dk3
fate-duck-dk3: CMD = md5 -i $(SAMPLES)/duck/sop-audio-only.avi -f s16le
FATE_TESTS += fate-duck-dk4
fate-duck-dk4: CMD = md5 -i $(SAMPLES)/duck/salsa-audio-only.avi -f s16le
FATE_TESTS += fate-duck-tm2 FATE_TESTS += fate-duck-tm2
fate-duck-tm2: CMD = framecrc -i $(SAMPLES)/duck/tm20.avi fate-duck-tm2: CMD = framecrc -i $(SAMPLES)/duck/tm20.avi
FATE_TESTS += fate-ea-cdata FATE_TESTS += fate-ea-cdata
@ -78,22 +62,14 @@ FATE_TESTS += fate-ea-cmv
fate-ea-cmv: CMD = framecrc -i $(SAMPLES)/ea-cmv/TITLE.CMV -vsync 0 -pix_fmt rgb24 fate-ea-cmv: CMD = framecrc -i $(SAMPLES)/ea-cmv/TITLE.CMV -vsync 0 -pix_fmt rgb24
FATE_TESTS += fate-ea-dct FATE_TESTS += fate-ea-dct
fate-ea-dct: CMD = framecrc -idct simple -i $(SAMPLES)/ea-dct/NFS2Esprit-partial.dct fate-ea-dct: CMD = framecrc -idct simple -i $(SAMPLES)/ea-dct/NFS2Esprit-partial.dct
FATE_TESTS += fate-ea-mad-adpcm-ea-r1
fate-ea-mad-adpcm-ea-r1: CMD = framecrc -i $(SAMPLES)/ea-mad/NFS6LogoE.mad
FATE_TESTS += fate-ea-mad-pcm-planar
fate-ea-mad-pcm-planar: CMD = framecrc -i $(SAMPLES)/ea-mad/xeasport.mad
FATE_TESTS += fate-ea-tgq FATE_TESTS += fate-ea-tgq
fate-ea-tgq: CMD = framecrc -i $(SAMPLES)/ea-tgq/v27.tgq -an fate-ea-tgq: CMD = framecrc -i $(SAMPLES)/ea-tgq/v27.tgq -an
FATE_TESTS += fate-ea-tgv-ima-ea-eacs FATE_TESTS += fate-ea-tgv-ima-ea-eacs
fate-ea-tgv-ima-ea-eacs: CMD = framecrc -i $(SAMPLES)/ea-tgv/INTRO8K-partial.TGV -pix_fmt rgb24 fate-ea-tgv-ima-ea-eacs: CMD = framecrc -i $(SAMPLES)/ea-tgv/INTRO8K-partial.TGV -pix_fmt rgb24
FATE_TESTS += fate-ea-tgv-ima-ea-sead FATE_TESTS += fate-ea-tgv-ima-ea-sead
fate-ea-tgv-ima-ea-sead: CMD = framecrc -i $(SAMPLES)/ea-tgv/INTEL_S.TGV -pix_fmt rgb24 fate-ea-tgv-ima-ea-sead: CMD = framecrc -i $(SAMPLES)/ea-tgv/INTEL_S.TGV -pix_fmt rgb24
FATE_TESTS += fate-ea-tqi-adpcm
fate-ea-tqi-adpcm: CMD = framecrc -i $(SAMPLES)/ea-wve/networkBackbone-partial.wve -frames:v 26
FATE_TESTS += fate-feeble-dxa FATE_TESTS += fate-feeble-dxa
fate-feeble-dxa: CMD = framecrc -i $(SAMPLES)/dxa/meetsquid.dxa -t 2 -pix_fmt rgb24 fate-feeble-dxa: CMD = framecrc -i $(SAMPLES)/dxa/meetsquid.dxa -t 2 -pix_fmt rgb24
FATE_TESTS += fate-film-cvid-pcm-stereo-8bit
fate-film-cvid-pcm-stereo-8bit: CMD = framecrc -i $(SAMPLES)/film/logo-capcom.cpk
FATE_TESTS += fate-flic-af11-palette-change FATE_TESTS += fate-flic-af11-palette-change
fate-flic-af11-palette-change: CMD = framecrc -i $(SAMPLES)/fli/fli-engines.fli -t 3.3 -pix_fmt rgb24 fate-flic-af11-palette-change: CMD = framecrc -i $(SAMPLES)/fli/fli-engines.fli -t 3.3 -pix_fmt rgb24
FATE_TESTS += fate-flic-af12 FATE_TESTS += fate-flic-af12
@ -110,8 +86,6 @@ FATE_TESTS += fate-g729-1
fate-g729-1: CMD = framecrc -i $(SAMPLES)/act/REC05.act -t 10 fate-g729-1: CMD = framecrc -i $(SAMPLES)/act/REC05.act -t 10
FATE_TESTS += fate-id-cin-video FATE_TESTS += fate-id-cin-video
fate-id-cin-video: CMD = framecrc -i $(SAMPLES)/idcin/idlog-2MB.cin -pix_fmt rgb24 fate-id-cin-video: CMD = framecrc -i $(SAMPLES)/idcin/idlog-2MB.cin -pix_fmt rgb24
FATE_TESTS += fate-idroq-video-dpcm
fate-idroq-video-dpcm: CMD = framecrc -i $(SAMPLES)/idroq/idlogo.roq
FATE_TESTS-$(CONFIG_AVFILTER) += fate-idroq-video-encode FATE_TESTS-$(CONFIG_AVFILTER) += fate-idroq-video-encode
fate-idroq-video-encode: CMD = md5 -f image2 -vcodec pgmyuv -i $(SAMPLES)/ffmpeg-synthetic/vsynth1/%02d.pgm -sws_flags +bitexact -vf pad=512:512:80:112 -f RoQ -t 0.2 fate-idroq-video-encode: CMD = md5 -f image2 -vcodec pgmyuv -i $(SAMPLES)/ffmpeg-synthetic/vsynth1/%02d.pgm -sws_flags +bitexact -vf pad=512:512:80:112 -f RoQ -t 0.2
FATE_TESTS += fate-iff-byterun1 FATE_TESTS += fate-iff-byterun1
@ -120,8 +94,6 @@ FATE_TESTS += fate-iff-fibonacci
fate-iff-fibonacci: CMD = md5 -i $(SAMPLES)/iff/dasboot-in-compressed -f s16le fate-iff-fibonacci: CMD = md5 -i $(SAMPLES)/iff/dasboot-in-compressed -f s16le
FATE_TESTS += fate-iff-ilbm FATE_TESTS += fate-iff-ilbm
fate-iff-ilbm: CMD = framecrc -i $(SAMPLES)/iff/lms-matriks.ilbm -pix_fmt rgb24 fate-iff-ilbm: CMD = framecrc -i $(SAMPLES)/iff/lms-matriks.ilbm -pix_fmt rgb24
FATE_TESTS += fate-iff-pcm
fate-iff-pcm: CMD = md5 -i $(SAMPLES)/iff/Bells -f s16le
FATE_TESTS += fate-interplay-mve-16bit FATE_TESTS += fate-interplay-mve-16bit
fate-interplay-mve-16bit: CMD = framecrc -i $(SAMPLES)/interplay-mve/descent3-level5-16bit-partial.mve -pix_fmt rgb24 fate-interplay-mve-16bit: CMD = framecrc -i $(SAMPLES)/interplay-mve/descent3-level5-16bit-partial.mve -pix_fmt rgb24
FATE_TESTS += fate-interplay-mve-8bit FATE_TESTS += fate-interplay-mve-8bit
@ -150,14 +122,10 @@ FATE_TESTS += fate-nuv
fate-nuv: CMD = framecrc -idct simple -i $(SAMPLES)/nuv/Today.nuv -vsync 0 fate-nuv: CMD = framecrc -idct simple -i $(SAMPLES)/nuv/Today.nuv -vsync 0
FATE_TESTS += fate-oma-demux FATE_TESTS += fate-oma-demux
fate-oma-demux: CMD = crc -i $(SAMPLES)/oma/01-Untitled-partial.oma -acodec copy fate-oma-demux: CMD = crc -i $(SAMPLES)/oma/01-Untitled-partial.oma -acodec copy
FATE_TESTS += fate-pcm_dvd
fate-pcm_dvd: CMD = framecrc -i $(SAMPLES)/pcm-dvd/coolitnow-partial.vob -vn
FATE_TESTS += fate-psx-str FATE_TESTS += fate-psx-str
fate-psx-str: CMD = framecrc -i $(SAMPLES)/psx-str/descent-partial.str fate-psx-str: CMD = framecrc -i $(SAMPLES)/psx-str/descent-partial.str
FATE_TESTS += fate-psx-str-v3-mdec FATE_TESTS += fate-psx-str-v3-mdec
fate-psx-str-v3-mdec: CMD = framecrc -i $(SAMPLES)/psx-str/abc000_cut.str -an fate-psx-str-v3-mdec: CMD = framecrc -i $(SAMPLES)/psx-str/abc000_cut.str -an
FATE_TESTS += fate-psx-str-v3-adpcm_xa
fate-psx-str-v3-adpcm_xa: CMD = framecrc -i $(SAMPLES)/psx-str/abc000_cut.str -vn
FATE_TESTS += fate-pva-demux FATE_TESTS += fate-pva-demux
fate-pva-demux: CMD = framecrc -idct simple -i $(SAMPLES)/pva/PVA_test-partial.pva -t 0.6 -acodec copy fate-pva-demux: CMD = framecrc -idct simple -i $(SAMPLES)/pva/PVA_test-partial.pva -t 0.6 -acodec copy
FATE_TESTS += fate-qcp-demux FATE_TESTS += fate-qcp-demux
@ -180,18 +148,6 @@ FATE_TESTS += fate-qt-mac6-mono
fate-qt-mac6-mono: CMD = md5 -i $(SAMPLES)/qt-surge-suite/surge-1-8-MAC6.mov -f s16le fate-qt-mac6-mono: CMD = md5 -i $(SAMPLES)/qt-surge-suite/surge-1-8-MAC6.mov -f s16le
FATE_TESTS += fate-qt-mac6-stereo FATE_TESTS += fate-qt-mac6-stereo
fate-qt-mac6-stereo: CMD = md5 -i $(SAMPLES)/qt-surge-suite/surge-2-8-MAC6.mov -f s16le fate-qt-mac6-stereo: CMD = md5 -i $(SAMPLES)/qt-surge-suite/surge-2-8-MAC6.mov -f s16le
FATE_TESTS += fate-qt-msadpcm-stereo
fate-qt-msadpcm-stereo: CMD = md5 -i $(SAMPLES)/qt-surge-suite/surge-2-16-L-ms02.mov -f s16le
FATE_TESTS += fate-qt-msimaadpcm-stereo
fate-qt-msimaadpcm-stereo: CMD = md5 -i $(SAMPLES)/qt-surge-suite/surge-2-16-L-ms11.mov -f s16le
FATE_TESTS += fate-qt-rawpcm-16bit-stereo-signed-be
fate-qt-rawpcm-16bit-stereo-signed-be: CMD = md5 -i $(SAMPLES)/qt-surge-suite/surge-2-16-B-twos.mov -f s16le
FATE_TESTS += fate-qt-rawpcm-16bit-stereo-signed-le
fate-qt-rawpcm-16bit-stereo-signed-le: CMD = md5 -i $(SAMPLES)/qt-surge-suite/surge-2-16-L-sowt.mov -f s16le
FATE_TESTS += fate-qt-rawpcm-8bit-mono-unsigned
fate-qt-rawpcm-8bit-mono-unsigned: CMD = md5 -i $(SAMPLES)/qt-surge-suite/surge-1-8-raw.mov -f s16le
FATE_TESTS += fate-qt-rawpcm-8bit-stereo-unsigned
fate-qt-rawpcm-8bit-stereo-unsigned: CMD = md5 -i $(SAMPLES)/qt-surge-suite/surge-2-8-raw.mov -f s16le
FATE_TESTS += fate-qt-ulaw-mono FATE_TESTS += fate-qt-ulaw-mono
fate-qt-ulaw-mono: CMD = md5 -i $(SAMPLES)/qt-surge-suite/surge-1-16-B-ulaw.mov -f s16le fate-qt-ulaw-mono: CMD = md5 -i $(SAMPLES)/qt-surge-suite/surge-1-16-B-ulaw.mov -f s16le
FATE_TESTS += fate-qt-ulaw-stereo FATE_TESTS += fate-qt-ulaw-stereo
@ -222,8 +178,6 @@ FATE_TESTS += fate-svq1
fate-svq1: CMD = framecrc -i $(SAMPLES)/svq1/marymary-shackles.mov -an -t 10 fate-svq1: CMD = framecrc -i $(SAMPLES)/svq1/marymary-shackles.mov -an -t 10
FATE_TESTS += fate-svq3 FATE_TESTS += fate-svq3
fate-svq3: CMD = framecrc -i $(SAMPLES)/svq3/Vertical400kbit.sorenson3.mov -t 6 -an fate-svq3: CMD = framecrc -i $(SAMPLES)/svq3/Vertical400kbit.sorenson3.mov -t 6 -an
FATE_TESTS += fate-thp-mjpeg-adpcm
fate-thp-mjpeg-adpcm: CMD = framecrc -idct simple -i $(SAMPLES)/thp/pikmin2-opening1-partial.thp
FATE_TESTS += fate-tiertex-seq FATE_TESTS += fate-tiertex-seq
fate-tiertex-seq: CMD = framecrc -i $(SAMPLES)/tiertex-seq/Gameover.seq -pix_fmt rgb24 fate-tiertex-seq: CMD = framecrc -i $(SAMPLES)/tiertex-seq/Gameover.seq -pix_fmt rgb24
FATE_TESTS += fate-tmv FATE_TESTS += fate-tmv
@ -242,13 +196,9 @@ FATE_TESTS += fate-video-xl
fate-video-xl: CMD = framecrc -i $(SAMPLES)/vixl/pig-vixl.avi fate-video-xl: CMD = framecrc -i $(SAMPLES)/vixl/pig-vixl.avi
FATE_TESTS += fate-vqa-cc FATE_TESTS += fate-vqa-cc
fate-vqa-cc: CMD = framecrc -i $(SAMPLES)/vqa/cc-demo1-partial.vqa -pix_fmt rgb24 fate-vqa-cc: CMD = framecrc -i $(SAMPLES)/vqa/cc-demo1-partial.vqa -pix_fmt rgb24
FATE_TESTS += fate-w64
fate-w64: CMD = crc -i $(SAMPLES)/w64/w64-pcm16.w64
FATE_TESTS += fate-wc3movie-xan FATE_TESTS += fate-wc3movie-xan
fate-wc3movie-xan: CMD = framecrc -i $(SAMPLES)/wc3movie/SC_32-part.MVE -pix_fmt rgb24 fate-wc3movie-xan: CMD = framecrc -i $(SAMPLES)/wc3movie/SC_32-part.MVE -pix_fmt rgb24
FATE_TESTS += fate-westwood-aud FATE_TESTS += fate-westwood-aud
fate-westwood-aud: CMD = md5 -i $(SAMPLES)/westwood-aud/excellent.aud -f s16le fate-westwood-aud: CMD = md5 -i $(SAMPLES)/westwood-aud/excellent.aud -f s16le
FATE_TESTS += fate-wnv1 FATE_TESTS += fate-wnv1
fate-wnv1: CMD = framecrc -i $(SAMPLES)/wnv1/wnv1-codec.avi -an fate-wnv1: CMD = framecrc -i $(SAMPLES)/wnv1/wnv1-codec.avi -an
FATE_TESTS += fate-xan-dpcm
fate-xan-dpcm: CMD = md5 -i $(SAMPLES)/wc4-xan/wc4_2.avi -vn -f s16le

tests/fate/dpcm.mak Normal file

@ -0,0 +1,42 @@
FATE_TESTS += fate-adpcm-ea-r2
fate-adpcm-ea-r2: CMD = crc -i $(SAMPLES)/ea-mpc/THX_logo.mpc -vn
FATE_TESTS += fate-adpcm-ea-r3
fate-adpcm-ea-r3: CMD = crc -i $(SAMPLES)/ea-vp6/THX_logo.vp6 -vn
FATE_TESTS += fate-creative-adpcm
fate-creative-adpcm: CMD = md5 -i $(SAMPLES)/creative/intro-partial.wav -f s16le
FATE_TESTS += fate-creative-adpcm-8-2bit
fate-creative-adpcm-8-2bit: CMD = md5 -i $(SAMPLES)/creative/BBC_2BIT.VOC -f s16le
FATE_TESTS += fate-creative-adpcm-8-2.6bit
fate-creative-adpcm-8-2.6bit: CMD = md5 -i $(SAMPLES)/creative/BBC_3BIT.VOC -f s16le
FATE_TESTS += fate-creative-adpcm-8-4bit
fate-creative-adpcm-8-4bit: CMD = md5 -i $(SAMPLES)/creative/BBC_4BIT.VOC -f s16le
FATE_TESTS += fate-ea-mad-adpcm-ea-r1
fate-ea-mad-adpcm-ea-r1: CMD = framecrc -i $(SAMPLES)/ea-mad/NFS6LogoE.mad
FATE_TESTS += fate-ea-tqi-adpcm
fate-ea-tqi-adpcm: CMD = framecrc -i $(SAMPLES)/ea-wve/networkBackbone-partial.wve -frames:v 26
FATE_TESTS += fate-idroq-video-dpcm
fate-idroq-video-dpcm: CMD = framecrc -i $(SAMPLES)/idroq/idlogo.roq
FATE_TESTS += fate-psx-str-v3-adpcm_xa
fate-psx-str-v3-adpcm_xa: CMD = framecrc -i $(SAMPLES)/psx-str/abc000_cut.str -vn
FATE_TESTS += fate-qt-msadpcm-stereo
fate-qt-msadpcm-stereo: CMD = md5 -i $(SAMPLES)/qt-surge-suite/surge-2-16-L-ms02.mov -f s16le
FATE_TESTS += fate-qt-msimaadpcm-stereo
fate-qt-msimaadpcm-stereo: CMD = md5 -i $(SAMPLES)/qt-surge-suite/surge-2-16-L-ms11.mov -f s16le
FATE_TESTS += fate-thp-mjpeg-adpcm
fate-thp-mjpeg-adpcm: CMD = framecrc -idct simple -i $(SAMPLES)/thp/pikmin2-opening1-partial.thp
FATE_TESTS += fate-dpcm_xan_audio
fate-dpcm_xan_audio: CMD = md5 -i $(SAMPLES)/wc4-xan/wc4_2.avi -vn -f s16le

@ -16,3 +16,12 @@ fate-wmv8-drm-nodec: CMD = framecrc -cryptokey 137381538c84c068111902a59c5cf6c34
FATE_TESTS += fate-vc1 FATE_TESTS += fate-vc1
fate-vc1: CMD = framecrc -i $(SAMPLES)/vc1/SA00040.vc1 fate-vc1: CMD = framecrc -i $(SAMPLES)/vc1/SA00040.vc1
FATE_TESTS += fate-vc1_sa00050
fate-vc1_sa00050: CMD = framecrc -i $(SAMPLES)/vc1/SA00050.vc1
FATE_TESTS += fate-vc1_sa10091
fate-vc1_sa10091: CMD = framecrc -i $(SAMPLES)/vc1/SA10091.vc1
FATE_TESTS += fate-vc1_sa20021
fate-vc1_sa20021: CMD = framecrc -i $(SAMPLES)/vc1/SA20021.vc1

tests/fate/pcm.mak Normal file

@ -0,0 +1,32 @@
FATE_TESTS += fate-duck-dk3
fate-duck-dk3: CMD = md5 -i $(SAMPLES)/duck/sop-audio-only.avi -f s16le
FATE_TESTS += fate-duck-dk4
fate-duck-dk4: CMD = md5 -i $(SAMPLES)/duck/salsa-audio-only.avi -f s16le
FATE_TESTS += fate-ea-mad-pcm-planar
fate-ea-mad-pcm-planar: CMD = framecrc -i $(SAMPLES)/ea-mad/xeasport.mad
FATE_TESTS += fate-film-cvid-pcm-stereo-8bit
fate-film-cvid-pcm-stereo-8bit: CMD = framecrc -i $(SAMPLES)/film/logo-capcom.cpk
FATE_TESTS += fate-iff-pcm
fate-iff-pcm: CMD = md5 -i $(SAMPLES)/iff/Bells -f s16le
FATE_TESTS += fate-pcm_dvd
fate-pcm_dvd: CMD = framecrc -i $(SAMPLES)/pcm-dvd/coolitnow-partial.vob -vn
FATE_TESTS += fate-qt-rawpcm-8bit-mono-unsigned
fate-qt-rawpcm-8bit-mono-unsigned: CMD = md5 -i $(SAMPLES)/qt-surge-suite/surge-1-8-raw.mov -f s16le
FATE_TESTS += fate-qt-rawpcm-8bit-stereo-unsigned
fate-qt-rawpcm-8bit-stereo-unsigned: CMD = md5 -i $(SAMPLES)/qt-surge-suite/surge-2-8-raw.mov -f s16le
FATE_TESTS += fate-qt-rawpcm-16bit-stereo-signed-be
fate-qt-rawpcm-16bit-stereo-signed-be: CMD = md5 -i $(SAMPLES)/qt-surge-suite/surge-2-16-B-twos.mov -f s16le
FATE_TESTS += fate-qt-rawpcm-16bit-stereo-signed-le
fate-qt-rawpcm-16bit-stereo-signed-le: CMD = md5 -i $(SAMPLES)/qt-surge-suite/surge-2-16-L-sowt.mov -f s16le
FATE_TESTS += fate-w64
fate-w64: CMD = crc -i $(SAMPLES)/w64/w64-pcm16.w64

@ -87,3 +87,6 @@ fate-v410enc: CMD = md5 -f image2 -vcodec pgmyuv -i $(TARGET_PATH)/tests/vsynth1
FATE_TESTS += fate-r210 FATE_TESTS += fate-r210
fate-r210: CMD = framecrc -i $(SAMPLES)/r210/r210.avi -pix_fmt rgb48le fate-r210: CMD = framecrc -i $(SAMPLES)/r210/r210.avi -pix_fmt rgb48le
FATE_TESTS += fate-xxan_wc4_video
fate-xxan_wc4_video: CMD = framecrc -i $(SAMPLES)/wc4-xan/wc4_2.avi -an -vframes 10

@ -0,0 +1,30 @@
0, 0, 115200, 0xb8830eef
0, 3600, 115200, 0xb8830eef
0, 7200, 115200, 0xb8830eef
0, 10800, 115200, 0x952ff5e1
0, 14400, 115200, 0xa4362b14
0, 18000, 115200, 0x32bacbe7
0, 21600, 115200, 0x509eb814
0, 25200, 115200, 0x509eb814
0, 28800, 115200, 0x11a76c3e
0, 32400, 115200, 0x11a76c3e
0, 36000, 115200, 0x00cf734a
0, 39600, 115200, 0x00cf734a
0, 43200, 115200, 0x00cf734a
0, 46800, 115200, 0x00cf734a
0, 50400, 115200, 0x00cf734a
0, 54000, 115200, 0x00cf734a
0, 57600, 115200, 0x00cf734a
0, 61200, 115200, 0x00cf734a
0, 64800, 115200, 0xfddf48e6
0, 68400, 115200, 0xfddf48e6
0, 72000, 115200, 0x1eccebbf
0, 75600, 115200, 0x3da2f77e
0, 79200, 115200, 0x7c232572
0, 82800, 115200, 0xedf426e5
0, 86400, 115200, 0x5324ab20
0, 90000, 115200, 0x5324ab20
0, 93600, 115200, 0xa23e66bb
0, 97200, 115200, 0x680a50ff
0, 100800, 115200, 0x680a50ff
0, 104400, 115200, 0x680a50ff

@ -0,0 +1,30 @@
0, 0, 518400, 0xae20b4fa
0, 3600, 518400, 0x2b4ccdf9
0, 7200, 518400, 0x2b4ccdf9
0, 10800, 518400, 0x2b4ccdf9
0, 14400, 518400, 0x2b4ccdf9
0, 18000, 518400, 0x2b4ccdf9
0, 21600, 518400, 0x70d9a891
0, 25200, 518400, 0x70d9a891
0, 28800, 518400, 0x70d9a891
0, 32400, 518400, 0xa461ee86
0, 36000, 518400, 0x722bc6e8
0, 39600, 518400, 0x722bc6e8
0, 43200, 518400, 0x722bc6e8
0, 46800, 518400, 0xf752fd2c
0, 50400, 518400, 0xf752fd2c
0, 54000, 518400, 0x91abcaca
0, 57600, 518400, 0x572727c3
0, 61200, 518400, 0x572727c3
0, 64800, 518400, 0x24c12382
0, 68400, 518400, 0x24c12382
0, 72000, 518400, 0x9aa39fe8
0, 75600, 518400, 0x9aa39fe8
0, 79200, 518400, 0x5cb6bd19
0, 82800, 518400, 0x704d9300
0, 86400, 518400, 0x590fad49
0, 90000, 518400, 0x590fad49
0, 93600, 518400, 0x590fad49
0, 97200, 518400, 0x46bea10b
0, 100800, 518400, 0x46bea10b
0, 104400, 518400, 0x46bea10b

@ -0,0 +1,60 @@
0, 0, 506880, 0x884bc093
0, 3600, 506880, 0x4b09548f
0, 7200, 506880, 0x195cbee1
0, 10800, 506880, 0xc8141e28
0, 14400, 506880, 0xb170c49b
0, 18000, 506880, 0x2782268a
0, 21600, 506880, 0x2782268a
0, 25200, 506880, 0x2782268a
0, 28800, 506880, 0x2782268a
0, 32400, 506880, 0xe6803b32
0, 36000, 506880, 0xe6803b32
0, 39600, 506880, 0xa5ef9baf
0, 43200, 506880, 0xa5ef9baf
0, 46800, 506880, 0x46e8cbcb
0, 50400, 506880, 0x28a2239b
0, 54000, 506880, 0x7667af2f
0, 57600, 506880, 0x7667af2f
0, 61200, 506880, 0x8011bcaf
0, 64800, 506880, 0xd422115b
0, 68400, 506880, 0xd422115b
0, 72000, 506880, 0xd422115b
0, 75600, 506880, 0xbcee0b5b
0, 79200, 506880, 0x08fe9ec8
0, 82800, 506880, 0xc8fb8b37
0, 86400, 506880, 0xc8fb8b37
0, 90000, 506880, 0x2c698b52
0, 93600, 506880, 0x2c698b52
0, 97200, 506880, 0x2c698b52
0, 100800, 506880, 0x2b4ad9bc
0, 104400, 506880, 0x2b4ad9bc
0, 108000, 506880, 0x2b4ad9bc
0, 111600, 506880, 0x2b4ad9bc
0, 115200, 506880, 0x92e84ebb
0, 118800, 506880, 0x92e84ebb
0, 122400, 506880, 0xdb877da3
0, 126000, 506880, 0xdb877da3
0, 129600, 506880, 0xdb877da3
0, 133200, 506880, 0x44610654
0, 136800, 506880, 0x44610654
0, 140400, 506880, 0xe254ce67
0, 144000, 506880, 0xa6085385
0, 147600, 506880, 0x2d45d744
0, 151200, 506880, 0x2d45d744
0, 154800, 506880, 0x6e684f51
0, 158400, 506880, 0xe96186cf
0, 162000, 506880, 0xb535d369
0, 165600, 506880, 0xb535d369
0, 169200, 506880, 0xb535d369
0, 172800, 506880, 0xeed0b7e0
0, 176400, 506880, 0xeed0b7e0
0, 180000, 506880, 0xeed0b7e0
0, 183600, 506880, 0xeed0b7e0
0, 187200, 506880, 0x8789b20b
0, 190800, 506880, 0x0a0f42fb
0, 194400, 506880, 0x09bbac2d
0, 198000, 506880, 0x09bbac2d
0, 201600, 506880, 0x09bbac2d
0, 205200, 506880, 0x09bbac2d
0, 208800, 506880, 0x09bbac2d
0, 212400, 506880, 0xda77f0df

@ -0,0 +1,10 @@
0, 0, 79360, 0x877eb3ed
0, 6000, 79360, 0x9ff8707c
0, 12000, 79360, 0x144dec86
0, 18000, 79360, 0x56d59588
0, 24000, 79360, 0x2d20f8ce
0, 30000, 79360, 0x1a752c42
0, 36000, 79360, 0x85705730
0, 42000, 79360, 0xddea3741
0, 48000, 79360, 0x46448efd
0, 54000, 79360, 0x27186e2b