while (av_fifo_size(f->fifo)) {
av_fifo_generic_read(f->fifo, &pkt, sizeof(pkt), NULL);
av_free_packet(&pkt);
}
pthread_cond_signal(&f->fifo_cond);
pthread_mutex_unlock(&f->fifo_lock);
pthread_join(f->thread, NULL);
f->joined = 1;
while (av_fifo_size(f->fifo)) {
av_fifo_generic_read(f->fifo, &pkt, sizeof(pkt), NULL);
av_free_packet(&pkt);
}
av_fifo_free(f->fifo);
}
}
static int init_input_threads(void)
{
    int i, ret;

    if (nb_input_files == 1)
return 0;
for (i = 0; i < nb_input_files; i++) {
InputFile *f = input_files[i];
if (!(f->fifo = av_fifo_alloc(8*sizeof(AVPacket))))
return AVERROR(ENOMEM);
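        /* non-seekable inputs (and demuxers without an AVIOContext, other than
         * lavfi) are read in non-blocking mode so that an empty FIFO does not
         * stall the main loop */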
if (f->ctx->pb ? !f->ctx->pb->seekable :
strcmp(f->ctx->iformat->name, "lavfi"))
f->non_blocking = 1;
pthread_mutex_init(&f->fifo_lock, NULL);
pthread_cond_init (&f->fifo_cond, NULL);
if ((ret = pthread_create(&f->thread, NULL, input_thread, f)))
return AVERROR(ret);
}
return 0;
}
static int get_input_packet_mt(InputFile *f, AVPacket *pkt)
{
    int ret = 0;

    pthread_mutex_lock(&f->fifo_lock);
    while (1) {
        if (av_fifo_size(f->fifo)) {
            av_fifo_generic_read(f->fifo, pkt, sizeof(*pkt), NULL);
            pthread_cond_signal(&f->fifo_cond);
            break;
        }
        /* FIFO empty: fail immediately for non-blocking inputs, otherwise wait */
        if (f->non_blocking) {
            ret = AVERROR(EAGAIN);
            break;
        }
        pthread_cond_wait(&f->fifo_cond, &f->fifo_lock);
    }
    pthread_mutex_unlock(&f->fifo_lock);
    return ret;
}
static int get_input_packet(InputFile *f, AVPacket *pkt)
{
if (f->rate_emu) {
int i;
for (i = 0; i < f->nb_streams; i++) {
InputStream *ist = input_streams[f->ist_index + i];
int64_t pts = av_rescale(ist->dts, 1000000, AV_TIME_BASE);
int64_t now = av_gettime() - ist->start;
if (pts > now)
return AVERROR(EAGAIN);
}
}
#if HAVE_PTHREADS
    if (nb_input_files > 1)
return get_input_packet_mt(f, pkt);
#endif
    return av_read_frame(f->ctx, pkt);
}
static int got_eagain(void)
{
int i;
for (i = 0; i < nb_output_streams; i++)
if (output_streams[i]->unavailable)
return 1;
return 0;
}
static void reset_eagain(void)
{
int i;
for (i = 0; i < nb_input_files; i++)
input_files[i]->eagain = 0;
for (i = 0; i < nb_output_streams; i++)
output_streams[i]->unavailable = 0;
}
/*
 * Return
 * - 0 -- one packet was read and processed
* - AVERROR(EAGAIN) -- no packets were available for selected file,
* this function should be called again
* - AVERROR_EOF -- this function should not be called again
*/
static int process_input(int file_index)
{
InputFile *ifile = input_files[file_index];
AVFormatContext *is;
InputStream *ist;
AVPacket pkt;
int ret, i, j;
is = ifile->ctx;
ret = get_input_packet(ifile, &pkt);
if (ret == AVERROR(EAGAIN)) {
ifile->eagain = 1;
return ret;
}
if (ret < 0) {
if (ret != AVERROR_EOF) {
print_error(is->filename, ret);
            if (exit_on_error)
                exit_program(1);
}
ifile->eof_reached = 1;
for (i = 0; i < ifile->nb_streams; i++) {
ist = input_streams[ifile->ist_index + i];
if (ist->decoding_needed)
output_packet(ist, NULL);
/* mark all outputs that don't go through lavfi as finished */
for (j = 0; j < nb_output_streams; j++) {
OutputStream *ost = output_streams[j];
if (ost->source_index == ifile->ist_index + i &&
(ost->stream_copy || ost->enc->type == AVMEDIA_TYPE_SUBTITLE))
close_output_stream(ost);
}
}
return AVERROR(EAGAIN);
}
reset_eagain();
if (do_pkt_dump) {
av_pkt_dump_log2(NULL, AV_LOG_DEBUG, &pkt, do_hex_dump,
is->streams[pkt.stream_index]);
}
/* the following test is needed in case new streams appear
dynamically in stream : we ignore them */
if (pkt.stream_index >= ifile->nb_streams) {
report_new_stream(file_index, &pkt);
goto discard_packet;
}
ist = input_streams[ifile->ist_index + pkt.stream_index];
if (ist->discard)
goto discard_packet;
if (debug_ts) {
av_log(NULL, AV_LOG_INFO, "demuxer -> ist_index:%d type:%s "
"next_dts:%s next_dts_time:%s next_pts:%s next_pts_time:%s pkt_pts:%s pkt_pts_time:%s pkt_dts:%s pkt_dts_time:%s off:%s off_time:%s\n",
ifile->ist_index + pkt.stream_index, av_get_media_type_string(ist->st->codec->codec_type),
av_ts2str(ist->next_dts), av_ts2timestr(ist->next_dts, &AV_TIME_BASE_Q),
av_ts2str(ist->next_pts), av_ts2timestr(ist->next_pts, &AV_TIME_BASE_Q),
av_ts2str(pkt.pts), av_ts2timestr(pkt.pts, &ist->st->time_base),
av_ts2str(pkt.dts), av_ts2timestr(pkt.dts, &ist->st->time_base),
av_ts2str(input_files[ist->file_index]->ts_offset),
               av_ts2timestr(input_files[ist->file_index]->ts_offset, &AV_TIME_BASE_Q));
    }
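    /* correct timestamps that wrapped around, for formats whose timestamps use
     * fewer than 64 bits (pts_wrap_bits), relative to the stream start time */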
if(!ist->wrap_correction_done && is->start_time != AV_NOPTS_VALUE && ist->st->pts_wrap_bits < 64){
int64_t stime, stime2;
// Correcting starttime based on the enabled streams
// FIXME this ideally should be done before the first use of starttime but we do not know which are the enabled streams at that point.
// so we instead do it here as part of discontinuity handling
if ( ist->next_dts == AV_NOPTS_VALUE
&& ifile->ts_offset == -is->start_time
&& (is->iformat->flags & AVFMT_TS_DISCONT)) {
int64_t new_start_time = INT64_MAX;
for (i=0; i<is->nb_streams; i++) {
AVStream *st = is->streams[i];
if(st->discard == AVDISCARD_ALL || st->start_time == AV_NOPTS_VALUE)
continue;
new_start_time = FFMIN(new_start_time, av_rescale_q(st->start_time, st->time_base, AV_TIME_BASE_Q));
}
if (new_start_time > is->start_time) {
av_log(is, AV_LOG_VERBOSE, "Correcting start time by %"PRId64"\n", new_start_time - is->start_time);
ifile->ts_offset = -new_start_time;
}
}
stime = av_rescale_q(is->start_time, AV_TIME_BASE_Q, ist->st->time_base);
stime2= stime + (1ULL<<ist->st->pts_wrap_bits);
ist->wrap_correction_done = 1;
if(stime2 > stime && pkt.dts != AV_NOPTS_VALUE && pkt.dts > stime + (1LL<<(ist->st->pts_wrap_bits-1))) {
pkt.dts -= 1ULL<<ist->st->pts_wrap_bits;
ist->wrap_correction_done = 0;
}
if(stime2 > stime && pkt.pts != AV_NOPTS_VALUE && pkt.pts > stime + (1LL<<(ist->st->pts_wrap_bits-1))) {
pkt.pts -= 1ULL<<ist->st->pts_wrap_bits;
ist->wrap_correction_done = 0;
}
}
if (pkt.dts != AV_NOPTS_VALUE)
pkt.dts += av_rescale_q(ifile->ts_offset, AV_TIME_BASE_Q, ist->st->time_base);
if (pkt.pts != AV_NOPTS_VALUE)
pkt.pts += av_rescale_q(ifile->ts_offset, AV_TIME_BASE_Q, ist->st->time_base);
if (pkt.pts != AV_NOPTS_VALUE)
pkt.pts *= ist->ts_scale;
if (pkt.dts != AV_NOPTS_VALUE)
pkt.dts *= ist->ts_scale;
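    /* first packet of this stream in a discontinuity-prone container: if it is
     * far from the last timestamp seen in this file, fold the difference into
     * the file's ts_offset */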
if (pkt.dts != AV_NOPTS_VALUE && ist->next_dts == AV_NOPTS_VALUE && !copy_ts
&& (is->iformat->flags & AVFMT_TS_DISCONT) && ifile->last_ts != AV_NOPTS_VALUE) {
int64_t pkt_dts = av_rescale_q(pkt.dts, ist->st->time_base, AV_TIME_BASE_Q);
int64_t delta = pkt_dts - ifile->last_ts;
if(delta < -1LL*dts_delta_threshold*AV_TIME_BASE ||
(delta > 1LL*dts_delta_threshold*AV_TIME_BASE &&
ist->st->codec->codec_type != AVMEDIA_TYPE_SUBTITLE)){
ifile->ts_offset -= delta;
av_log(NULL, AV_LOG_DEBUG,
"Inter stream timestamp discontinuity %"PRId64", new offset= %"PRId64"\n",
delta, ifile->ts_offset);
pkt.dts -= av_rescale_q(delta, AV_TIME_BASE_Q, ist->st->time_base);
if (pkt.pts != AV_NOPTS_VALUE)
pkt.pts -= av_rescale_q(delta, AV_TIME_BASE_Q, ist->st->time_base);
}
}
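    /* later packets: compare against the predicted next_dts; large jumps are
     * absorbed into ts_offset for discontinuous formats, or the offending
     * timestamps are dropped otherwise */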
if (pkt.dts != AV_NOPTS_VALUE && ist->next_dts != AV_NOPTS_VALUE &&
!copy_ts) {
int64_t pkt_dts = av_rescale_q(pkt.dts, ist->st->time_base, AV_TIME_BASE_Q);
int64_t delta = pkt_dts - ist->next_dts;
if (is->iformat->flags & AVFMT_TS_DISCONT) {
if (delta < -1LL*dts_delta_threshold*AV_TIME_BASE ||
(delta > 1LL*dts_delta_threshold*AV_TIME_BASE &&
ist->st->codec->codec_type != AVMEDIA_TYPE_SUBTITLE) ||
pkt_dts + AV_TIME_BASE/10 < FFMAX(ist->pts, ist->dts)) {
ifile->ts_offset -= delta;
av_log(NULL, AV_LOG_DEBUG,
"timestamp discontinuity %"PRId64", new offset= %"PRId64"\n",
delta, ifile->ts_offset);
pkt.dts -= av_rescale_q(delta, AV_TIME_BASE_Q, ist->st->time_base);
if (pkt.pts != AV_NOPTS_VALUE)
pkt.pts -= av_rescale_q(delta, AV_TIME_BASE_Q, ist->st->time_base);
}
} else {
if ( delta < -1LL*dts_error_threshold*AV_TIME_BASE ||
(delta > 1LL*dts_error_threshold*AV_TIME_BASE && ist->st->codec->codec_type != AVMEDIA_TYPE_SUBTITLE)) {
av_log(NULL, AV_LOG_WARNING, "DTS %"PRId64", next:%"PRId64" st:%d invalid dropping\n", pkt.dts, ist->next_dts, pkt.stream_index);
pkt.dts = AV_NOPTS_VALUE;
}
if (pkt.pts != AV_NOPTS_VALUE){
int64_t pkt_pts = av_rescale_q(pkt.pts, ist->st->time_base, AV_TIME_BASE_Q);
delta = pkt_pts - ist->next_dts;
if ( delta < -1LL*dts_error_threshold*AV_TIME_BASE ||
(delta > 1LL*dts_error_threshold*AV_TIME_BASE && ist->st->codec->codec_type != AVMEDIA_TYPE_SUBTITLE)) {
av_log(NULL, AV_LOG_WARNING, "PTS %"PRId64", next:%"PRId64" invalid dropping st:%d\n", pkt.pts, ist->next_dts, pkt.stream_index);
pkt.pts = AV_NOPTS_VALUE;
}
}
}
}
if (pkt.dts != AV_NOPTS_VALUE)
ifile->last_ts = av_rescale_q(pkt.dts, ist->st->time_base, AV_TIME_BASE_Q);
if (debug_ts) {
av_log(NULL, AV_LOG_INFO, "demuxer+ffmpeg -> ist_index:%d type:%s pkt_pts:%s pkt_pts_time:%s pkt_dts:%s pkt_dts_time:%s off:%s off_time:%s\n",
ifile->ist_index + pkt.stream_index, av_get_media_type_string(ist->st->codec->codec_type),
av_ts2str(pkt.pts), av_ts2timestr(pkt.pts, &ist->st->time_base),
av_ts2str(pkt.dts), av_ts2timestr(pkt.dts, &ist->st->time_base),
av_ts2str(input_files[ist->file_index]->ts_offset),
               av_ts2timestr(input_files[ist->file_index]->ts_offset, &AV_TIME_BASE_Q));
    }
sub2video_heartbeat(ist, pkt.pts);
ret = output_packet(ist, &pkt);
if (ret < 0) {
av_log(NULL, AV_LOG_ERROR, "Error while decoding stream #%d:%d: %s\n",
ist->file_index, ist->st->index, av_err2str(ret));
}
discard_packet:
av_free_packet(&pkt);
return 0;
}
/**
* Perform a step of transcoding for the specified filter graph.
*
* @param[in] graph filter graph to consider
* @param[out] best_ist input stream where a frame would allow to continue
* @return 0 for success, <0 for error
*/
static int transcode_from_filter(FilterGraph *graph, InputStream **best_ist)
{
int i, ret;
int nb_requests, nb_requests_max = 0;
InputFilter *ifilter;
InputStream *ist;
*best_ist = NULL;
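    /* ask the graph to produce output on its oldest sink; the return value
     * decides what follows: >= 0 -> reap filtered frames, AVERROR_EOF -> close
     * the graph's outputs, AVERROR(EAGAIN) -> feed more input */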
ret = avfilter_graph_request_oldest(graph->graph);
if (ret >= 0)
return reap_filters();
if (ret == AVERROR_EOF) {
ret = reap_filters();
for (i = 0; i < graph->nb_outputs; i++)
close_output_stream(graph->outputs[i]->ost);
return ret;
}
if (ret != AVERROR(EAGAIN))
return ret;
for (i = 0; i < graph->nb_inputs; i++) {
ifilter = graph->inputs[i];
ist = ifilter->ist;
if (input_files[ist->file_index]->eagain ||
input_files[ist->file_index]->eof_reached)
continue;
nb_requests = av_buffersrc_get_nb_failed_requests(ifilter->filter);
if (nb_requests > nb_requests_max) {
nb_requests_max = nb_requests;
*best_ist = ist;
}
}
if (!*best_ist)
for (i = 0; i < graph->nb_outputs; i++)
graph->outputs[i]->ost->unavailable = 1;
return 0;
}
/**
* Run a single step of transcoding.
*
* @return 0 for success, <0 for error
*/
static int transcode_step(void)
{
OutputStream *ost;
InputStream *ist;
int ret;
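    /* pick the next output stream to make progress on; if none is available we
     * either back off (all inputs returned EAGAIN) or finish */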
ost = choose_output();
if (!ost) {
if (got_eagain()) {
reset_eagain();
av_usleep(10000);
return 0;
}
av_log(NULL, AV_LOG_VERBOSE, "No more inputs to read from, finishing.\n");
return AVERROR_EOF;
}
if (ost->filter) {
if ((ret = transcode_from_filter(ost->filter->graph, &ist)) < 0)
return ret;
if (!ist)
return 0;
} else {
av_assert0(ost->source_index >= 0);
ist = input_streams[ost->source_index];
}
ret = process_input(ist->file_index);
if (ret == AVERROR(EAGAIN)) {
if (input_files[ist->file_index]->eagain)
ost->unavailable = 1;
return 0;
}
if (ret < 0)
return ret == AVERROR_EOF ? 0 : ret;
return reap_filters();
}
/*
* The following code is the main loop of the file converter
*/
static int transcode(void)
{
    int ret, i;
    AVFormatContext *os;
    OutputStream *ost;
InputStream *ist;
int64_t timer_start;
ret = transcode_init();
if (ret < 0)
goto fail;
if (stdin_interaction) {
        av_log(NULL, AV_LOG_INFO, "Press [q] to stop, [?] for help\n");
    }

    timer_start = av_gettime();
#if HAVE_PTHREADS
    if ((ret = init_input_threads()) < 0)
goto fail;
#endif
    while (!received_sigterm) {
        int64_t cur_time= av_gettime();
/* if 'q' pressed, exits */
if (stdin_interaction)
if (check_keyboard_interaction(cur_time) < 0)
break;
/* check if there's any stream where output is still needed */
if (!need_output()) {
            av_log(NULL, AV_LOG_VERBOSE, "No more output streams to write to, finishing.\n");
            break;
        }
        ret = transcode_step();
        if (ret < 0) {
            if (ret == AVERROR_EOF || ret == AVERROR(EAGAIN))
                continue;

            av_log(NULL, AV_LOG_ERROR, "Error while filtering.\n");
            break;
        }
/* dump report by using the output first video and audio streams */
        print_report(0, timer_start, cur_time);
    }
#if HAVE_PTHREADS
    free_input_threads();
#endif
/* at the end of stream, we must flush the decoder buffers */
for (i = 0; i < nb_input_streams; i++) {
ist = input_streams[i];
if (!input_files[ist->file_index]->eof_reached && ist->decoding_needed) {
output_packet(ist, NULL);
        }
    }
flush_encoders();
/* write the trailer if needed and close file */
    for (i = 0; i < nb_output_files; i++) {
        os = output_files[i]->ctx;
        av_write_trailer(os);
    }
/* dump report by using the first video and audio streams */
print_report(1, timer_start, av_gettime());
/* close each encoder */
for (i = 0; i < nb_output_streams; i++) {
ost = output_streams[i];
if (ost->encoding_needed) {
av_freep(&ost->st->codec->stats_in);
            avcodec_close(ost->st->codec);
        }
    }
/* close each decoder */
for (i = 0; i < nb_input_streams; i++) {
ist = input_streams[i];
if (ist->decoding_needed) {
avcodec_close(ist->st->codec);
if (ist->hwaccel_uninit)
                ist->hwaccel_uninit(ist->st->codec);
        }
    }
/* finished ! */
ret = 0;
fail:
if (output_streams) {
for (i = 0; i < nb_output_streams; i++) {
ost = output_streams[i];
av_freep(&ost->st->codec->extradata);
if (ost->logfile) {
fclose(ost->logfile);
                ost->logfile = NULL;
            }
av_freep(&ost->st->codec->subtitle_header);
av_freep(&ost->forced_kf_pts);
av_freep(&ost->apad);
av_dict_free(&ost->opts);
av_dict_free(&ost->swr_opts);
            av_dict_free(&ost->resample_opts);
        }
    }

    return ret;
}
static int64_t getutime(void)
{
#if HAVE_GETRUSAGE
struct rusage rusage;
getrusage(RUSAGE_SELF, &rusage);
return (rusage.ru_utime.tv_sec * 1000000LL) + rusage.ru_utime.tv_usec;
#elif HAVE_GETPROCESSTIMES
HANDLE proc;
FILETIME c, e, k, u;
proc = GetCurrentProcess();
GetProcessTimes(proc, &c, &e, &k, &u);
return ((int64_t) u.dwHighDateTime << 32 | u.dwLowDateTime) / 10;
#else
return av_gettime();
#endif
}
static int64_t getmaxrss(void)
{
#if HAVE_GETRUSAGE && HAVE_STRUCT_RUSAGE_RU_MAXRSS
struct rusage rusage;
getrusage(RUSAGE_SELF, &rusage);
return (int64_t)rusage.ru_maxrss * 1024;
#elif HAVE_GETPROCESSMEMORYINFO
HANDLE proc;
PROCESS_MEMORY_COUNTERS memcounters;
proc = GetCurrentProcess();
memcounters.cb = sizeof(memcounters);
GetProcessMemoryInfo(proc, &memcounters, sizeof(memcounters));
return memcounters.PeakPagefileUsage;
#else
return 0;
#endif
}
static void log_callback_null(void *ptr, int level, const char *fmt, va_list vl)
{
}
int main(int argc, char **argv)
{
    int ret;
    int64_t ti;

    register_exit(ffmpeg_cleanup);
setvbuf(stderr,NULL,_IONBF,0); /* win32 runtime needs this */
av_log_set_flags(AV_LOG_SKIP_REPEATED);
parse_loglevel(argc, argv, options);
if(argc>1 && !strcmp(argv[1], "-d")){
run_as_daemon=1;
av_log_set_callback(log_callback_null);
argc--;
argv++;
}
#if CONFIG_AVDEVICE
    avdevice_register_all();
#endif
    avformat_network_init();
show_banner(argc, argv, options);
/* parse options and open all input/output files */
    ret = ffmpeg_parse_options(argc, argv);
    if (ret < 0)
        exit_program(1);

    if (nb_output_files <= 0 && nb_input_files == 0) {
        av_log(NULL, AV_LOG_WARNING, "Use -h to get full help or, even better, run 'man %s'\n", program_name);
        exit_program(1);
    }
/* file converter / grab */
if (nb_output_files <= 0) {
        av_log(NULL, AV_LOG_FATAL, "At least one output file must be specified\n");
        exit_program(1);
    }
// if (nb_input_files == 0) {
// av_log(NULL, AV_LOG_FATAL, "At least one input file must be specified\n");
// exit_program(1);
// }
    ti = getutime();
    if (transcode() < 0)
        exit_program(1);
    ti = getutime() - ti;
    if (do_benchmark) {
        printf("bench: utime=%0.3fs\n", ti / 1000000.0);
    }
av_log(NULL, AV_LOG_DEBUG, "%"PRIu64" frames successfully decoded, %"PRIu64" decoding errors\n",
decode_error_stat[0], decode_error_stat[1]);
    if ((decode_error_stat[0] + decode_error_stat[1]) * max_error_rate < decode_error_stat[1])
        exit_program(69);
exit_program(received_nb_signals ? 255 : main_return_code);
    return main_return_code;
}