flushed = 0;
need_packet = 1;
frame = fframe = 0;
+ probe_frame = 0;
bsfc = 0;
stats_fp = 0;
stats_filename = 0;
if( filter_graph ) avfilter_graph_free(&filter_graph);
if( frame ) av_frame_free(&frame);
if( fframe ) av_frame_free(&fframe);
+ if( probe_frame ) av_frame_free(&probe_frame);
delete frm_lock;
if( stats_fp ) fclose(stats_fp);
if( stats_in ) av_freep(&stats_in);
return writing;
}
+// this is a global parameter that really should be in the context
static AVPixelFormat hw_pix_fmt = AV_PIX_FMT_NONE; // protected by ff_lock
+
+// goofy maneuver to attach a hw_format to an av_context
+#define GET_HW_PIXFMT(fn, fmt) \
+static AVPixelFormat get_hw_##fn(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) { \
+ return fmt; \
+}
+GET_HW_PIXFMT(vaapi, AV_PIX_FMT_VAAPI)
+GET_HW_PIXFMT(vdpau, AV_PIX_FMT_VDPAU)
+GET_HW_PIXFMT(cuda, AV_PIX_FMT_CUDA)
+GET_HW_PIXFMT(nv12, AV_PIX_FMT_NV12)
+
// AVCodecContext::get_format callback used during hw decoder probing.
// Scans the codec's offered formats for the globally requested
// hw_pix_fmt (protected by ff_lock).  When found, replaces the context's
// get_format with a trivial callback pinned to that format so later
// renegotiations keep returning it, and reports the match.
// On failure clears the global to AV_PIX_FMT_NONE so the caller can
// detect that hw setup did not succeed.
static enum AVPixelFormat get_hw_format(AVCodecContext *ctx,
		const enum AVPixelFormat *pix_fmts)
{
	for( const enum AVPixelFormat *p=pix_fmts; *p!=AV_PIX_FMT_NONE; ++p ) {
		if( *p != hw_pix_fmt ) continue;
		switch( *p ) {
		case AV_PIX_FMT_VAAPI: ctx->get_format = get_hw_vaapi;  return *p;
		case AV_PIX_FMT_VDPAU: ctx->get_format = get_hw_vdpau;  return *p;
		case AV_PIX_FMT_CUDA:  ctx->get_format = get_hw_cuda;   return *p;
		case AV_PIX_FMT_NV12:  ctx->get_format = get_hw_nv12;   return *p;
		default:
			fprintf(stderr, "Unknown HW surface format: %s\n",
				av_get_pix_fmt_name(*p));
			continue;  // keep scanning, a later entry may still match
		}
	}
	fprintf(stderr, "Failed to get HW surface format.\n");
	return hw_pix_fmt = AV_PIX_FMT_NONE;
}
}
if( ret >= 0 && hw_type != AV_HWDEVICE_TYPE_NONE ) {
ret = decode_hw_format(decoder, hw_type);
- if( !ret ) hw_type = AV_HWDEVICE_TYPE_NONE;
}
if( ret >= 0 ) {
avcodec_parameters_to_context(avctx, st->codecpar);
avctx->thread_count = ffmpeg->ff_cpus();
ret = avcodec_open2(avctx, decoder, &copts);
}
+ AVFrame *hw_frame = 0;
if( ret >= 0 && hw_type != AV_HWDEVICE_TYPE_NONE ) {
- if( need_packet ) {
- need_packet = 0;
- ret = read_packet();
- }
- if( ret >= 0 ) {
- AVPacket *pkt = (AVPacket*)ipkt;
- ret = avcodec_send_packet(avctx, pkt);
- if( ret < 0 || hw_pix_fmt == AV_PIX_FMT_NONE ) {
- ff_err(ret, "HW device init failed, using SW decode.\nfile:%s\n",
- ffmpeg->fmt_ctx->url);
- avcodec_close(avctx);
- avcodec_free_context(&avctx);
- av_buffer_unref(&hw_device_ctx);
- hw_device_ctx = 0;
- hw_type = AV_HWDEVICE_TYPE_NONE;
- int flags = AVSEEK_FLAG_BACKWARD | AVSEEK_FLAG_ANY;
- int idx = st->index;
- av_seek_frame(fmt_ctx, idx, INT64_MIN, flags);
- need_packet = 1; flushed = 0;
- seeked = 1; st_eof(0);
- ret = 0;
- continue;
- }
+ if( !(hw_frame=av_frame_alloc()) ) {
+ fprintf(stderr, "FFStream::decode_activate: av_frame_alloc failed\n");
+ ret = AVERROR(ENOMEM);
}
+ if( ret >= 0 )
+ ret = decode(hw_frame);
}
- if( ret >= 0 ) {
- reading = 1;
+ if( ret < 0 && hw_type != AV_HWDEVICE_TYPE_NONE ) {
+ ff_err(ret, "HW device init failed, using SW decode.\nfile:%s\n",
+ ffmpeg->fmt_ctx->url);
+ avcodec_close(avctx);
+ avcodec_free_context(&avctx);
+ av_buffer_unref(&hw_device_ctx);
+ hw_device_ctx = 0;
+ av_frame_free(&hw_frame);
+ hw_type = AV_HWDEVICE_TYPE_NONE;
+ int flags = AVSEEK_FLAG_BACKWARD | AVSEEK_FLAG_ANY;
+ int idx = st->index;
+ av_seek_frame(fmt_ctx, idx, 0, flags);
+ need_packet = 1; flushed = 0;
+ seeked = 1; st_eof(0);
+ ret = 0;
+ continue;
}
+ probe_frame = hw_frame;
+ if( ret >= 0 )
+ reading = 1;
else
eprintf(_("open decoder failed\n"));
}
int FFStream::decode(AVFrame *frame)
{
+ if( probe_frame ) { // hw probe reads first frame
+ av_frame_ref(frame, probe_frame);
+ av_frame_free(&probe_frame);
+ return 1;
+ }
int ret = 0;
int retries = MAX_RETRY;
tstmp = av_rescale_q(tstmp, time_base, AV_TIME_BASE_Q);
idx = -1;
#endif
-
+ av_frame_free(&probe_frame);
avcodec_flush_buffers(avctx);
avformat_flush(fmt_ctx);
#if 0
}
// Video stream wrapper.  Also derives from FFVideoConvert so decoded
// frames can be colorspace-converted using the user's preferences
// (fetched via ffmpeg->ff_prefs()).
FFVideoStream::FFVideoStream(FFMPEG *ffmpeg, AVStream *strm, int idx, int fidx)
 : FFStream(ffmpeg, strm, fidx),
   FFVideoConvert(ffmpeg->ff_prefs())
{
	this->idx = idx;
	width = height = 0;
	length = 0;
	interlaced = 0;
	top_field_first = 0;
	color_space = -1;	// -1 = not yet known; filled from stream params
	color_range = -1;	// -1 = not yet known
}
FFVideoStream::~FFVideoStream()
const char *hw_dev = ffmpeg->opt_hw_dev;
if( !hw_dev ) hw_dev = getenv("CIN_HW_DEV");
if( !hw_dev ) hw_dev = ffmpeg->ff_hw_dev();
- if( hw_dev && *hw_dev && strcmp(_("none"), hw_dev) ) {
+ if( hw_dev && *hw_dev &&
+ strcmp("none", hw_dev) && strcmp(_("none"), hw_dev) ) {
type = av_hwdevice_find_type_by_name(hw_dev);
if( type == AV_HWDEVICE_TYPE_NONE ) {
fprintf(stderr, "Device type %s is not supported.\n", hw_dev);
if( !config ) {
fprintf(stderr, "Decoder %s does not support device type %s.\n",
decoder->name, av_hwdevice_get_type_name(type));
+ ret = -1;
break;
}
if( (config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX) != 0 &&
avctx->hw_device_ctx = av_buffer_ref(hw_device_ctx);
ret = 1;
}
- else
+ else {
ff_err(ret, "Failed HW device create.\ndev:%s\n",
av_hwdevice_get_type_name(type));
+ ret = -1;
+ }
}
return ret;
}
frame->interlaced_frame = interlaced;
frame->top_field_first = top_field_first;
}
+ if( frame && frame->format == AV_PIX_FMT_VAAPI ) { // ugly
+ int ret = avcodec_send_frame(avctx, frame);
+ for( int retry=MAX_RETRY; !ret && --retry>=0; ) {
+ FFPacket pkt; av_init_packet(pkt);
+ pkt->data = NULL; pkt->size = 0;
+ if( (ret=avcodec_receive_packet(avctx, pkt)) < 0 ) {
+ if( ret == AVERROR(EAGAIN) ) ret = 0; // weird
+ break;
+ }
+ ret = write_packet(pkt);
+ pkt->stream_index = 0;
+ av_packet_unref(pkt);
+ }
+ if( ret < 0 ) {
+ ff_err(ret, "FFStream::encode_frame: vaapi encode failed.\nfile: %s\n",
+ ffmpeg->fmt_ctx->url);
+ return -1;
+ }
+ return 0;
+ }
return FFStream::encode_frame(frame);
}
}
int FFVideoConvert::convert_picture_vframe(VFrame *frame, AVFrame *ip, AVFrame *ipic)
-{
+{ // picture = vframe
int cmodel = frame->get_color_model();
AVPixelFormat ofmt = color_model_to_pix_fmt(cmodel);
if( ofmt == AV_PIX_FMT_NB ) return -1;
" sws_getCachedContext() failed\n");
return -1;
}
+
+ int color_range = 0;
+ switch( preferences->yuv_color_range ) {
+ case BC_COLORS_JPEG: color_range = 1; break;
+ case BC_COLORS_MPEG: color_range = 0; break;
+ }
+ int color_space = SWS_CS_ITU601;
+ switch( preferences->yuv_color_space ) {
+ case BC_COLORS_BT601: color_space = SWS_CS_ITU601; break;
+ case BC_COLORS_BT709: color_space = SWS_CS_ITU709; break;
+ case BC_COLORS_BT2020: color_space = SWS_CS_BT2020; break;
+ }
+ const int *color_table = sws_getCoefficients(color_space);
+
+ int *inv_table, *table, src_range, dst_range;
+ int brightness, contrast, saturation;
+ if( !sws_getColorspaceDetails(convert_ctx,
+ &inv_table, &src_range, &table, &dst_range,
+ &brightness, &contrast, &saturation) ) {
+ if( src_range != color_range || dst_range != color_range ||
+ inv_table != color_table || table != color_table )
+ sws_setColorspaceDetails(convert_ctx,
+ color_table, color_range, color_table, color_range,
+ brightness, contrast, saturation);
+ }
+
int ret = sws_scale(convert_ctx, ip->data, ip->linesize, 0, ip->height,
ipic->data, ipic->linesize);
if( ret < 0 ) {
}
int FFVideoConvert::convert_vframe_picture(VFrame *frame, AVFrame *op, AVFrame *opic)
-{
+{ // vframe = picture
int cmodel = frame->get_color_model();
AVPixelFormat ifmt = color_model_to_pix_fmt(cmodel);
if( ifmt == AV_PIX_FMT_NB ) return -1;
" sws_getCachedContext() failed\n");
return -1;
}
+
+
+ int color_range = 0;
+ switch( preferences->yuv_color_range ) {
+ case BC_COLORS_JPEG: color_range = 1; break;
+ case BC_COLORS_MPEG: color_range = 0; break;
+ }
+ int color_space = SWS_CS_ITU601;
+ switch( preferences->yuv_color_space ) {
+ case BC_COLORS_BT601: color_space = SWS_CS_ITU601; break;
+ case BC_COLORS_BT709: color_space = SWS_CS_ITU709; break;
+ case BC_COLORS_BT2020: color_space = SWS_CS_BT2020; break;
+ }
+ const int *color_table = sws_getCoefficients(color_space);
+
+ int *inv_table, *table, src_range, dst_range;
+ int brightness, contrast, saturation;
+ if( !sws_getColorspaceDetails(convert_ctx,
+ &inv_table, &src_range, &table, &dst_range,
+ &brightness, &contrast, &saturation) ) {
+ if( dst_range != color_range || table != color_table )
+ sws_setColorspaceDetails(convert_ctx,
+ inv_table, src_range, color_table, color_range,
+ brightness, contrast, saturation);
+ }
+
int ret = sws_scale(convert_ctx, opic->data, opic->linesize, 0, frame->get_h(),
op->data, op->linesize);
if( ret < 0 ) {
scan_video_options(asset, edl);
}
+void FFMPEG::scan_format_options(Asset *asset, EDL *edl)
+{
+}
+
+void FFMPEG::load_format_options(Asset *asset, EDL *edl)
+{
+ char options_path[BCTEXTLEN];
+ set_option_path(options_path, "format/%s", asset->fformat);
+ if( !load_options(options_path,
+ asset->ff_format_options,
+ sizeof(asset->ff_format_options)) )
+ scan_format_options(asset, edl);
+}
+
int FFMPEG::load_defaults(const char *path, const char *type,
char *codec, char *codec_options, int len)
{
if( asset->format != FILE_FFMPEG ) return;
if( text != asset->fformat )
strcpy(asset->fformat, text);
+ if( !asset->ff_format_options[0] )
+ load_format_options(asset, edl);
if( asset->audio_data && !asset->ff_audio_options[0] ) {
if( !load_defaults("audio", text, asset->acodec,
asset->ff_audio_options, sizeof(asset->ff_audio_options)) )
if( !fp ) return 0;
int ret = read_options(fp, options, opts);
fclose(fp);
- AVDictionaryEntry *tag = av_dict_get(opts, "id", NULL, 0);
- if( tag ) st->id = strtol(tag->value,0,0);
+ if( !ret && st ) {
+ AVDictionaryEntry *tag = av_dict_get(opts, "id", NULL, 0);
+ if( tag ) st->id = strtol(tag->value,0,0);
+ }
return ret;
}
if( ffvideo.size() > 0 )
report("\n%d video stream%s\n",ffvideo.size(), ffvideo.size()!=1 ? "s" : "");
for( int vidx=0; vidx<ffvideo.size(); ++vidx ) {
+ const char *unkn = _("(unkn)");
FFVideoStream *vid = ffvideo[vidx];
AVStream *st = vid->st;
AVCodecID codec_id = st->codecpar->codec_id;
report(_("vid%d (%d), id 0x%06x:\n"), vid->idx, vid->fidx, codec_id);
const AVCodecDescriptor *desc = avcodec_descriptor_get(codec_id);
- report(" video%d %s", vidx+1, desc ? desc->name : " (unkn)");
+ report(" video%d %s ", vidx+1, desc ? desc->name : unkn);
report(" %dx%d %5.2f", vid->width, vid->height, vid->frame_rate);
AVPixelFormat pix_fmt = (AVPixelFormat)st->codecpar->format;
const char *pfn = av_get_pix_fmt_name(pix_fmt);
- report(" pix %s\n", pfn ? pfn : "(unkn)");
+ report(" pix %s\n", pfn ? pfn : unkn);
+ enum AVColorSpace space = st->codecpar->color_space;
+ const char *nm = av_color_space_name(space);
+ report(" color space:%s", nm ? nm : unkn);
+ enum AVColorRange range = st->codecpar->color_range;
+ const char *rg = av_color_range_name(range);
+ report("/ range:%s\n", rg ? rg : unkn);
double secs = to_secs(st->duration, st->time_base);
int64_t length = secs * vid->frame_rate + 0.5;
double ofs = to_secs((vid->nudge - st->start_time), st->time_base);
vid->width = avpar->width;
vid->height = avpar->height;
vid->frame_rate = !framerate.den ? 0 : (double)framerate.num / framerate.den;
+ switch( avpar->color_range ) {
+ case AVCOL_RANGE_MPEG:
+ vid->color_range = BC_COLORS_MPEG;
+ break;
+ case AVCOL_RANGE_JPEG:
+ vid->color_range = BC_COLORS_JPEG;
+ break;
+ default:
+ vid->color_range = !file_base ? BC_COLORS_JPEG :
+ file_base->file->preferences->yuv_color_range;
+ break;
+ }
+ switch( avpar->color_space ) {
+ case AVCOL_SPC_BT470BG:
+ case AVCOL_SPC_SMPTE170M:
+ vid->color_space = BC_COLORS_BT601;
+ break;
+ case AVCOL_SPC_BT709:
+ vid->color_space = BC_COLORS_BT709;
+ break;
+ case AVCOL_SPC_BT2020_NCL:
+ case AVCOL_SPC_BT2020_CL:
+ vid->color_space = BC_COLORS_BT2020;
+ break;
+ default:
+ vid->color_space = !file_base ? BC_COLORS_BT601 :
+ file_base->file->preferences->yuv_color_space;
+ break;
+ }
double secs = to_secs(st->duration, st->time_base);
vid->length = secs * vid->frame_rate;
vid->aspect_ratio = (double)st->sample_aspect_ratio.num / st->sample_aspect_ratio.den;
}
if( bad_time && !(fflags & FF_BAD_TIMES) ) {
fflags |= FF_BAD_TIMES;
- printf("FFMPEG::open_decoder: some stream have bad times: %s\n",
+ printf(_("FFMPEG::open_decoder: some stream have bad times: %s\n"),
fmt_ctx->url);
}
ff_unlock();
vid->width = asset->width;
vid->height = asset->height;
vid->frame_rate = asset->frame_rate;
-
+ if( (vid->color_range = asset->ff_color_range) < 0 )
+ vid->color_range = file_base->file->preferences->yuv_color_range;
+ switch( vid->color_range ) {
+ case BC_COLORS_MPEG: ctx->color_range = AVCOL_RANGE_MPEG; break;
+ case BC_COLORS_JPEG: ctx->color_range = AVCOL_RANGE_JPEG; break;
+ }
+ if( (vid->color_space = asset->ff_color_space) < 0 )
+ vid->color_space = file_base->file->preferences->yuv_color_space;
+ switch( vid->color_space ) {
+ case BC_COLORS_BT601: ctx->colorspace = AVCOL_SPC_SMPTE170M; break;
+ case BC_COLORS_BT709: ctx->colorspace = AVCOL_SPC_BT709; break;
+ case BC_COLORS_BT2020: ctx->colorspace = AVCOL_SPC_BT2020_NCL; break;
+ }
AVPixelFormat pix_fmt = av_get_pix_fmt(asset->ff_pixel_format);
if( opt_hw_dev != 0 ) {
AVHWDeviceType hw_type = vid->encode_hw_activate(opt_hw_dev);
fmt_ctx->url);
return -1;
}
-
+ if( !strcmp(file_format, "image2") ) {
+ Asset *asset = file_base->asset;
+ const char *filename = asset->path;
+ FILE *fp = fopen(filename,"w");
+ if( !fp ) {
+ eprintf(_("Cant write image2 header file: %s\n %m"), filename);
+ return 1;
+ }
+ fprintf(fp, "IMAGE2\n");
+ fprintf(fp, "# Frame rate: %f\n", asset->frame_rate);
+ fprintf(fp, "# Width: %d\n", asset->width);
+ fprintf(fp, "# Height: %d\n", asset->height);
+ fclose(fp);
+ }
int prog_id = 1;
AVProgram *prog = av_new_program(fmt_ctx, prog_id);
for( int i=0; i< ffvideo.size(); ++i )
char option_path[BCTEXTLEN];
set_option_path(option_path, "format/%s", file_format);
read_options(option_path, fopts, 1);
- ret = avformat_write_header(fmt_ctx, &fopts);
+ av_dict_copy(&fopts, opts, 0);
+ if( scan_options(file_base->asset->ff_format_options, fopts, 0) ) {
+ eprintf(_("bad format options %s\n"), file_base->asset->path);
+ ret = -1;
+ }
+ if( ret >= 0 )
+ ret = avformat_write_header(fmt_ctx, &fopts);
if( ret < 0 ) {
ff_err(ret, "FFMPEG::encode_activate: write header failed %s\n",
fmt_ctx->url);
return ffvideo[stream]->aspect_ratio;
}
-const char* FFMPEG::ff_video_format(int stream)
+const char* FFMPEG::ff_video_codec(int stream)
{
AVStream *st = ffvideo[stream]->st;
AVCodecID id = st->codecpar->codec_id;
return desc ? desc->name : _("Unknown");
}
+int FFMPEG::ff_color_range(int stream)
+{
+ return ffvideo[stream]->color_range;
+}
+
+int FFMPEG::ff_color_space(int stream)
+{
+ return ffvideo[stream]->color_space;
+}
+
double FFMPEG::ff_frame_rate(int stream)
{
return ffvideo[stream]->frame_rate;
return &file_base->file->preferences->use_hw_dev[0];
}
+Preferences *FFMPEG::ff_prefs()
+{
+ return !file_base ? 0 : file_base->file->preferences;
+}
+
int FFVideoStream::create_filter(const char *filter_spec, AVCodecParameters *avpar)
{
avfilter_register_all();