X-Git-Url: https://git.cinelerra-gg.org/git/?p=goodguy%2Fcinelerra.git;a=blobdiff_plain;f=cinelerra-5.1%2Fcinelerra%2Fffmpeg.C;h=499974f0e201ac30f3937dc90125d554d746d47c;hp=70098ca189c5267b3aeea8bfdb9de8ea19f051ff;hb=a6e4ede4b9a11b56b3aece044ff2a1546630ca38;hpb=cdb8b00f2f7ecf0b4910a40e1d90a87540d2891d

diff --git a/cinelerra-5.1/cinelerra/ffmpeg.C b/cinelerra-5.1/cinelerra/ffmpeg.C
index 70098ca1..499974f0 100644
--- a/cinelerra-5.1/cinelerra/ffmpeg.C
+++ b/cinelerra-5.1/cinelerra/ffmpeg.C
@@ -275,6 +275,7 @@ FFStream::FFStream(FFMPEG *ffmpeg, AVStream *st, int fidx)
 	flushed = 0;
 	need_packet = 1;
 	frame = fframe = 0;
+	probe_frame = 0;
 	bsfc = 0;
 	stats_fp = 0;
 	stats_filename = 0;
@@ -284,6 +285,7 @@ FFStream::FFStream(FFMPEG *ffmpeg, AVStream *st, int fidx)
 
 FFStream::~FFStream()
 {
+	frm_lock->lock("FFStream::~FFStream");
 	if( reading > 0 || writing > 0 ) avcodec_close(avctx);
 	if( avctx ) avcodec_free_context(&avctx);
 	if( fmt_ctx ) avformat_close_input(&fmt_ctx);
@@ -293,6 +295,8 @@ FFStream::~FFStream()
 	if( filter_graph ) avfilter_graph_free(&filter_graph);
 	if( frame ) av_frame_free(&frame);
 	if( fframe ) av_frame_free(&fframe);
+	if( probe_frame ) av_frame_free(&probe_frame);
+	frm_lock->unlock();
 	delete frm_lock;
 	if( stats_fp ) fclose(stats_fp);
 	if( stats_in ) av_freep(&stats_in);
@@ -333,12 +337,35 @@ int FFStream::encode_activate()
 	return writing;
 }
 
+// this is a global parameter that really should be in the context
 static AVPixelFormat hw_pix_fmt = AV_PIX_FMT_NONE; // protected by ff_lock
+
+// goofy maneuver to attach a hw_format to an av_context
+#define GET_HW_PIXFMT(fn, fmt) \
+static AVPixelFormat get_hw_##fn(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) { \
+	return fmt; \
+}
+GET_HW_PIXFMT(vaapi, AV_PIX_FMT_VAAPI)
+GET_HW_PIXFMT(vdpau, AV_PIX_FMT_VDPAU)
+GET_HW_PIXFMT(cuda, AV_PIX_FMT_CUDA)
+GET_HW_PIXFMT(nv12, AV_PIX_FMT_NV12)
+
 static enum AVPixelFormat get_hw_format(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts)
 {
-	for( const enum AVPixelFormat *p=pix_fmts; *p!=AV_PIX_FMT_NONE; ++p )
-		if( *p == hw_pix_fmt ) return *p;
+	for( const enum AVPixelFormat *p=pix_fmts; *p!=AV_PIX_FMT_NONE; ++p ) {
+		if( *p != hw_pix_fmt ) continue;
+		switch( *p ) {
+		case AV_PIX_FMT_VAAPI: ctx->get_format = get_hw_vaapi;  return *p;
+		case AV_PIX_FMT_VDPAU: ctx->get_format = get_hw_vdpau;  return *p;
+		case AV_PIX_FMT_CUDA:  ctx->get_format = get_hw_cuda;   return *p;
+		case AV_PIX_FMT_NV12:  ctx->get_format = get_hw_nv12;   return *p;
+		default:
+			fprintf(stderr, "Unknown HW surface format: %s\n",
+				av_get_pix_fmt_name(*p));
+			continue;
+		}
+	}
 	fprintf(stderr, "Failed to get HW surface format.\n");
 	return hw_pix_fmt = AV_PIX_FMT_NONE;
 }
@@ -396,7 +423,6 @@ int FFStream::decode_activate()
 		}
 		if( ret >= 0 && hw_type != AV_HWDEVICE_TYPE_NONE ) {
 			ret = decode_hw_format(decoder, hw_type);
-			if( !ret ) hw_type = AV_HWDEVICE_TYPE_NONE;
 		}
 		if( ret >= 0 ) {
 			avcodec_parameters_to_context(avctx, st->codecpar);
@@ -404,35 +430,35 @@ int FFStream::decode_activate()
 			avctx->thread_count = ffmpeg->ff_cpus();
 			ret = avcodec_open2(avctx, decoder, &copts);
 		}
+		AVFrame *hw_frame = 0;
 		if( ret >= 0 && hw_type != AV_HWDEVICE_TYPE_NONE ) {
-			if( need_packet ) {
-				need_packet = 0;
-				ret = read_packet();
-			}
-			if( ret >= 0 ) {
-				AVPacket *pkt = (AVPacket*)ipkt;
-				ret = avcodec_send_packet(avctx, pkt);
-				if( ret < 0 || hw_pix_fmt == AV_PIX_FMT_NONE ) {
-					ff_err(ret, "HW device init failed, using SW decode.\nfile:%s\n",
-						ffmpeg->fmt_ctx->url);
-					avcodec_close(avctx);
-					avcodec_free_context(&avctx);
-					av_buffer_unref(&hw_device_ctx);
-					hw_device_ctx = 0;
-					hw_type = AV_HWDEVICE_TYPE_NONE;
-					int flags = AVSEEK_FLAG_BACKWARD | AVSEEK_FLAG_ANY;
-					int idx = st->index;
-					av_seek_frame(fmt_ctx, idx, INT64_MIN, flags);
-					need_packet = 1; flushed = 0;
-					seeked = 1; st_eof(0);
-					ret = 0;
-					continue;
-				}
+			if( !(hw_frame=av_frame_alloc()) ) {
+				fprintf(stderr, "FFStream::decode_activate: av_frame_alloc failed\n");
+				ret = AVERROR(ENOMEM);
 			}
+			if( ret >= 0 )
+				ret = decode(hw_frame);
 		}
-		if( ret >= 0 ) {
-			reading = 1;
+		if( ret < 0 && hw_type != AV_HWDEVICE_TYPE_NONE ) {
+			ff_err(ret, "HW device init failed, using SW decode.\nfile:%s\n",
+				ffmpeg->fmt_ctx->url);
+			avcodec_close(avctx);
+			avcodec_free_context(&avctx);
+			av_buffer_unref(&hw_device_ctx);
+			hw_device_ctx = 0;
+			av_frame_free(&hw_frame);
+			hw_type = AV_HWDEVICE_TYPE_NONE;
+			int flags = AVSEEK_FLAG_BACKWARD | AVSEEK_FLAG_ANY;
+			int idx = st->index;
+			av_seek_frame(fmt_ctx, idx, 0, flags);
+			need_packet = 1; flushed = 0;
+			seeked = 1; st_eof(0);
+			ret = 0;
+			continue;
 		}
+		probe_frame = hw_frame;
+		if( ret >= 0 )
+			reading = 1;
 		else
 			eprintf(_("open decoder failed\n"));
 	}
@@ -460,9 +486,14 @@ int FFStream::read_packet()
 
 int FFStream::decode(AVFrame *frame)
 {
+	if( probe_frame ) { // hw probe reads first frame
+		av_frame_ref(frame, probe_frame);
+		av_frame_free(&probe_frame);
+		return 1;
+	}
 	int ret = 0;
 	int retries = MAX_RETRY;
-
+	frm_lock->lock("FFStream::decode");
 	while( ret >= 0 && !flushed && --retries >= 0 ) {
 		if( need_packet ) {
 			if( (ret=read_packet()) < 0 ) break;
@@ -485,6 +516,7 @@ int FFStream::decode(AVFrame *frame)
 			flushed = st_eof();
 		}
 	}
+	frm_lock->unlock();
 
 	if( retries < 0 ) {
 		fprintf(stderr, "FFStream::decode: Retry limit\n");
@@ -697,7 +729,8 @@ int FFStream::seek(int64_t no, double rate)
 		tstmp = av_rescale_q(tstmp, time_base, AV_TIME_BASE_Q);
 		idx = -1;
 #endif
-
+	frm_lock->lock("FFStream::seek");
+	av_frame_free(&probe_frame);
 	avcodec_flush_buffers(avctx);
 	avformat_flush(fmt_ctx);
 #if 0
@@ -743,6 +776,7 @@ int FFStream::seek(int64_t no, double rate)
 			break;
 		}
 	}
+	frm_lock->unlock();
 	if( ret < 0 ) {
 		printf("** seek fail %jd, %jd\n", pos, tstmp);
 		seeked = need_packet = 0;
@@ -1010,7 +1044,8 @@ IndexMarks *FFAudioStream::get_markers()
 }
 
 FFVideoStream::FFVideoStream(FFMPEG *ffmpeg, AVStream *strm, int idx, int fidx)
- : FFStream(ffmpeg, strm, fidx)
+ : FFStream(ffmpeg, strm, fidx),
+   FFVideoConvert(ffmpeg->ff_prefs())
 {
 	this->idx = idx;
 	width = height = 0;
@@ -1019,6 +1054,8 @@ FFVideoStream::FFVideoStream(FFMPEG *ffmpeg, AVStream *strm, int idx, int fidx)
 	length = 0;
 	interlaced = 0;
 	top_field_first = 0;
+	color_space = -1;
+	color_range = -1;
 }
 
 FFVideoStream::~FFVideoStream()
@@ -1031,7 +1068,8 @@ AVHWDeviceType FFVideoStream::decode_hw_activate()
 	const char *hw_dev = ffmpeg->opt_hw_dev;
 	if( !hw_dev ) hw_dev = getenv("CIN_HW_DEV");
 	if( !hw_dev ) hw_dev = ffmpeg->ff_hw_dev();
-	if( hw_dev && *hw_dev && strcmp(_("none"), hw_dev) ) {
+	if( hw_dev && *hw_dev &&
+	    strcmp("none", hw_dev) && strcmp(_("none"), hw_dev) ) {
 		type = av_hwdevice_find_type_by_name(hw_dev);
 		if( type == AV_HWDEVICE_TYPE_NONE ) {
 			fprintf(stderr, "Device type %s is not supported.\n", hw_dev);
@@ -1053,6 +1091,7 @@ int FFVideoStream::decode_hw_format(AVCodec *decoder, AVHWDeviceType type)
 		if( !config ) {
 			fprintf(stderr, "Decoder %s does not support device type %s.\n",
 				decoder->name, av_hwdevice_get_type_name(type));
+			ret = -1;
 			break;
 		}
 		if( (config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX) != 0 &&
@@ -1069,9 +1108,11 @@ int FFVideoStream::decode_hw_format(AVCodec *decoder, AVHWDeviceType type)
 		avctx->hw_device_ctx = av_buffer_ref(hw_device_ctx);
 		ret = 1;
 	}
-	else
+	else {
 		ff_err(ret, "Failed HW device create.\ndev:%s\n",
 			av_hwdevice_get_type_name(type));
+		ret = -1;
+	}
 	}
 	return ret;
 }
@@ -1343,7 +1384,7 @@ int FFVideoConvert::convert_picture_vframe(VFrame *frame, AVFrame *ip)
 }
 
 int FFVideoConvert::convert_picture_vframe(VFrame *frame, AVFrame *ip, AVFrame *ipic)
-{
+{ // picture = vframe
 	int cmodel = frame->get_color_model();
 	AVPixelFormat ofmt = color_model_to_pix_fmt(cmodel);
 	if( ofmt == AV_PIX_FMT_NB ) return -1;
@@ -1397,6 +1438,32 @@ int FFVideoConvert::convert_picture_vframe(VFrame *frame, AVFrame *ip, AVFrame *
 			" sws_getCachedContext() failed\n");
 		return -1;
 	}
+
+	int color_range = 0;
+	switch( preferences->yuv_color_range ) {
+	case BC_COLORS_JPEG: color_range = 1; break;
+	case BC_COLORS_MPEG: color_range = 0; break;
+	}
+	int color_space = SWS_CS_ITU601;
+	switch( preferences->yuv_color_space ) {
+	case BC_COLORS_BT601: color_space = SWS_CS_ITU601; break;
+	case BC_COLORS_BT709: color_space = SWS_CS_ITU709; break;
+	case BC_COLORS_BT2020: color_space = SWS_CS_BT2020; break;
+	}
+	const int *color_table = sws_getCoefficients(color_space);
+
+	int *inv_table, *table, src_range, dst_range;
+	int brightness, contrast, saturation;
+	if( !sws_getColorspaceDetails(convert_ctx,
+			&inv_table, &src_range, &table, &dst_range,
+			&brightness, &contrast, &saturation) ) {
+		if( src_range != color_range || dst_range != color_range ||
+		    inv_table != color_table || table != color_table )
+			sws_setColorspaceDetails(convert_ctx,
+				color_table, color_range, color_table, color_range,
+				brightness, contrast, saturation);
+	}
+
 	int ret = sws_scale(convert_ctx, ip->data, ip->linesize, 0, ip->height,
 		ipic->data, ipic->linesize);
 	if( ret < 0 ) {
@@ -1461,7 +1528,7 @@ int FFVideoConvert::convert_vframe_picture(VFrame *frame, AVFrame *op)
 }
 
 int FFVideoConvert::convert_vframe_picture(VFrame *frame, AVFrame *op, AVFrame *opic)
-{
+{ // vframe = picture
 	int cmodel = frame->get_color_model();
 	AVPixelFormat ifmt = color_model_to_pix_fmt(cmodel);
 	if( ifmt == AV_PIX_FMT_NB ) return -1;
@@ -1499,6 +1566,32 @@ int FFVideoConvert::convert_vframe_picture(VFrame *frame, AVFrame *op, AVFrame *
 			" sws_getCachedContext() failed\n");
 		return -1;
 	}
+
+
+	int color_range = 0;
+	switch( preferences->yuv_color_range ) {
+	case BC_COLORS_JPEG: color_range = 1; break;
+	case BC_COLORS_MPEG: color_range = 0; break;
+	}
+	int color_space = SWS_CS_ITU601;
+	switch( preferences->yuv_color_space ) {
+	case BC_COLORS_BT601: color_space = SWS_CS_ITU601; break;
+	case BC_COLORS_BT709: color_space = SWS_CS_ITU709; break;
+	case BC_COLORS_BT2020: color_space = SWS_CS_BT2020; break;
+	}
+	const int *color_table = sws_getCoefficients(color_space);
+
+	int *inv_table, *table, src_range, dst_range;
+	int brightness, contrast, saturation;
+	if( !sws_getColorspaceDetails(convert_ctx,
+			&inv_table, &src_range, &table, &dst_range,
+			&brightness, &contrast, &saturation) ) {
+		if( dst_range != color_range || table != color_table )
+			sws_setColorspaceDetails(convert_ctx,
+				inv_table, src_range, color_table, color_range,
+				brightness, contrast, saturation);
+	}
+
 	int ret = sws_scale(convert_ctx, opic->data, opic->linesize, 0, frame->get_h(),
 		op->data, op->linesize);
 	if( ret < 0 ) {
@@ -1927,6 +2020,20 @@ void FFMPEG::load_video_options(Asset *asset, EDL *edl)
 		scan_video_options(asset, edl);
 }
 
+void FFMPEG::scan_format_options(Asset *asset, EDL *edl)
+{
+}
+
+void FFMPEG::load_format_options(Asset *asset, EDL *edl)
+{
+	char options_path[BCTEXTLEN];
+	set_option_path(options_path, "format/%s", asset->fformat);
+	if( !load_options(options_path,
+			asset->ff_format_options,
+			sizeof(asset->ff_format_options)) )
+		scan_format_options(asset, edl);
+}
+
 int FFMPEG::load_defaults(const char *path, const char *type,
 	char *codec, char *codec_options, int len)
 {
@@ -1952,6 +2059,8 @@ void FFMPEG::set_asset_format(Asset *asset, EDL *edl, const char *text)
 	if( asset->format != FILE_FFMPEG ) return;
 	if( text != asset->fformat )
 		strcpy(asset->fformat, text);
+	if( !asset->ff_format_options[0] )
+		load_format_options(asset, edl);
 	if( asset->audio_data && !asset->ff_audio_options[0] ) {
 		if( !load_defaults("audio", text, asset->acodec,
 				asset->ff_audio_options, sizeof(asset->ff_audio_options)) )
@@ -2023,8 +2132,10 @@ int FFMPEG::scan_options(const char *options, AVDictionary *&opts, AVStream *st)
 	if( !fp ) return 0;
 	int ret = read_options(fp, options, opts);
 	fclose(fp);
-	AVDictionaryEntry *tag = av_dict_get(opts, "id", NULL, 0);
-	if( tag ) st->id = strtol(tag->value,0,0);
+	if( !ret && st ) {
+		AVDictionaryEntry *tag = av_dict_get(opts, "id", NULL, 0);
+		if( tag ) st->id = strtol(tag->value,0,0);
+	}
 	return ret;
 }
 
@@ -2168,16 +2279,23 @@ int FFMPEG::info(char *text, int len)
 	if( ffvideo.size() > 0 )
 		report("\n%d video stream%s\n",ffvideo.size(), ffvideo.size()!=1 ? "s" : "");
 	for( int vidx=0; vidx<ffvideo.size(); ++vidx ) {
 		FFVideoStream *vid = ffvideo[vidx];
 		AVStream *st = vid->st;
 		AVCodecID codec_id = st->codecpar->codec_id;
 		report(_("vid%d (%d), id 0x%06x:\n"), vid->idx, vid->fidx, codec_id);
 		const AVCodecDescriptor *desc = avcodec_descriptor_get(codec_id);
-		report(" video%d %s", vidx+1, desc ? desc->name : " (unkn)");
+		report(" video%d %s ", vidx+1, desc ? desc->name : unkn);
 		report(" %dx%d %5.2f", vid->width, vid->height, vid->frame_rate);
 		AVPixelFormat pix_fmt = (AVPixelFormat)st->codecpar->format;
 		const char *pfn = av_get_pix_fmt_name(pix_fmt);
-		report(" pix %s\n", pfn ? pfn : "(unkn)");
+		report(" pix %s\n", pfn ? pfn : unkn);
+		enum AVColorSpace space = st->codecpar->color_space;
+		const char *nm = av_color_space_name(space);
+		report(" color space:%s", nm ? nm : unkn);
+		enum AVColorRange range = st->codecpar->color_range;
+		const char *rg = av_color_range_name(range);
+		report("/ range:%s\n", rg ? rg : unkn);
 		double secs = to_secs(st->duration, st->time_base);
 		int64_t length = secs * vid->frame_rate + 0.5;
 		double ofs = to_secs((vid->nudge - st->start_time), st->time_base);
@@ -2338,6 +2456,35 @@ int FFMPEG::open_decoder()
 		vid->width = avpar->width;
 		vid->height = avpar->height;
 		vid->frame_rate = !framerate.den ? 0 : (double)framerate.num / framerate.den;
+		switch( avpar->color_range ) {
+		case AVCOL_RANGE_MPEG:
+			vid->color_range = BC_COLORS_MPEG;
+			break;
+		case AVCOL_RANGE_JPEG:
+			vid->color_range = BC_COLORS_JPEG;
+			break;
+		default:
+			vid->color_range = !file_base ? BC_COLORS_JPEG :
+				file_base->file->preferences->yuv_color_range;
+			break;
+		}
+		switch( avpar->color_space ) {
+		case AVCOL_SPC_BT470BG:
+		case AVCOL_SPC_SMPTE170M:
+			vid->color_space = BC_COLORS_BT601;
+			break;
+		case AVCOL_SPC_BT709:
+			vid->color_space = BC_COLORS_BT709;
+			break;
+		case AVCOL_SPC_BT2020_NCL:
+		case AVCOL_SPC_BT2020_CL:
+			vid->color_space = BC_COLORS_BT2020;
+			break;
+		default:
+			vid->color_space = !file_base ? BC_COLORS_BT601 :
+				file_base->file->preferences->yuv_color_space;
+			break;
+		}
 		double secs = to_secs(st->duration, st->time_base);
 		vid->length = secs * vid->frame_rate;
 		vid->aspect_ratio = (double)st->sample_aspect_ratio.num / st->sample_aspect_ratio.den;
@@ -2371,7 +2518,7 @@ int FFMPEG::open_decoder()
 	}
 	if( bad_time && !(fflags & FF_BAD_TIMES) ) {
 		fflags |= FF_BAD_TIMES;
-		printf("FFMPEG::open_decoder: some stream have bad times: %s\n",
+		printf(_("FFMPEG::open_decoder: some stream have bad times: %s\n"),
 			fmt_ctx->url);
 	}
 	ff_unlock();
@@ -2570,7 +2717,19 @@ int FFMPEG::open_encoder(const char *type, const char *spec)
 		vid->width = asset->width;
 		vid->height = asset->height;
 		vid->frame_rate = asset->frame_rate;
-
+		if( (vid->color_range = asset->ff_color_range) < 0 )
+			vid->color_range = file_base->file->preferences->yuv_color_range;
+		switch( vid->color_range ) {
+		case BC_COLORS_MPEG: ctx->color_range = AVCOL_RANGE_MPEG; break;
+		case BC_COLORS_JPEG: ctx->color_range = AVCOL_RANGE_JPEG; break;
+		}
+		if( (vid->color_space = asset->ff_color_space) < 0 )
+			vid->color_space = file_base->file->preferences->yuv_color_space;
+		switch( vid->color_space ) {
+		case BC_COLORS_BT601: ctx->colorspace = AVCOL_SPC_SMPTE170M; break;
+		case BC_COLORS_BT709: ctx->colorspace = AVCOL_SPC_BT709; break;
+		case BC_COLORS_BT2020: ctx->colorspace = AVCOL_SPC_BT2020_NCL; break;
+		}
 		AVPixelFormat pix_fmt = av_get_pix_fmt(asset->ff_pixel_format);
 		if( opt_hw_dev != 0 ) {
 			AVHWDeviceType hw_type = vid->encode_hw_activate(opt_hw_dev);
@@ -2820,7 +2979,20 @@ int FFMPEG::encode_activate()
 			fmt_ctx->url);
 		return -1;
 	}
-
+	if( !strcmp(file_format, "image2") ) {
+		Asset *asset = file_base->asset;
+		const char *filename = asset->path;
+		FILE *fp = fopen(filename,"w");
+		if( !fp ) {
+			eprintf(_("Cant write image2 header file: %s\n %m"), filename);
+			return 1;
+		}
+		fprintf(fp, "IMAGE2\n");
+		fprintf(fp, "# Frame rate: %f\n", asset->frame_rate);
+		fprintf(fp, "# Width: %d\n", asset->width);
+		fprintf(fp, "# Height: %d\n", asset->height);
+		fclose(fp);
+	}
 	int prog_id = 1;
 	AVProgram *prog = av_new_program(fmt_ctx, prog_id);
 	for( int i=0; i< ffvideo.size(); ++i )
@@ -2860,7 +3032,13 @@ int FFMPEG::encode_activate()
 		char option_path[BCTEXTLEN];
 		set_option_path(option_path, "format/%s", file_format);
 		read_options(option_path, fopts, 1);
-		ret = avformat_write_header(fmt_ctx, &fopts);
+		av_dict_copy(&fopts, opts, 0);
+		if( scan_options(file_base->asset->ff_format_options, fopts, 0) ) {
+			eprintf(_("bad format options %s\n"), file_base->asset->path);
+			ret = -1;
+		}
+		if( ret >= 0 )
+			ret = avformat_write_header(fmt_ctx, &fopts);
 		if( ret < 0 ) {
 			ff_err(ret, "FFMPEG::encode_activate: write header failed %s\n",
 				fmt_ctx->url);
@@ -3167,7 +3345,7 @@ float FFMPEG::ff_aspect_ratio(int stream)
 	return ffvideo[stream]->aspect_ratio;
 }
 
-const char* FFMPEG::ff_video_format(int stream)
+const char* FFMPEG::ff_video_codec(int stream)
 {
 	AVStream *st = ffvideo[stream]->st;
 	AVCodecID id = st->codecpar->codec_id;
@@ -3175,6 +3353,16 @@ const char* FFMPEG::ff_video_format(int stream)
 	return desc ? desc->name : _("Unknown");
 }
 
+int FFMPEG::ff_color_range(int stream)
+{
+	return ffvideo[stream]->color_range;
+}
+
+int FFMPEG::ff_color_space(int stream)
+{
+	return ffvideo[stream]->color_space;
+}
+
 double FFMPEG::ff_frame_rate(int stream)
 {
 	return ffvideo[stream]->frame_rate;
@@ -3205,6 +3393,11 @@ const char *FFMPEG::ff_hw_dev()
 	return &file_base->file->preferences->use_hw_dev[0];
 }
 
+Preferences *FFMPEG::ff_prefs()
+{
+	return !file_base ? 0 : file_base->file->preferences;
+}
+
 int FFVideoStream::create_filter(const char *filter_spec, AVCodecParameters *avpar)
 {
 	avfilter_register_all();