X-Git-Url: https://git.cinelerra-gg.org/git/?p=goodguy%2Fcinelerra.git;a=blobdiff_plain;f=cinelerra-5.1%2Fcinelerra%2Fffmpeg.C;h=237031ff000c86748ce103e0ead2aa12c08419f1;hp=6d1f535b28f108c03fe0f836bf3773d3d6eb16eb;hb=435f84402323118397a408c1b0c90aa59b321286;hpb=b290324e67a45e465f2d0ad5d0ee662b64343339

diff --git a/cinelerra-5.1/cinelerra/ffmpeg.C b/cinelerra-5.1/cinelerra/ffmpeg.C
index 6d1f535b..237031ff 100644
--- a/cinelerra-5.1/cinelerra/ffmpeg.C
+++ b/cinelerra-5.1/cinelerra/ffmpeg.C
@@ -333,12 +333,35 @@ int FFStream::encode_activate()
 	return writing;
 }
 
+// this is a global parameter that really should be in the context
 static AVPixelFormat hw_pix_fmt = AV_PIX_FMT_NONE; // protected by ff_lock
+
+// goofy maneuver to attach a hw_format to an av_context
+#define GET_HW_PIXFMT(fn, fmt) \
+static AVPixelFormat get_hw_##fn(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) { \
+	return fmt; \
+}
+GET_HW_PIXFMT(vaapi, AV_PIX_FMT_VAAPI)
+GET_HW_PIXFMT(vdpau, AV_PIX_FMT_VDPAU)
+GET_HW_PIXFMT(cuda, AV_PIX_FMT_CUDA)
+GET_HW_PIXFMT(nv12, AV_PIX_FMT_NV12)
+
 static enum AVPixelFormat get_hw_format(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts)
 {
-	for( const enum AVPixelFormat *p=pix_fmts; *p!=AV_PIX_FMT_NONE; ++p )
-		if( *p == hw_pix_fmt ) return *p;
+	for( const enum AVPixelFormat *p=pix_fmts; *p!=AV_PIX_FMT_NONE; ++p ) {
+		if( *p != hw_pix_fmt ) continue;
+		switch( *p ) {
+		case AV_PIX_FMT_VAAPI: ctx->get_format = get_hw_vaapi; return *p;
+		case AV_PIX_FMT_VDPAU: ctx->get_format = get_hw_vdpau; return *p;
+		case AV_PIX_FMT_CUDA:  ctx->get_format = get_hw_cuda;  return *p;
+		case AV_PIX_FMT_NV12:  ctx->get_format = get_hw_nv12;  return *p;
+		default:
+			fprintf(stderr, "Unknown HW surface format: %s\n",
+				av_get_pix_fmt_name(*p));
+			continue;
+		}
+	}
 	fprintf(stderr, "Failed to get HW surface format.\n");
 	return hw_pix_fmt = AV_PIX_FMT_NONE;
 }
 
@@ -1010,7 +1033,8 @@ IndexMarks *FFAudioStream::get_markers()
 }
 
 FFVideoStream::FFVideoStream(FFMPEG *ffmpeg, AVStream *strm, int idx, int fidx)
- : FFStream(ffmpeg, strm, fidx)
+ : FFStream(ffmpeg, strm, fidx),
+   FFVideoConvert(ffmpeg->ff_prefs())
 {
 	this->idx = idx;
 	width = height = 0;
@@ -1031,7 +1055,8 @@ AVHWDeviceType FFVideoStream::decode_hw_activate()
 	const char *hw_dev = ffmpeg->opt_hw_dev;
 	if( !hw_dev ) hw_dev = getenv("CIN_HW_DEV");
 	if( !hw_dev ) hw_dev = ffmpeg->ff_hw_dev();
-	if( hw_dev && *hw_dev && strcmp(_("none"), hw_dev) ) {
+	if( hw_dev && *hw_dev &&
+	    strcmp("none", hw_dev) && strcmp(_("none"), hw_dev) ) {
 		type = av_hwdevice_find_type_by_name(hw_dev);
 		if( type == AV_HWDEVICE_TYPE_NONE ) {
 			fprintf(stderr, "Device type %s is not supported.\n", hw_dev);
@@ -1252,6 +1277,26 @@ int FFVideoStream::encode_frame(AVFrame *frame)
 		frame->interlaced_frame = interlaced;
 		frame->top_field_first = top_field_first;
 	}
+	if( frame && frame->format == AV_PIX_FMT_VAAPI ) { // ugly
+		int ret = avcodec_send_frame(avctx, frame);
+		for( int retry=MAX_RETRY; !ret && --retry>=0; ) {
+			FFPacket pkt;  av_init_packet(pkt);
+			pkt->data = NULL;  pkt->size = 0;
+			if( (ret=avcodec_receive_packet(avctx, pkt)) < 0 ) {
+				if( ret == AVERROR(EAGAIN) ) ret = 0; // weird
+				break;
+			}
+			ret = write_packet(pkt);
+			pkt->stream_index = 0;
+			av_packet_unref(pkt);
+		}
+		if( ret < 0 ) {
+			ff_err(ret, "FFStream::encode_frame: vaapi encode failed.\nfile: %s\n",
+				ffmpeg->fmt_ctx->url);
+			return -1;
+		}
+		return 0;
+	}
 	return FFStream::encode_frame(frame);
 }
 
@@ -1323,7 +1368,7 @@ int FFVideoConvert::convert_picture_vframe(VFrame *frame, AVFrame *ip)
 }
 
 int FFVideoConvert::convert_picture_vframe(VFrame *frame, AVFrame *ip, AVFrame *ipic)
-{
+{	// picture = vframe
 	int cmodel = frame->get_color_model();
 	AVPixelFormat ofmt = color_model_to_pix_fmt(cmodel);
 	if( ofmt == AV_PIX_FMT_NB ) return -1;
@@ -1377,6 +1422,19 @@ int FFVideoConvert::convert_picture_vframe(VFrame *frame, AVFrame *ip, AVFrame *
 			" sws_getCachedContext() failed\n");
 		return -1;
 	}
+
+	int jpeg_range = preferences->yuv_color_range == BC_COLORS_JPEG ? 1 : 0;
+	int *inv_table, *table, src_range, dst_range;
+	int brightness, contrast, saturation;
+	if( !sws_getColorspaceDetails(convert_ctx,
+			&inv_table, &src_range, &table, &dst_range,
+			&brightness, &contrast, &saturation) ) {
+		if( src_range != jpeg_range || dst_range != jpeg_range )
+			sws_setColorspaceDetails(convert_ctx,
+				inv_table, jpeg_range, table, jpeg_range,
+				brightness, contrast, saturation);
+	}
+
 	int ret = sws_scale(convert_ctx, ip->data, ip->linesize, 0, ip->height,
 		ipic->data, ipic->linesize);
 	if( ret < 0 ) {
@@ -1441,7 +1499,7 @@ int FFVideoConvert::convert_vframe_picture(VFrame *frame, AVFrame *op)
 }
 
 int FFVideoConvert::convert_vframe_picture(VFrame *frame, AVFrame *op, AVFrame *opic)
-{
+{	// vframe = picture
 	int cmodel = frame->get_color_model();
 	AVPixelFormat ifmt = color_model_to_pix_fmt(cmodel);
 	if( ifmt == AV_PIX_FMT_NB ) return -1;
@@ -1479,6 +1537,19 @@ int FFVideoConvert::convert_vframe_picture(VFrame *frame, AVFrame *op, AVFrame *
 			" sws_getCachedContext() failed\n");
 		return -1;
 	}
+
+	int jpeg_range = preferences->yuv_color_range == BC_COLORS_JPEG ? 1 : 0;
+	int *inv_table, *table, src_range, dst_range;
+	int brightness, contrast, saturation;
+	if( !sws_getColorspaceDetails(convert_ctx,
+			&inv_table, &src_range, &table, &dst_range,
+			&brightness, &contrast, &saturation) ) {
+		if( dst_range != jpeg_range )
+			sws_setColorspaceDetails(convert_ctx,
+				inv_table, src_range, table, jpeg_range,
+				brightness, contrast, saturation);
+	}
+
 	int ret = sws_scale(convert_ctx, opic->data, opic->linesize, 0, frame->get_h(),
 		op->data, op->linesize);
 	if( ret < 0 ) {
@@ -2351,7 +2422,7 @@ int FFMPEG::open_decoder()
 	}
 	if( bad_time && !(fflags & FF_BAD_TIMES) ) {
 		fflags |= FF_BAD_TIMES;
-		printf("FFMPEG::open_decoder: some stream have bad times: %s\n",
+		printf(_("FFMPEG::open_decoder: some stream have bad times: %s\n"),
 			fmt_ctx->url);
 	}
 	ff_unlock();
@@ -3185,6 +3256,11 @@ const char *FFMPEG::ff_hw_dev()
 	return &file_base->file->preferences->use_hw_dev[0];
 }
 
+Preferences *FFMPEG::ff_prefs()
+{
+	return !file_base ? 0 : file_base->file->preferences;
+}
+
 int FFVideoStream::create_filter(const char *filter_spec, AVCodecParameters *avpar)
 {
 	avfilter_register_all();