return writing;
}
+// NOTE(review): hw_pix_fmt is file-global state shared by every stream and
+// really belongs in the per-stream context; access is serialized by ff_lock.
static AVPixelFormat hw_pix_fmt = AV_PIX_FMT_NONE; // protected by ff_lock
+
+// Generates a trivial AVCodecContext::get_format callback that ignores the
+// offered pix_fmts list and always reports the fixed format "fmt" -- used to
+// pin a codec context to one HW surface format once it has been chosen.
+#define GET_HW_PIXFMT(fn, fmt) \
+static AVPixelFormat get_hw_##fn(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) { \
+ return fmt; \
+}
+GET_HW_PIXFMT(vaapi, AV_PIX_FMT_VAAPI)
+GET_HW_PIXFMT(vdpau, AV_PIX_FMT_VDPAU)
+GET_HW_PIXFMT(cuda, AV_PIX_FMT_CUDA)
+GET_HW_PIXFMT(nv12, AV_PIX_FMT_NV12)
+
+// AVCodecContext::get_format callback.  Scans the formats the codec offers
+// for the one requested via the hw_pix_fmt global; on a match it installs the
+// corresponding fixed GET_HW_PIXFMT callback so later format negotiations no
+// longer depend on the global.  Returns AV_PIX_FMT_NONE (and clears
+// hw_pix_fmt) when the requested format is not offered.
static enum AVPixelFormat get_hw_format(AVCodecContext *ctx,
const enum AVPixelFormat *pix_fmts)
{
- for( const enum AVPixelFormat *p=pix_fmts; *p!=AV_PIX_FMT_NONE; ++p )
- if( *p == hw_pix_fmt ) return *p;
+ for( const enum AVPixelFormat *p=pix_fmts; *p!=AV_PIX_FMT_NONE; ++p ) {
+ if( *p != hw_pix_fmt ) continue;
+ // Requested format found: pin the context to it from now on.
+ switch( *p ) {
+ case AV_PIX_FMT_VAAPI: ctx->get_format = get_hw_vaapi; return *p;
+ case AV_PIX_FMT_VDPAU: ctx->get_format = get_hw_vdpau; return *p;
+ case AV_PIX_FMT_CUDA: ctx->get_format = get_hw_cuda; return *p;
+ case AV_PIX_FMT_NV12: ctx->get_format = get_hw_nv12; return *p;
+ default:
+ // Matched hw_pix_fmt but no fixed callback exists for it.
+ fprintf(stderr, "Unknown HW surface format: %s\n",
+ av_get_pix_fmt_name(*p));
+ continue;
+ }
+ }
fprintf(stderr, "Failed to get HW surface format.\n");
+ // Clear the global so the caller can detect the failed negotiation.
return hw_pix_fmt = AV_PIX_FMT_NONE;
}
}
FFVideoStream::FFVideoStream(FFMPEG *ffmpeg, AVStream *strm, int idx, int fidx)
- : FFStream(ffmpeg, strm, fidx)
+ : FFStream(ffmpeg, strm, fidx),
+ FFVideoConvert(ffmpeg->ff_prefs())
{
this->idx = idx;
width = height = 0;
const char *hw_dev = ffmpeg->opt_hw_dev;
if( !hw_dev ) hw_dev = getenv("CIN_HW_DEV");
if( !hw_dev ) hw_dev = ffmpeg->ff_hw_dev();
- if( hw_dev && *hw_dev && strcmp(_("none"), hw_dev) ) {
+ if( hw_dev && *hw_dev &&
+ strcmp("none", hw_dev) && strcmp(_("none"), hw_dev) ) {
type = av_hwdevice_find_type_by_name(hw_dev);
if( type == AV_HWDEVICE_TYPE_NONE ) {
fprintf(stderr, "Device type %s is not supported.\n", hw_dev);
frame->interlaced_frame = interlaced;
frame->top_field_first = top_field_first;
}
+ // VAAPI surfaces bypass the generic FFStream::encode_frame() path below:
+ // send the hw frame straight to the encoder and drain its packet output.
+ if( frame && frame->format == AV_PIX_FMT_VAAPI ) { // ugly
+ int ret = avcodec_send_frame(avctx, frame);
+ for( int retry=MAX_RETRY; !ret && --retry>=0; ) {
+ FFPacket pkt; av_init_packet(pkt);
+ pkt->data = NULL; pkt->size = 0;
+ if( (ret=avcodec_receive_packet(avctx, pkt)) < 0 ) {
+ // EAGAIN only means the encoder wants more input -- not an error.
+ if( ret == AVERROR(EAGAIN) ) ret = 0; // weird
+ break;
+ }
+ ret = write_packet(pkt);
+ // NOTE(review): stream_index is set AFTER write_packet() and just before
+ // unref, so this store appears to have no effect -- confirm intent.
+ pkt->stream_index = 0;
+ av_packet_unref(pkt);
+ }
+ if( ret < 0 ) {
+ ff_err(ret, "FFStream::encode_frame: vaapi encode failed.\nfile: %s\n",
+ ffmpeg->fmt_ctx->url);
+ return -1;
+ }
+ return 0;
+ }
return FFStream::encode_frame(frame);
}
}
int FFVideoConvert::convert_picture_vframe(VFrame *frame, AVFrame *ip, AVFrame *ipic)
-{
+{ // picture = vframe
int cmodel = frame->get_color_model();
AVPixelFormat ofmt = color_model_to_pix_fmt(cmodel);
if( ofmt == AV_PIX_FMT_NB ) return -1;
" sws_getCachedContext() failed\n");
return -1;
}
+
+ // Translate Cinelerra's yuv range/space preferences into swscale terms.
+ // color_range: 1 = full range (JPEG), 0 = limited range (MPEG).
+ int color_range = 0;
+ switch( preferences->yuv_color_range ) {
+ case BC_COLORS_JPEG: color_range = 1; break;
+ case BC_COLORS_MPEG: color_range = 0; break;
+ }
+ int ff_color_space = SWS_CS_ITU601;
+ switch( preferences->yuv_color_space ) {
+ case BC_COLORS_BT601: ff_color_space = SWS_CS_ITU601; break;
+ case BC_COLORS_BT709: ff_color_space = SWS_CS_ITU709; break;
+ case BC_COLORS_BT2020: ff_color_space = SWS_CS_BT2020; break;
+ }
+ const int *color_table = sws_getCoefficients(ff_color_space);
+
+ // Re-program the cached sws context only when range or coefficients differ.
+ // NOTE(review): tables are compared by pointer; this assumes
+ // sws_getCoefficients() returns shared static tables -- TODO confirm.
+ int *inv_table, *table, src_range, dst_range;
+ int brightness, contrast, saturation;
+ if( !sws_getColorspaceDetails(convert_ctx,
+ &inv_table, &src_range, &table, &dst_range,
+ &brightness, &contrast, &saturation) ) {
+ if( src_range != color_range || dst_range != color_range ||
+ inv_table != color_table || table != color_table )
+ sws_setColorspaceDetails(convert_ctx,
+ color_table, color_range, color_table, color_range,
+ brightness, contrast, saturation);
+ }
+
int ret = sws_scale(convert_ctx, ip->data, ip->linesize, 0, ip->height,
ipic->data, ipic->linesize);
if( ret < 0 ) {
}
int FFVideoConvert::convert_vframe_picture(VFrame *frame, AVFrame *op, AVFrame *opic)
-{
+{ // vframe = picture
int cmodel = frame->get_color_model();
AVPixelFormat ifmt = color_model_to_pix_fmt(cmodel);
if( ifmt == AV_PIX_FMT_NB ) return -1;
" sws_getCachedContext() failed\n");
return -1;
}
+
+
+ // Same preference -> swscale mapping as in convert_picture_vframe().
+ // color_range: 1 = full range (JPEG), 0 = limited range (MPEG).
+ int color_range = 0;
+ switch( preferences->yuv_color_range ) {
+ case BC_COLORS_JPEG: color_range = 1; break;
+ case BC_COLORS_MPEG: color_range = 0; break;
+ }
+ int ff_color_space = SWS_CS_ITU601;
+ switch( preferences->yuv_color_space ) {
+ case BC_COLORS_BT601: ff_color_space = SWS_CS_ITU601; break;
+ case BC_COLORS_BT709: ff_color_space = SWS_CS_ITU709; break;
+ case BC_COLORS_BT2020: ff_color_space = SWS_CS_BT2020; break;
+ }
+ const int *color_table = sws_getCoefficients(ff_color_space);
+
+ // Output side only: src (inv_table/src_range) is passed back unchanged,
+ // unlike the decode-direction variant which forces both sides.
+ int *inv_table, *table, src_range, dst_range;
+ int brightness, contrast, saturation;
+ if( !sws_getColorspaceDetails(convert_ctx,
+ &inv_table, &src_range, &table, &dst_range,
+ &brightness, &contrast, &saturation) ) {
+ if( dst_range != color_range || table != color_table )
+ sws_setColorspaceDetails(convert_ctx,
+ inv_table, src_range, color_table, color_range,
+ brightness, contrast, saturation);
+ }
+
int ret = sws_scale(convert_ctx, opic->data, opic->linesize, 0, frame->get_h(),
op->data, op->linesize);
if( ret < 0 ) {
}
if( bad_time && !(fflags & FF_BAD_TIMES) ) {
fflags |= FF_BAD_TIMES;
- printf("FFMPEG::open_decoder: some stream have bad times: %s\n",
+ printf(_("FFMPEG::open_decoder: some stream have bad times: %s\n"),
fmt_ctx->url);
}
ff_unlock();
return &file_base->file->preferences->use_hw_dev[0];
}
+// Preferences accessor handed to FFVideoConvert: the Preferences hang off the
+// owning file; returns 0 when this FFMPEG has no file_base attached.
+Preferences *FFMPEG::ff_prefs()
+{
+ if( !file_base ) return 0;
+ return file_base->file->preferences;
+}
+
int FFVideoStream::create_filter(const char *filter_spec, AVCodecParameters *avpar)
{
avfilter_register_all();