#include "libmjpeg.h"
#include "mainerror.h"
#include "mwindow.h"
+#include "preferences.h"
#include "vframe.h"
#ifdef FFMPEG3
fst->dequeue(this);
}
+// Replace this FFrame's owned AVFrame with a hardware-backed frame.
+// Frees the previously owned frame and takes ownership of the new one
+// (caller must not free `frame` afterwards).
+void FFrame::set_hw_frame(AVFrame *frame)
+{
+	av_frame_free(&frm);
+	frm = frame;
+}
+
int FFAudioStream::read(float *fp, long len)
{
long n = len * nch;
seek_pos = curr_pos = 0;
seeked = 1; eof = 0;
reading = writing = 0;
- hw_dev = 0;
hw_pixfmt = AV_PIX_FMT_NONE;
hw_device_ctx = 0;
flushed = 0;
return AV_HWDEVICE_TYPE_NONE;
}
-void FFStream::decode_hw_format(AVCodec *decoder, AVHWDeviceType type)
+// Base-class stub: audio (and any non-video) streams do no hardware
+// decode setup.  Returns 0 = "no hw format activated", which makes
+// decode_activate() reset hw_type to AV_HWDEVICE_TYPE_NONE and use
+// software decode; FFVideoStream overrides this with the real setup.
+int FFStream::decode_hw_format(AVCodec *decoder, AVHWDeviceType type)
{
+	return 0;
}
int FFStream::decode_activate()
eprintf(_("cant allocate codec context\n"));
ret = AVERROR(ENOMEM);
}
- if( ret >= 0 && hw_type != AV_HWDEVICE_TYPE_NONE )
- decode_hw_format(decoder, hw_type);
-
+ if( ret >= 0 && hw_type != AV_HWDEVICE_TYPE_NONE ) {
+ ret = decode_hw_format(decoder, hw_type);
+ if( !ret ) hw_type = AV_HWDEVICE_TYPE_NONE;
+ }
if( ret >= 0 ) {
avcodec_parameters_to_context(avctx, st->codecpar);
if( !av_dict_get(copts, "threads", NULL, 0) )
ret = avcodec_open2(avctx, decoder, &copts);
}
if( ret >= 0 && hw_type != AV_HWDEVICE_TYPE_NONE ) {
- ret = read_packet();
+ if( need_packet ) {
+ need_packet = 0;
+ ret = read_packet();
+ }
if( ret >= 0 ) {
AVPacket *pkt = (AVPacket*)ipkt;
- need_packet = 0;
ret = avcodec_send_packet(avctx, pkt);
if( ret < 0 || hw_pix_fmt == AV_PIX_FMT_NONE ) {
ff_err(ret, "HW device init failed, using SW decode.\nfile:%s\n",
av_buffer_unref(&hw_device_ctx);
hw_device_ctx = 0;
hw_type = AV_HWDEVICE_TYPE_NONE;
- flushed = 0;
- st_eof(0);
- need_packet = 1;
+ int flags = AVSEEK_FLAG_BACKWARD | AVSEEK_FLAG_ANY;
+ int idx = st->index;
+ av_seek_frame(fmt_ctx, idx, INT64_MIN, flags);
+ need_packet = 1; flushed = 0;
+ seeked = 1; st_eof(0);
ret = 0;
continue;
}
AVHWDeviceType FFVideoStream::decode_hw_activate()
{
AVHWDeviceType type = AV_HWDEVICE_TYPE_NONE;
- const char *hw_dev = getenv("CIN_HW_DEV");
- if( hw_dev ) {
+ const char *hw_dev = ffmpeg->opt_hw_dev;
+ if( !hw_dev ) hw_dev = getenv("CIN_HW_DEV");
+ if( !hw_dev ) hw_dev = ffmpeg->ff_hw_dev();
+ if( hw_dev && *hw_dev && strcmp(_("none"), hw_dev) ) {
type = av_hwdevice_find_type_by_name(hw_dev);
if( type == AV_HWDEVICE_TYPE_NONE ) {
fprintf(stderr, "Device type %s is not supported.\n", hw_dev);
return type;
}
-void FFVideoStream::decode_hw_format(AVCodec *decoder, AVHWDeviceType type)
+int FFVideoStream::decode_hw_format(AVCodec *decoder, AVHWDeviceType type)
{
+ int ret = 0;
hw_pix_fmt = AV_PIX_FMT_NONE;
for( int i=0; ; ++i ) {
const AVCodecHWConfig *config = avcodec_get_hw_config(decoder, i);
if( hw_pix_fmt >= 0 ) {
hw_pixfmt = hw_pix_fmt;
avctx->get_format = get_hw_format;
- int ret = av_hwdevice_ctx_create(&hw_device_ctx, type, 0, 0, 0);
- if( ret >= 0 )
+ ret = av_hwdevice_ctx_create(&hw_device_ctx, type, 0, 0, 0);
+ if( ret >= 0 ) {
avctx->hw_device_ctx = av_buffer_ref(hw_device_ctx);
+ ret = 1;
+ }
else
ff_err(ret, "Failed HW device create.\ndev:%s\n",
av_hwdevice_get_type_name(type));
}
+ return ret;
+}
+
+// Activate hardware encoding for this video stream.
+// hw_dev: device name from ffmpeg.opts / preferences; _("none") disables.
+// Creates a VAAPI device context and an NV12 hw frames context sized to
+// the stream, and attaches the frames context to avctx->hw_frames_ctx.
+// Returns the activated device type, or AV_HWDEVICE_TYPE_NONE on any
+// failure (caller then falls back to software encode).
+// Currently only vaapi is supported.
+AVHWDeviceType FFVideoStream::encode_hw_activate(const char *hw_dev)
+{
+	AVBufferRef *hw_device_ctx = 0;
+	AVBufferRef *hw_frames_ref = 0;
+	AVHWDeviceType type = AV_HWDEVICE_TYPE_NONE;
+	if( strcmp(_("none"), hw_dev) ) {
+		type = av_hwdevice_find_type_by_name(hw_dev);
+		if( type != AV_HWDEVICE_TYPE_VAAPI ) {
+			fprintf(stderr, "currently, only vaapi hw encode is supported\n");
+			type = AV_HWDEVICE_TYPE_NONE;
+		}
+	}
+	if( type != AV_HWDEVICE_TYPE_NONE ) {
+		int ret = av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_VAAPI, 0, 0, 0);
+		if( ret < 0 ) {
+			ff_err(ret, "Failed to create a HW device.\n");
+			type = AV_HWDEVICE_TYPE_NONE;
+		}
+	}
+	if( type != AV_HWDEVICE_TYPE_NONE ) {
+		hw_frames_ref = av_hwframe_ctx_alloc(hw_device_ctx);
+		if( !hw_frames_ref ) {
+			fprintf(stderr, "Failed to create HW frame context.\n");
+			type = AV_HWDEVICE_TYPE_NONE;
+		}
+	}
+	if( type != AV_HWDEVICE_TYPE_NONE ) {
+		AVHWFramesContext *frames_ctx = (AVHWFramesContext *)(hw_frames_ref->data);
+		frames_ctx->format = AV_PIX_FMT_VAAPI;
+		frames_ctx->sw_format = AV_PIX_FMT_NV12;
+		frames_ctx->width = width;
+		frames_ctx->height = height;
+		frames_ctx->initial_pool_size = 0; // 200;
+		int ret = av_hwframe_ctx_init(hw_frames_ref);
+		if( ret >= 0 ) {
+			// avctx takes its own reference to the frames context
+			avctx->hw_frames_ctx = av_buffer_ref(hw_frames_ref);
+			if( !avctx->hw_frames_ctx ) ret = AVERROR(ENOMEM);
+		}
+		if( ret < 0 ) {
+			ff_err(ret, "Failed to initialize HW frame context.\n");
+			type = AV_HWDEVICE_TYPE_NONE;
+		}
+		av_buffer_unref(&hw_frames_ref);
+	}
+	// the frames context holds its own reference to the device context,
+	// so drop the local reference to avoid leaking it on every call
+	// (av_buffer_unref is a no-op when hw_device_ctx is still null)
+	av_buffer_unref(&hw_device_ctx);
+	return type;
+}
+
+// Upload picture's software frame data to a GPU surface when the encoder
+// uses a hardware pixel format; a no-op (returns 0) otherwise.
+// On success the FFrame's owned frame is replaced by the hw frame via
+// set_hw_frame().  Returns 0 on success, negative AVERROR on failure.
+int FFVideoStream::encode_hw_write(FFrame *picture)
+{
+	int ret = 0;
+	AVFrame *hw_frm = 0;
+	switch( avctx->pix_fmt ) {
+	case AV_PIX_FMT_VAAPI:
+		// allocate a surface from avctx->hw_frames_ctx, then copy the data up
+		hw_frm = av_frame_alloc();
+		if( !hw_frm ) { ret = AVERROR(ENOMEM); break; }
+		ret = av_hwframe_get_buffer(avctx->hw_frames_ctx, hw_frm, 0);
+		if( ret < 0 ) break;
+		ret = av_hwframe_transfer_data(hw_frm, *picture, 0);
+		if( ret < 0 ) break;
+		picture->set_hw_frame(hw_frm);
+		return 0;
+	default:
+		return 0;
+	}
+	// error path: release the partially set up hw frame (safe when null)
+	av_frame_free(&hw_frm);
+	ff_err(ret, "Error while transferring frame data to GPU.\n");
+	return ret;
}
int FFVideoStream::decode_frame(AVFrame *frame)
int FFVideoStream::init_frame(AVFrame *picture)
{
- picture->format = avctx->pix_fmt;
+ switch( avctx->pix_fmt ) {
+ case AV_PIX_FMT_VAAPI:
+ picture->format = AV_PIX_FMT_NV12;
+ break;
+ default:
+ picture->format = avctx->pix_fmt;
+ break;
+ }
picture->width = avctx->width;
picture->height = avctx->height;
int ret = av_frame_get_buffer(picture, 32);
frame->pts = curr_pos;
ret = convert_pixfmt(vframe, frame);
}
+ if( ret >= 0 && avctx->hw_frames_ctx )
+ encode_hw_write(picture);
if( ret >= 0 ) {
picture->queue(curr_pos);
++curr_pos;
}
AVPixelFormat pix_fmt = (AVPixelFormat)ip->format;
- if( pix_fmt == ((FFVideoStream *)this)->hw_pixfmt ) {
+ FFVideoStream *vid =(FFVideoStream *)this;
+ if( pix_fmt == vid->hw_pixfmt ) {
int ret = 0;
if( !sw_frame && !(sw_frame=av_frame_alloc()) )
ret = AVERROR(ENOMEM);
pix_fmt = (AVPixelFormat)ip->format;
}
if( ret < 0 ) {
- ff_err(ret, "Error retrieving data from GPU to CPU\n");
+ eprintf(_("Error retrieving data from GPU to CPU\nfile: %s\n"),
+ vid->ffmpeg->fmt_ctx->url);
return -1;
}
}
int ret = sws_scale(convert_ctx, ip->data, ip->linesize, 0, ip->height,
ipic->data, ipic->linesize);
if( ret < 0 ) {
- ff_err(ret, "FFVideoConvert::convert_picture_frame: sws_scale() failed\n");
+ ff_err(ret, "FFVideoConvert::convert_picture_frame: sws_scale() failed\nfile: %s\n",
+ vid->ffmpeg->fmt_ctx->url);
return -1;
}
return 0;
opt_duration = -1;
opt_video_filter = 0;
opt_audio_filter = 0;
+ opt_hw_dev = 0;
fflags = 0;
char option_path[BCTEXTLEN];
set_option_path(option_path, "%s", "ffmpeg.opts");
av_dict_free(&opts);
delete [] opt_video_filter;
delete [] opt_audio_filter;
+ delete [] opt_hw_dev;
}
int FFMPEG::check_sample_rate(AVCodec *codec, int sample_rate)
opt_video_filter = cstrdup(val);
else if( !strcmp(key, "audio_filter") )
opt_audio_filter = cstrdup(val);
+ else if( !strcmp(key, "cin_hw_dev") )
+ opt_hw_dev = cstrdup(val);
else if( !strcmp(key, "loglevel") )
set_loglevel(val);
else
vid->height = asset->height;
vid->frame_rate = asset->frame_rate;
- AVPixelFormat pix_fmt = av_get_pix_fmt(asset->ff_pixel_format);
+ AVPixelFormat pix_fmt = AV_PIX_FMT_NONE;
+ if( opt_hw_dev != 0 ) {
+ AVHWDeviceType hw_type = vid->encode_hw_activate(opt_hw_dev);
+ switch( hw_type ) {
+ case AV_HWDEVICE_TYPE_VAAPI:
+ pix_fmt = AV_PIX_FMT_VAAPI;
+ break;
+ case AV_HWDEVICE_TYPE_NONE:
+ default:
+ pix_fmt = av_get_pix_fmt(asset->ff_pixel_format);
+ break;
+ }
+ }
if( pix_fmt == AV_PIX_FMT_NONE )
pix_fmt = codec->pix_fmts ? codec->pix_fmts[0] : AV_PIX_FMT_YUV420P;
ctx->pix_fmt = pix_fmt;
+
const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(pix_fmt);
int mask_w = (1<<desc->log2_chroma_w)-1;
ctx->width = (vid->width+mask_w) & ~mask_w;
return file_base->file->cpus;
}
+// Hardware device name configured in the application preferences
+// (use_hw_dev).  Used by decode_hw_activate() as the lowest-priority
+// source, after the per-file opt_hw_dev and the CIN_HW_DEV env var.
+const char *FFMPEG::ff_hw_dev()
+{
+	return &file_base->file->preferences->use_hw_dev[0];
+}
+
int FFVideoStream::create_filter(const char *filter_spec, AVCodecParameters *avpar)
{
avfilter_register_all();