{
hms = 0;
hmsf = 0;
+ timecode = 0;
samples = 0;
frames = 0;
hex = 0;
{
delete hms;
delete hmsf;
+ delete timecode;
delete samples;
delete frames;
delete hex;
pwindow->thread->edl->session->time_format == TIME_HMSF,
x, y));
y += ys20;
+ add_subwindow(timecode = new TimeFormatTimecode(pwindow, this,
+ pwindow->thread->edl->session->time_format == TIME_TIMECODE,
+ x, y));
+ y += ys20;
add_subwindow(samples = new TimeFormatSamples(pwindow, this,
pwindow->thread->edl->session->time_format == TIME_SAMPLES,
x, y));
DeactivateFocusPolicy *focus_deactivate = new DeactivateFocusPolicy(pwindow, x, y);
add_subwindow(focus_deactivate);
y += focus_deactivate->get_h() + ys5;
+ AutoRotate *auto_rotate = new AutoRotate(pwindow, x, y);
+ add_subwindow(auto_rotate);
+ y += auto_rotate->get_h() + ys5;
}
int AppearancePrefs::update(int new_value)
pwindow->thread->edl->session->time_format = new_value;
hms->update(new_value == TIME_HMS);
hmsf->update(new_value == TIME_HMSF);
+ timecode->update(new_value == TIME_TIMECODE);
samples->update(new_value == TIME_SAMPLES);
hex->update(new_value == TIME_SAMPLES_HEX);
frames->update(new_value == TIME_FRAMES);
return 1;
}
+TimeFormatTimecode::TimeFormatTimecode(PreferencesWindow *pwindow, AppearancePrefs *tfwindow, int value, int x, int y)
+ : BC_Radial(x, y, value, TIME_TIMECODE_TEXT)
+{ this->pwindow = pwindow; this->tfwindow = tfwindow; }
+
+int TimeFormatTimecode::handle_event()
+{
+ tfwindow->update(TIME_TIMECODE);
+ return 1;
+}
+
TimeFormatSamples::TimeFormatSamples(PreferencesWindow *pwindow, AppearancePrefs *tfwindow, int value, int x, int y)
: BC_Radial(x, y, value, TIME_SAMPLES_TEXT)
{ this->pwindow = pwindow; this->tfwindow = tfwindow; }
return 1;
}
+AutoRotate::AutoRotate(PreferencesWindow *pwindow, int x, int y)
+ : BC_CheckBox(x, y, pwindow->thread->preferences->auto_rotate != 0,
+ _("Auto rotate ffmpeg media"))
+{
+ this->pwindow = pwindow;
+}
+
+int AutoRotate::handle_event()
+{
+ pwindow->thread->preferences->auto_rotate = get_value();
+ return 1;
+}
+
ForwardRenderDisplacement::ForwardRenderDisplacement(PreferencesWindow *pwindow, int x, int y)
: BC_CheckBox(x, y, pwindow->thread->preferences->forward_render_displacement,
_("Always show next frame"))
int update(int new_value);
TimeFormatHMS *hms;
TimeFormatHMSF *hmsf;
+ TimeFormatTimecode *timecode;
TimeFormatSamples *samples;
TimeFormatHex *hex;
TimeFormatFrames *frames;
AppearancePrefs *tfwindow;
};
+class TimeFormatTimecode : public BC_Radial
+{
+public:
+ TimeFormatTimecode(PreferencesWindow *pwindow, AppearancePrefs *tfwindow, int value, int x, int y);
+ int handle_event();
+ PreferencesWindow *pwindow;
+ AppearancePrefs *tfwindow;
+};
+
class TimeFormatSamples : public BC_Radial
{
public:
PreferencesWindow *pwindow;
};
+class AutoRotate: public BC_CheckBox
+{
+public:
+ AutoRotate(PreferencesWindow *pwindow, int x, int y);
+ int handle_event();
+ PreferencesWindow *pwindow;
+};
+
class ForwardRenderDisplacement : public BC_CheckBox
{
public:
class AppearancePrefs;
class TimeFormatHMS;
class TimeFormatHMSF;
+class TimeFormatTimecode;
class TimeFormatSamples;
class TimeFormatFrames;
class TimeFormatHex;
class GrabFocusPolicy;
class ActivateFocusPolicy;
class DeactivateFocusPolicy;
+class AutoRotate;
class ForwardRenderDisplacement;
class HighlightInverseColor;
class YuvColorSpace;
proxy_scale = 0; // not a proxy
proxy_edl = 0; // not proxy from edl
video_length = 0;
+ timecode = -2; // unknown
single_frame = 0;
vmpeg_cmodel = BC_YUV420P;
frame_rate = 0;
use_header = asset->use_header;
aspect_ratio = asset->aspect_ratio;
interlace_mode = asset->interlace_mode;
+ timecode = asset->timecode;
video_data = asset->video_data;
layers = asset->layers;
result = (layers == asset.layers &&
program == asset.program &&
frame_rate == asset.frame_rate &&
- asset.interlace_mode == interlace_mode &&
+ asset.interlace_mode == interlace_mode &&
+ asset.timecode == timecode &&
width == asset.width &&
height == asset.height &&
!strcmp(vcodec, asset.vcodec) &&
file->tag.get_property("VCODEC", vcodec);
video_length = file->tag.get_property("VIDEO_LENGTH", (int64_t)0);
+ timecode = file->tag.get_property("TIMECODE", -2);
mov_sphere = file->tag.get_property("MOV_SPHERE", 0);
jpeg_sphere = file->tag.get_property("JPEG_SPHERE", 0);
single_frame = file->tag.get_property("SINGLE_FRAME", (int64_t)0);
file->tag.set_property("VCODEC", vcodec);
file->tag.set_property("VIDEO_LENGTH", video_length);
+ file->tag.set_property("TIMECODE", timecode);
file->tag.set_property("MOV_SPHERE", mov_sphere);
file->tag.set_property("JPEG_SPHERE", jpeg_sphere);
file->tag.set_property("SINGLE_FRAME", single_frame);
" height %d vcodec %s aspect_ratio %f ilace_mode %s\n",
video_data, layers, program, frame_rate, width, height,
vcodec, aspect_ratio,string);
- fprintf(fp," actual_width %d actual_height %d proxy_scale %d proxy_edl %d"
- " video_length %jd repeat %d\n",
- actual_width, actual_height, proxy_scale, proxy_edl, video_length,
- single_frame);
- fprintf(fp," video_length %jd repeat %d\n", video_length, single_frame);
+ fprintf(fp," actual_width %d actual_height %d proxy_scale %d proxy_edl %d\n",
+ actual_width, actual_height, proxy_scale, proxy_edl);
+ fprintf(fp," video_length %jd repeat %d timecode %f\n",
+ video_length, single_frame, timecode);
fprintf(fp," mov_sphere=%d jpeg_sphere=%d\n", mov_sphere, jpeg_sphere);
return 0;
}
#include "pluginserver.inc"
-// Time code formats
-#define TC_DROPFRAME 0
-#define TC_NONDROPFRAME 1
-#define TC_PAL 2
-#define TC_FILM 3
-
class Asset : public Indexable, public ListItem<Asset>
{
public:
// Length in frames
// -1 means a still photo
int64_t video_length;
-
-
+// timecode, unknown=-2, no timecode=-1, timecode>=0
+ double timecode;
// mp3 compression
int mp3_bitrate;
void AssetEdit::handle_done_event(int result)
{
+ if( !result && changed_params->timecode >= 0 ) {
+ double rate = indexable->get_frame_rate();
+ changed_params->timecode =
+ atoi(window->tc_hrs->get_text()) * 3600 +
+ atoi(window->tc_mins->get_text()) * 60 +
+ atoi(window->tc_secs->get_text()) +
+ atoi(window->tc_rest->get_text()) / rate;
+ }
}
void AssetEdit::handle_close_event(int result)
int AssetEdit::window_height()
{
- int h = 128 + 64;
- if( indexable->have_audio() ) h += 200;
+ int h = yS(128 + 64);
+ if( indexable->have_audio() ) h += yS(200);
if( indexable->have_video() ) {
- h += 160;
+ h += yS(160);
if( indexable->is_asset ) {
Asset *asset = (Asset *)indexable;
if( File::can_scale_input(asset) )
- h += 42;
+ h += yS(42);
+ if( asset->timecode >= 0 )
+ h += yS(32);
}
}
- return yS(h);
+ return h;
&asset_edit->changed_params->interlace_mode,
(ArrayList<BC_ListBoxItem*>*)&mwindow->interlace_asset_modes,
x2 + edit_ilace_mode->get_w(), y));
+ y += title->get_h() + yS(15);
}
}
+ if( asset && asset->timecode >= 0 ) {
+ char text[BCSTRLEN], *tc = text;
+ Units::totext(tc, asset->timecode, TIME_HMSF,
+ asset->sample_rate, asset->frame_rate);
+ const char *hrs = tc; tc = strchr(tc, ':'); *tc++ = 0;
+ const char *mins = tc; tc = strchr(tc, ':'); *tc++ = 0;
+ const char *secs = tc; tc = strchr(tc, ':'); *tc++ = 0;
+ const char *rest = tc;
+ int padw = BC_Title::calculate_w(this, ":", MEDIUMFONT);
+ int fldw = BC_Title::calculate_w(this, "00", MEDIUMFONT) + 5;
+ int hdrw = fldw + padw; x = x2;
+ add_subwindow(title = new BC_Title(x, y, _("hour"), SMALLFONT)); x += hdrw;
+ add_subwindow(title = new BC_Title(x, y, _("min"), SMALLFONT)); x += hdrw;
+ add_subwindow(title = new BC_Title(x, y, _("sec"), SMALLFONT)); x += hdrw;
+ add_subwindow(title = new BC_Title(x, y, _("frms"), SMALLFONT));
+ y += title->get_h() + yS(3);
+ add_subwindow(title = new BC_Title(x1, y, _("Time Code Start:")));
+ add_subwindow(tc_hrs = new BC_TextBox(x=x2, y, fldw, 1, hrs));
+ add_subwindow(new BC_Title(x += tc_hrs->get_w(), y, ":"));
+ add_subwindow(tc_mins = new BC_TextBox(x += padw, y, fldw, 1, mins));
+ add_subwindow(new BC_Title(x += tc_mins->get_w(), y, ":"));
+ add_subwindow(tc_secs = new BC_TextBox(x += padw, y , fldw, 1, secs));
+ add_subwindow(new BC_Title(x += tc_secs->get_w(), y, ":"));
+ add_subwindow(tc_rest = new BC_TextBox(x += 10, y, fldw, 1, rest));
+ y += title->get_h() + ypad5;
+ }
add_subwindow(new BC_OKButton(this));
add_subwindow(new BC_CancelButton(this));
AssetEdit *asset_edit;
BC_Title *win_width;
BC_Title *win_height;
+ BC_TextBox *tc_hrs, *tc_mins;
+ BC_TextBox *tc_secs, *tc_rest;
DetailAssetDialog *detail_dialog;
void show_info_detail();
void AssetPopup::paste_assets()
{
-// Collect items into the drag vectors for temporary storage
- gui->lock_window("AssetPopup::paste_assets");
- mwindow->gui->lock_window("AssetPopup::paste_assets");
- mwindow->cwindow->gui->lock_window("AssetPopup::paste_assets");
-
int proxy = mwindow->edl->session->awindow_folder == AW_PROXY_FOLDER ? 1 : 0;
gui->collect_assets(proxy);
+// Collect items into the drag vectors for temporary storage
+ gui->unlock_window();
+ mwindow->gui->lock_window("AssetPopup::paste_assets");
mwindow->paste_assets(mwindow->edl->local_session->get_selectionstart(1),
mwindow->edl->tracks->first, 0); // do not overwrite
-
- gui->unlock_window();
mwindow->gui->unlock_window();
- mwindow->cwindow->gui->unlock_window();
+ gui->lock_window("AssetPopup::paste_assets");
}
void AssetPopup::match_size()
int AssetPopupBuildIndex::handle_event()
{
+ popup->unlock_window();
//printf("AssetPopupBuildIndex::handle_event 1\n");
mwindow->rebuild_indices();
+ popup->lock_window("AssetPopupBuildIndex::handle_event");
return 1;
}
int AssetPopupOpenMixer::handle_event()
{
+ popup->unlock_window();
mwindow->gui->lock_window("AssetPopupOpenMixer::handle_event");
mwindow->create_mixers();
mwindow->gui->unlock_window();
+ popup->lock_window("AssetPopupOpenMixer::handle_event");
return 1;
}
int AssetPopupInsertMixer::handle_event()
{
+ popup->unlock_window();
mwindow->gui->lock_window("AssetPopupInsertMixer::handle_event");
mwindow->create_mixers(-1);
mwindow->gui->unlock_window();
+ popup->lock_window("AssetPopupInsertMixer::handle_event");
return 1;
}
int BatchRenderJob::get_strategy()
{
- return Render::get_strategy(farmed, labeled);
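+ // still-image formats render only the current frame; everything else renders the selection range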
+ int range = File::is_image_render(asset->format) ?
+ RANGE_1FRAME : RANGE_SELECTION;
+ return Render::get_strategy(farmed, labeled, range);
}
0, // use_goto
1, // use_clk2play
1, // use_scope
- 0) // use_gang_tracks
+ 0, // use_gang_tracks
+ 0) // use_timecode
{
this->mwindow = mwindow;
this->cwindow = cwindow;
int use_goto,
int use_clk2play,
int use_scope,
- int use_gang_tracks)
+ int use_gang_tracks,
+ int use_timecode)
{
this->window_id = window_id;
this->editing_mode = editing_mode;
this->use_clk2play = use_clk2play;
this->use_scope = use_scope;
this->use_gang_tracks = use_gang_tracks;
+ this->use_timecode = use_timecode;
this->x = x;
this->y = y;
scope_dialog = new EditPanelScopeDialog(mwindow, this);
}
+ if( use_timecode ) {
+ timecode = new EditPanelTimecode(mwindow, this, x1, y1);
+ subwindow->add_subwindow(timecode);
+ x1 += timecode->get_w();
+ }
+
if( use_gang_tracks ) {
gang_tracks = new EditPanelGangTracks(mwindow, this, x1, y1-yS(1));
subwindow->add_subwindow(gang_tracks);
scope->reposition_window(x1, y1-yS(1));
x1 += scope->get_w();
}
+ if( use_timecode ) {
+ timecode->reposition_window(x1, y1);
+ x1 += timecode->get_w();
+ }
if( use_meters ) {
meters->reposition_window(x1, y1);
return 1;
}
+
+EditPanelTimecode::EditPanelTimecode(MWindow *mwindow,
+ EditPanel *panel, int x, int y)
+ : BC_Button(x, y, mwindow->theme->get_image_set("clapperbutton"))
+{
+ this->mwindow = mwindow;
+ this->panel = panel;
+ tc_dialog = 0;
+ set_tooltip(_("Set Timecode"));
+}
+
+EditPanelTimecode::~EditPanelTimecode()
+{
+ delete tc_dialog;
+}
+
+int EditPanelTimecode::handle_event()
+{
+ if( !tc_dialog )
+ tc_dialog = new EditPanelTcDialog(mwindow, panel);
+ int px, py;
+ get_pop_cursor(px, py, 0);
+ tc_dialog->start_dialog(px, py);
+ return 1;
+}
+
+EditPanelTcDialog::EditPanelTcDialog(MWindow *mwindow, EditPanel *panel)
+ : BC_DialogThread()
+{
+ this->mwindow = mwindow;
+ this->panel = panel;
+ tc_gui = 0;
+ px = py = 0;
+}
+
+EditPanelTcDialog::~EditPanelTcDialog()
+{
+ close_window();
+}
+
+#define TCW_W xS(200)
+#define TCW_H yS(120)
+
+void EditPanelTcDialog::start_dialog(int px, int py)
+{
+ this->px = px - TCW_W/2;
+ this->py = py - TCW_H/2;
+ start();
+}
+
+BC_Window *EditPanelTcDialog::new_gui()
+{
+ tc_gui = new EditPanelTcWindow(this, px, py);
+ tc_gui->create_objects();
+ double timecode = mwindow->get_timecode_offset();
+ tc_gui->update(timecode);
+ tc_gui->show_window();
+ return tc_gui;
+}
+
+void EditPanelTcDialog::handle_done_event(int result)
+{
+ if( result ) return;
+ double ofs = tc_gui->get_timecode();
+ mwindow->set_timecode_offset(ofs);
+}
+
+EditPanelTcWindow::EditPanelTcWindow(EditPanelTcDialog *tc_dialog, int x, int y)
+ : BC_Window(_(PROGRAM_NAME ": Timecode"), x, y,
+ TCW_W, TCW_H, TCW_W, TCW_H, 0, 0, 1)
+{
+ this->tc_dialog = tc_dialog;
+}
+
+EditPanelTcWindow::~EditPanelTcWindow()
+{
+}
+
+double EditPanelTcWindow::get_timecode()
+{
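+ // convert the four text fields to a time in seconds; the frames field is scaled by the session frame rate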
+ int hrs = atoi(hours->get_text());
+ int mins = atoi(minutes->get_text());
+ int secs = atoi(seconds->get_text());
+ int frms = atoi(frames->get_text());
+ double frame_rate = tc_dialog->mwindow->edl->session->frame_rate;
+ double timecode = hrs*3600 + mins*60 + secs + frms/frame_rate;
+ return timecode;
+}
+
+void EditPanelTcWindow::update(double timecode)
+{
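+ // split the timecode in seconds into hours/minutes/seconds and a frame remainder for the text fields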
+ if( timecode < 0 ) timecode = 0;
+ int64_t pos = timecode;
+ int hrs = pos/3600;
+ int mins = pos/60 - hrs*60;
+ int secs = pos - hrs*3600 - mins*60;
+ double frame_rate = tc_dialog->mwindow->edl->session->frame_rate;
+ int frms = (timecode-pos) * frame_rate;
+ hours->update(hrs);
+ minutes->update(mins);
+ seconds->update(secs);
+ frames->update(frms);
+}
+
+void EditPanelTcWindow::create_objects()
+{
+ lock_window("EditPanelTcWindow::create_objects");
+ int x = xS(20), y = yS(5);
+ BC_Title *title = new BC_Title(x - 2, y, _("hour min sec frms"), SMALLFONT);
+ add_subwindow(title); y += title->get_h() + yS(3);
+ hours = new EditPanelTcInt(this, x, y, xS(26), 99, "%02i");
+ add_subwindow(hours); x += hours->get_w() + xS(4);
+ minutes = new EditPanelTcInt(this, x, y, xS(26), 59, "%02i");
+ add_subwindow(minutes); x += minutes->get_w() + xS(4);
+ seconds = new EditPanelTcInt(this, x, y, xS(26), 60, "%02i");
+ add_subwindow(seconds); x += seconds->get_w() + xS(4);
+ frames = new EditPanelTcInt(this, x, y, xS(34), 999, "%03i");
+ add_subwindow(frames); x += frames->get_w() + xS(16);
+ add_subwindow(new EditPanelTcReset(this, x, y));
+ double timecode = tc_dialog->mwindow->get_timecode_offset();
+ update(timecode);
+ add_subwindow(new BC_OKButton(this));
+ add_subwindow(new BC_CancelButton(this));
+ unlock_window();
+}
+
+EditPanelTcReset::EditPanelTcReset(EditPanelTcWindow *window, int x, int y)
+ : BC_Button(x, y, window->tc_dialog->mwindow->theme->get_image_set("reset_button"))
+{
+ this->window = window;
+}
+
+int EditPanelTcReset::handle_event()
+{
+ window->update(0);
+ return 1;
+}
+
+
+EditPanelTcInt::EditPanelTcInt(EditPanelTcWindow *window, int x, int y, int w,
+ int max, const char *format)
+ : BC_TextBox(x, y, w, 1, "")
+{
+ this->window = window;
+ this->max = max;
+ this->format = format;
+ digits = 1;
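+ // count the decimal digits in max (99 -> 2, 999 -> 3)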
+ for( int m=max; (m/=10)>0; ++digits );
+}
+
+EditPanelTcInt::~EditPanelTcInt()
+{
+}
+
+int EditPanelTcInt::handle_event()
+{
+ int v = atoi(get_text());
+ if( v > max ) {
+ v = v % (max+1);
+ char string[BCSTRLEN];
+ sprintf(string, format, v);
+ BC_TextBox::update(string);
+ }
+ return 1;
+}
+
+void EditPanelTcInt::update(int v)
+{
+ char text[BCTEXTLEN];
+ if( v > max ) v = max;
+ sprintf(text, format, v);
+ BC_TextBox::update(text);
+}
+
+int EditPanelTcInt::keypress_event()
+{
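+ // a full field restarts on the next keystroke; only digits and edit/navigation keys are passed through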
+ if( (int)strlen(get_text()) >= digits )
+ BC_TextBox::update("");
+ int key = get_keypress();
+ switch( key ) {
+ case TAB: case LEFTTAB:
+ case LEFT: case RIGHT:
+ case HOME: case END:
+ case BACKSPACE:
+ case DELETE:
+ case '0': case '1': case '2': case '3': case '4':
+ case '5': case '6': case '7': case '8': case '9':
+ return BC_TextBox::keypress_event();
+ }
+ return 1;
+}
+
MWindow *mwindow;
};
+class EditPanelTimecode : public BC_Button
+{
+public:
+ EditPanelTimecode(MWindow *mwindow, EditPanel *panel, int x, int y);
+ ~EditPanelTimecode();
+ int handle_event();
+ MWindow *mwindow;
+ EditPanel *panel;
+ EditPanelTcDialog *tc_dialog;
+};
+
+class EditPanelTcDialog : public BC_DialogThread
+{
+public:
+ EditPanelTcDialog(MWindow *mwindow, EditPanel *panel);
+ ~EditPanelTcDialog();
+ BC_Window *new_gui();
+ void start_dialog(int px, int py);
+ void handle_done_event(int result);
+
+ MWindow *mwindow;
+ EditPanel *panel;
+ EditPanelTcWindow *tc_gui;
+ int px, py;
+};
+
+class EditPanelTcWindow : public BC_Window
+{
+public:
+ EditPanelTcWindow(EditPanelTcDialog *tc_dialog, int x, int y);
+ ~EditPanelTcWindow();
+ void create_objects();
+ double get_timecode();
+ void update(double timecode);
+
+ EditPanelTcDialog *tc_dialog;
+ EditPanelTcInt *hours;
+ EditPanelTcInt *minutes;
+ EditPanelTcInt *seconds;
+ EditPanelTcInt *frames;
+};
+
+class EditPanelTcInt : public BC_TextBox
+{
+public:
+ EditPanelTcInt(EditPanelTcWindow *window, int x, int y, int w,
+ int max, const char *format);
+ ~EditPanelTcInt();
+ int handle_event();
+ int keypress_event();
+ void update(int v);
+
+ EditPanelTcWindow *window;
+ int max, digits;
+ const char *format;
+};
+
+class EditPanelTcReset : public BC_Button
+{
+public:
+ EditPanelTcReset(EditPanelTcWindow *window, int x, int y);
+ int handle_event();
+
+ EditPanelTcWindow *window;
+};
class EditPanel
{
int use_goto,
int use_clk2play,
int use_scope,
- int use_gang_tracks);
+ int use_gang_tracks,
+ int use_timecode);
~EditPanel();
void set_meters(MeterPanel *meter_panel);
int use_clk2play;
int use_scope;
int use_gang_tracks;
+ int use_timecode;
EditFit *fit;
EditFitAutos *fit_autos;
EditManualGoto *mangoto;
EditClick2Play *click2play;
EditPanelScope *scope;
+ EditPanelTimecode *timecode;
EditPanelScopeDialog *scope_dialog;
EditCopy *copy;
EditPaste *paste;
class EditPanelScopeGUI;
class EditPanelScopeDialog;
class EditPanelScope;
-class EditPanelMashMixers;
+class EditPanelTimecode;
+class EditPanelTcDialog;
+class EditPanelTcWindow;
+class EditPanelTcInt;
#endif
add_item(new EditPopupOverwritePlugins(mwindow, this));
add_item(new EditCollectEffects(mwindow, this));
add_item(new EditPasteEffects(mwindow, this));
+ add_item(new EditPopupTimecode(mwindow, this));
}
int EditPopup::activate_menu(Track *track, Edit *edit,
return 1;
}
+EditPopupTimecode::EditPopupTimecode(MWindow *mwindow, EditPopup *popup)
+ : BC_MenuItem(_("Timecode"),_("Ctrl-!"),'!')
+{
+ this->mwindow = mwindow;
+ this->popup = popup;
+ set_ctrl(1);
+}
+
+int EditPopupTimecode::handle_event()
+{
+ if( mwindow->session->current_operation != NO_OPERATION ) return 1;
+ Edit *edit = popup->edit;
+ if( !edit || !edit->asset ) return 1;
+ Asset *asset = edit->asset;
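+ // use the cached asset timecode if known; otherwise probe the media once and cache the result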
+ double timecode = asset->timecode != -2 ? asset->timecode :
+ FFMPEG::get_timecode(asset->path,
+ edit->track->data_type, edit->channel,
+ mwindow->edl->session->frame_rate);
+ asset->timecode = timecode;
+ if( timecode >= 0 ) {
+ int64_t pos = edit->startproject + edit->startsource;
+ double position = edit->track->from_units(pos);
+ mwindow->set_timecode_offset(timecode - position);
+ }
+ else
+ mwindow->set_timecode_offset(0);
+ return 1;
+}
EditPopup *popup;
};
+class EditPopupTimecode : public BC_MenuItem
+{
+public:
+ EditPopupTimecode(MWindow *mwindow, EditPopup *popup);
+ int handle_event();
+ MWindow *mwindow;
+ EditPopup *popup;
+};
+
#endif
class EditPopupOverwritePlugins;
class EditCollectEffects;
class EditPasteEffects;
+class EditPopupTimecode;
#endif
#include "edits.h"
#include "edl.h"
#include "edlsession.h"
+#include "ffmpeg.h"
#include "file.h"
#include "filexml.h"
#include "filesystem.h"
track->shift_effects(position, length, edit_autos, 0);
}
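+// return the earliest asset timecode on this track; unknown timecodes (-2) are probed via FFMPEG::get_timecode, -1 if none found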
+double Edits::early_timecode()
+{
+ double result = -1;
+ for( Edit *edit=first; edit; edit=edit->next ) {
+ Asset *asset = edit->asset;
+ if( !asset ) continue;
+ if( asset->timecode < -1 )
+ asset->timecode = FFMPEG::get_timecode(asset->path,
+ track->data_type, edit->channel,
+ edl->session->frame_rate);
+ if( asset->timecode < 0 ) continue;
+ if( result < 0 || result > asset->timecode )
+ result = asset->timecode;
+ }
+ return result;
+}
+
+void Edits::align_timecodes(double offset)
+{
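+ // remove silence edits, place each remaining edit at its asset timecode minus the common offset,
+ // restore edit order by startproject, then plug the gaps with new silence edits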
+ for( Edit *edit=first, *next=0; edit; edit=next ) {
+ next = edit->next;
+ if( edit->silence() ) delete edit;
+ }
+ for( Edit *edit=first, *next=0; edit; edit=next ) {
+ next = edit->next;
+ Asset *asset = edit->asset;
+ if( !asset || asset->timecode < 0 ) continue;
+ double position = asset->timecode - offset;
+ edit->startproject = track->to_units(position, 1) + edit->startsource;
+ }
+ int result = 1;
+ while( result ) {
+ result = 0;
+ for( Edit *edit=first, *next=0; edit; edit=next ) {
+ next = edit->next;
+ if( !next || next->startproject >= edit->startproject ) continue;
+ swap(next, edit);
+ next = edit;
+ result = 1;
+ }
+ }
+ int64_t startproject = 0;
+ for( Edit *edit=first; edit; edit=edit->next ) {
+ int64_t length = edit->startproject - startproject;
+ if( length > 0 ) {
+ Edit *new_edit = create_edit();
+ insert_before(edit, new_edit);
+ new_edit->startproject = startproject;
+ new_edit->length = length;
+ startproject = edit->startproject;
+ }
+ startproject += edit->length;
+ }
+}
+
void paste_silence(int64_t start, int64_t end);
// Returns the newly created edit
Edit *create_silence(int64_t start, int64_t end);
+ double early_timecode();
+ void align_timecodes(double offset);
void resample(double old_rate, double new_rate);
// Shift edits on or after position by distance
si_duration = 3;
test_playback_edits = 1;
time_format = TIME_HMSF;
+ timecode_offset = 0;
nudge_format = 1;
tool_window = 0;
for(int i = 0; i < MAXCHANNELS; i++) {
gang_tracks = defaults->get("GANG_TRACKS", GANG_NONE);
// test_playback_edits = defaults->get("TEST_PLAYBACK_EDITS", 1);
time_format = defaults->get("TIME_FORMAT", TIME_HMSF);
+ timecode_offset = defaults->get("TIMECODE_OFFSET", timecode_offset);
nudge_format = defaults->get("NUDGE_FORMAT", 1);
tool_window = defaults->get("TOOL_WINDOW", 0);
vconfig_in->load_defaults(defaults);
defaults->update("GANG_TRACKS", gang_tracks);
// defaults->update("TEST_PLAYBACK_EDITS", test_playback_edits);
defaults->update("TIME_FORMAT", time_format);
+ defaults->update("TIMECODE_OFFSET", timecode_offset);
defaults->update("NUDGE_FORMAT", nudge_format);
defaults->update("TOOL_WINDOW", tool_window);
vconfig_in->save_defaults(defaults);
gang_tracks = file->tag.get_property("GANG_TRACKS", GANG_NONE);
// test_playback_edits = file->tag.get_property("TEST_PLAYBACK_EDITS", test_playback_edits);
time_format = file->tag.get_property("TIME_FORMAT", time_format);
+ timecode_offset = file->tag.get_property("TIMECODE_OFFSET", timecode_offset);
nudge_format = file->tag.get_property("NUDGE_FORMAT", nudge_format);
tool_window = file->tag.get_property("TOOL_WINDOW", tool_window);
vwindow_meter = file->tag.get_property("VWINDOW_METER", vwindow_meter);
file->tag.set_property("GANG_TRACKS", gang_tracks);
file->tag.set_property("TEST_PLAYBACK_EDITS", test_playback_edits);
file->tag.set_property("TIME_FORMAT", time_format);
+ file->tag.set_property("TIMECODE_OFFSET", timecode_offset);
file->tag.set_property("NUDGE_FORMAT", nudge_format);
file->tag.set_property("TOOL_WINDOW", tool_window);
file->tag.set_property("VWINDOW_METER", vwindow_meter);
gang_tracks = session->gang_tracks;
test_playback_edits = session->test_playback_edits;
time_format = session->time_format;
+ timecode_offset = session->timecode_offset;
nudge_format = session->nudge_format;
tool_window = session->tool_window;
for(int i = 0; i < MAXCHANNELS; i++) {
int test_playback_edits;
// Format to display times in
int time_format;
-// Offset for timecode
- int timecode_offset[4];
+// Offset for timecode units
+ double timecode_offset;
// Format to display nudge in, either seconds or track units.
int nudge_format;
// Show tool window in CWindow
{
this->idx = idx;
width = height = 0;
+ transpose = 0;
frame_rate = 0;
aspect_ratio = 0;
length = 0;
return !index_state ? 0 : index_state->video_markers[idx];
}
+double FFVideoStream::get_rotation_angle()
+{
+ int size = 0;
+ int *matrix = (int*)av_stream_get_side_data(st, AV_PKT_DATA_DISPLAYMATRIX, &size);
+ int len = size/sizeof(*matrix);
+ if( !matrix || len < 5 ) return 0;
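+ // the display matrix rotation coefficients are 16.16 fixed point; recover the angle from its upper 2x2 block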
+ const double s = 1/65536.;
+ double theta = (!matrix[0] && !matrix[3]) || (!matrix[1] && !matrix[4]) ? 0 :
+ atan2( s*matrix[1] / hypot(s*matrix[1], s*matrix[4]),
+ s*matrix[0] / hypot(s*matrix[0], s*matrix[3])) * 180/M_PI;
+ return theta;
+}
+
+void FFVideoStream::flip()
+{
+ transpose = 0;
+ if( !ffmpeg->file_base ) return;
+ double theta = get_rotation_angle(), tolerance = 1;
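+ // map the common 90/180/270 degree rotations to transpose/flip filters; other angles fall back to a rotate filter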
+ if( fabs(theta-0) < tolerance ) return;
+ if( fabs(theta-90) < tolerance ) {
+ create_filter("transpose=clock", st->codecpar);
+ transpose = 1;
+ }
+ else if( fabs(theta-180) < tolerance ) {
+ create_filter("hflip", st->codecpar);
+ create_filter("vflip", st->codecpar);
+ }
+ else if( fabs(theta-270) < tolerance ) {
+ create_filter("transpose=cclock", st->codecpar);
+ transpose = 1;
+ }
+ else {
+ char rotate[BCSTRLEN];
+ sprintf(rotate, "rotate=%f", theta*M_PI/180.);
+ create_filter(rotate, st->codecpar);
+ }
+}
+
FFMPEG::FFMPEG(FileBase *file_base)
{
return (AVRational) { freq, 1001*12 };
}
-AVRational FFMPEG::check_frame_rate(AVCodec *codec, double frame_rate)
+AVRational FFMPEG::check_frame_rate(const AVRational *p, double frame_rate)
{
- const AVRational *p = codec->supported_framerates;
AVRational rate, best_rate = (AVRational) { 0, 0 };
double max_err = 1.; int i = 0;
while( ((p ? (rate=*p++) : (rate=std_frame_rate(i++))), rate.num) != 0 ) {
int hrs = secs/3600; secs -= hrs*3600;
int mins = secs/60; secs -= mins*60;
report(" %d:%02d:%05.2f\n", hrs, mins, secs);
+ double theta = vid->get_rotation_angle();
+ if( fabs(theta) > 1 )
+ report(" rotation angle: %0.1f\n", theta);
}
if( ffaudio.size() > 0 )
report("\n%d audio stream%s\n",ffaudio.size(), ffaudio.size()!=1 ? "s" : "");
vid->reading = -1;
if( opt_video_filter )
ret = vid->create_filter(opt_video_filter, avpar);
+ if( file_base && file_base->file->preferences->auto_rotate )
+ vid->flip();
break; }
case AVMEDIA_TYPE_AUDIO: {
if( avpar->channels < 1 ) continue;
int mask_h = (1<<desc->log2_chroma_h)-1;
ctx->height = (vid->height+mask_h) & ~mask_h;
ctx->sample_aspect_ratio = to_sample_aspect_ratio(asset);
- AVRational frame_rate = check_frame_rate(codec, vid->frame_rate);
+ AVRational frame_rate = check_frame_rate(codec->supported_framerates, vid->frame_rate);
if( !frame_rate.num || !frame_rate.den ) {
eprintf(_("check_frame_rate failed %s\n"), filename);
ret = 1;
int FFMPEG::ff_video_width(int stream)
{
- return ffvideo[stream]->width;
+ FFVideoStream *vst = ffvideo[stream];
+ return !vst->transpose ? vst->width : vst->height;
}
int FFMPEG::ff_video_height(int stream)
{
- return ffvideo[stream]->height;
+ FFVideoStream *vst = ffvideo[stream];
+ return !vst->transpose ? vst->height : vst->width;
}
int FFMPEG::ff_set_video_width(int stream, int width)
{
- int w = ffvideo[stream]->width;
- ffvideo[stream]->width = width;
+ FFVideoStream *vst = ffvideo[stream];
+ int *vw = !vst->transpose ? &vst->width : &vst->height, w = *vw;
+ *vw = width;
return w;
}
int FFMPEG::ff_set_video_height(int stream, int height)
{
- int h = ffvideo[stream]->height;
- ffvideo[stream]->height = height;
+ FFVideoStream *vst = ffvideo[stream];
+ int *vh = !vst->transpose ? &vst->height : &vst->width, h = *vh;
+ *vh = height;
return h;
}
int FFMPEG::ff_cpus()
{
- return file_base->file->cpus;
+ return !file_base ? 1 : file_base->file->cpus;
}
const char *FFMPEG::ff_hw_dev()
filter_graph = avfilter_graph_alloc();
const AVFilter *buffersrc = avfilter_get_by_name("buffer");
const AVFilter *buffersink = avfilter_get_by_name("buffersink");
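+ // guard against a 0/0 sample_aspect_ratio before formatting the buffer source args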
+ int sa_num = avpar->sample_aspect_ratio.num;
+ if( !sa_num ) sa_num = 1;
+ int sa_den = avpar->sample_aspect_ratio.den;
+ if( !sa_den ) sa_den = 1;
int ret = 0; char args[BCTEXTLEN];
AVPixelFormat pix_fmt = (AVPixelFormat)avpar->format;
snprintf(args, sizeof(args),
"video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:pixel_aspect=%d/%d",
avpar->width, avpar->height, (int)pix_fmt,
- st->time_base.num, st->time_base.den,
- avpar->sample_aspect_ratio.num, avpar->sample_aspect_ratio.den);
+ st->time_base.num, st->time_base.den, sa_num, sa_den);
if( ret >= 0 )
ret = avfilter_graph_create_filter(&buffersrc_ctx, buffersrc, "in",
args, NULL, filter_graph);
return ret;
}
+
+AVCodecContext *FFMPEG::activate_decoder(AVStream *st)
+{
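+ // pick a decoder for this stream (honoring opt_video_decoder/opt_audio_decoder overrides and codec remaps),
+ // then allocate and open its context with our options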
+ AVDictionary *copts = 0;
+ av_dict_copy(&copts, opts, 0);
+ AVCodecID codec_id = st->codecpar->codec_id;
+ AVCodec *decoder = 0;
+ switch( st->codecpar->codec_type ) {
+ case AVMEDIA_TYPE_VIDEO:
+ if( opt_video_decoder )
+ decoder = avcodec_find_decoder_by_name(opt_video_decoder);
+ else
+ video_codec_remaps.update(codec_id, decoder);
+ break;
+ case AVMEDIA_TYPE_AUDIO:
+ if( opt_audio_decoder )
+ decoder = avcodec_find_decoder_by_name(opt_audio_decoder);
+ else
+ audio_codec_remaps.update(codec_id, decoder);
+ break;
+ default:
+ return 0;
+ }
+ if( !decoder && !(decoder = avcodec_find_decoder(codec_id)) ) {
+ eprintf(_("cant find decoder codec %d\n"), (int)codec_id);
+ return 0;
+ }
+ AVCodecContext *avctx = avcodec_alloc_context3(decoder);
+ if( !avctx ) {
+ eprintf(_("cant allocate codec context\n"));
+ return 0;
+ }
+ avcodec_parameters_to_context(avctx, st->codecpar);
+ if( !av_dict_get(copts, "threads", NULL, 0) )
+ avctx->thread_count = ff_cpus();
+ int ret = avcodec_open2(avctx, decoder, &copts);
+ av_dict_free(&copts);
+ if( ret < 0 ) {
+ avcodec_free_context(&avctx);
+ avctx = 0;
+ }
+ return avctx;
+}
+
int FFMPEG::scan(IndexState *index_state, int64_t *scan_position, int *canceled)
{
AVPacket pkt;
index_state->add_audio_markers(ffaudio.size());
for( int i=0; i<(int)fmt_ctx->nb_streams; ++i ) {
- int ret = 0;
- AVDictionary *copts = 0;
- av_dict_copy(&copts, opts, 0);
AVStream *st = fmt_ctx->streams[i];
- AVCodecID codec_id = st->codecpar->codec_id;
- AVCodec *decoder = 0;
- switch( st->codecpar->codec_type ) {
- case AVMEDIA_TYPE_VIDEO:
- if( opt_video_decoder )
- decoder = avcodec_find_decoder_by_name(opt_video_decoder);
- else
- video_codec_remaps.update(codec_id, decoder);
- break;
- case AVMEDIA_TYPE_AUDIO:
- if( opt_audio_decoder )
- decoder = avcodec_find_decoder_by_name(opt_audio_decoder);
- else
- audio_codec_remaps.update(codec_id, decoder);
- break;
- default:
- continue;
- }
- if( !decoder && !(decoder = avcodec_find_decoder(codec_id)) )
- continue;
- AVCodecContext *avctx = avcodec_alloc_context3(decoder);
- if( !avctx ) {
- eprintf(_("cant allocate codec context\n"));
- ret = AVERROR(ENOMEM);
- }
- if( ret >= 0 ) {
- avcodec_parameters_to_context(avctx, st->codecpar);
- if( !av_dict_get(copts, "threads", NULL, 0) )
- avctx->thread_count = ff_cpus();
- ret = avcodec_open2(avctx, decoder, &copts);
- }
- av_dict_free(&copts);
- if( ret >= 0 ) {
+ AVCodecContext *avctx = activate_decoder(st);
+ if( avctx ) {
AVCodecParameters *avpar = st->codecpar;
switch( avpar->codec_type ) {
case AVMEDIA_TYPE_VIDEO: {
}
}
+
+/*
+ * 1) if the format context has a timecode
+ *      return fmt_ctx->timecode - 0
+ * 2) if the requested layer/channel stream has a timecode
+ *      return st->timecode - (start_time-nudge)
+ * 3) find the 1st program that references this stream, then that program's 1st video stream;
+ *      if that video stream has a timecode, return st->timecode - (start_time-nudge)
+ * 4) find a timecode in any stream, return st->timecode
+ * 5) read up to 100 packets, saving ofs = pkt.pts*st->time_base - nudge:
+ *      decode frames from the selected video stream;
+ *      if the frame metadata has a timecode, return frame timecode - ofs
+ *      if the side_data has a gop timecode, return gop timecode - ofs
+ *      if the side_data has an smpte timecode, return smpte timecode - ofs
+ * 6) if the filename/url scans as *date_time.ext, return date_time
+ * 7) if stat works on the filename/url, return its mtime
+ * 8) otherwise return -1 (failure)
+ */
+double FFMPEG::get_initial_timecode(int data_type, int channel, double frame_rate)
+{
+ AVRational rate = check_frame_rate(0, frame_rate);
+ if( !rate.num ) return -1;
+// format context timecode
+ AVDictionaryEntry *tc = av_dict_get(fmt_ctx->metadata, "timecode", 0, 0);
+ if( tc ) return ff_get_timecode(tc->value, rate, 0);
+// stream timecode
+ if( open_decoder() ) return -1;
+ AVStream *st = 0;
+ int64_t nudge = 0;
+ int codec_type = -1, fidx = -1;
+ switch( data_type ) {
+ case TRACK_AUDIO: {
+ codec_type = AVMEDIA_TYPE_AUDIO;
+ int aidx = astrm_index[channel].st_idx;
+ FFAudioStream *aud = ffaudio[aidx];
+ fidx = aud->fidx;
+ nudge = aud->nudge;
+ st = aud->st;
+ break; }
+ case TRACK_VIDEO: {
+ codec_type = AVMEDIA_TYPE_VIDEO;
+ int vidx = vstrm_index[channel].st_idx;
+ FFVideoStream *vid = ffvideo[vidx];
+ fidx = vid->fidx;
+ nudge = vid->nudge;
+ st = vid->st;
+ break; }
+ }
+ if( codec_type < 0 ) return -1;
+ if( st )
+ tc = av_dict_get(st->metadata, "timecode", 0, 0);
+ if( !tc ) {
+ st = 0;
+// find first program which references this stream
+ int pidx = -1;
+ for( int i=0, m=fmt_ctx->nb_programs; pidx<0 && i<m; ++i ) {
+ AVProgram *pgrm = fmt_ctx->programs[i];
+ for( int j=0, n=pgrm->nb_stream_indexes; j<n; ++j ) {
+ int st_idx = pgrm->stream_index[j];
+ if( st_idx == fidx ) { pidx = i; break; }
+ }
+ }
+ fidx = -1;
+ if( pidx >= 0 ) {
+ AVProgram *pgrm = fmt_ctx->programs[pidx];
+ for( int j=0, n=pgrm->nb_stream_indexes; j<n; ++j ) {
+ int st_idx = pgrm->stream_index[j];
+ AVStream *tst = fmt_ctx->streams[st_idx];
+ if( !tst ) continue;
+ if( tst->codecpar->codec_type == AVMEDIA_TYPE_VIDEO ) {
+ st = tst; fidx = st_idx;
+ break;
+ }
+ }
+ }
+ else {
+ for( int i=0, n=fmt_ctx->nb_streams; i<n; ++i ) {
+ AVStream *tst = fmt_ctx->streams[i];
+ if( !tst ) continue;
+ if( tst->codecpar->codec_type == AVMEDIA_TYPE_VIDEO ) {
+ st = tst; fidx = i;
+ break;
+ }
+ }
+ }
+ if( st )
+ tc = av_dict_get(st->metadata, "timecode", 0, 0);
+ }
+
+ if( !tc ) {
+ // any timecode, includes -data- streams
+ for( int i=0, n=fmt_ctx->nb_streams; i<n; ++i ) {
+ AVStream *tst = fmt_ctx->streams[i];
+ if( !tst ) continue;
+ if( (tc = av_dict_get(tst->metadata, "timecode", 0, 0)) ) {
+ st = tst; fidx = i;
+ break;
+ }
+ }
+ }
+
+ if( st && st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO ) {
+ if( st->r_frame_rate.num && st->r_frame_rate.den )
+ rate = st->r_frame_rate;
+ nudge = st->start_time;
+ for( int i=0; i<ffvideo.size(); ++i ) {
+ if( ffvideo[i]->st == st ) {
+ nudge = ffvideo[i]->nudge;
+ break;
+ }
+ }
+ }
+
+ if( tc ) { // return timecode
+ double secs = st->start_time == AV_NOPTS_VALUE ? 0 :
+ to_secs(st->start_time - nudge, st->time_base);
+ return ff_get_timecode(tc->value, rate, secs);
+ }
+
+ if( !st || fidx < 0 ) return -1;
+
+ decode_activate();
+ AVCodecContext *av_ctx = activate_decoder(st);
+ if( !av_ctx ) {
+ fprintf(stderr,"activate_decoder failed\n");
+ return -1;
+ }
+ avCodecContext avctx(av_ctx); // auto deletes
+ if( avctx->codec_type == AVMEDIA_TYPE_VIDEO &&
+ avctx->framerate.num && avctx->framerate.den )
+ rate = avctx->framerate;
+
+ avPacket pkt; // auto deletes
+ avFrame frame; // auto deletes
+ if( !frame ) {
+ fprintf(stderr,"av_frame_alloc failed\n");
+ return -1;
+ }
+ int errs = 0;
+ int64_t max_packets = 100;
+ char tcbuf[AV_TIMECODE_STR_SIZE];
+
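+ // step 5: read up to 100 packets, decoding only the selected stream, looking for a frame metadata, GOP, or SMPTE timecode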
+ for( int64_t count=0; count<max_packets; ++count ) {
+ av_packet_unref(pkt);
+ pkt->data = 0; pkt->size = 0;
+
+ int ret = av_read_frame(fmt_ctx, pkt);
+ if( ret < 0 ) {
+ if( ret == AVERROR_EOF ) break;
+ if( ++errs > 100 ) {
+ fprintf(stderr,"over 100 read_frame errs\n");
+ break;
+ }
+ continue;
+ }
+ if( !pkt->data ) continue;
+ int i = pkt->stream_index;
+ if( i != fidx ) continue;
+ int64_t tstmp = pkt->pts;
+ if( tstmp == AV_NOPTS_VALUE ) tstmp = pkt->dts;
+ double secs = to_secs(tstmp - nudge, st->time_base);
+ ret = avcodec_send_packet(avctx, pkt);
+ if( ret < 0 ) return -1;
+
+ while( (ret = avcodec_receive_frame(avctx, frame)) >= 0 ) {
+ if( (tc = av_dict_get(frame->metadata, "timecode", 0, 0)) )
+ return ff_get_timecode(tc->value, rate, secs);
+ int k = frame->nb_side_data;
+ AVFrameSideData *side_data = 0;
+ while( --k >= 0 ) {
+ side_data = frame->side_data[k];
+ switch( side_data->type ) {
+ case AV_FRAME_DATA_GOP_TIMECODE: {
+ int64_t data = *(int64_t *)side_data->data;
+ int sz = sizeof(data);
+ if( side_data->size >= sz ) {
+ av_timecode_make_mpeg_tc_string(tcbuf, data);
+ return ff_get_timecode(tcbuf, rate, secs);
+ }
+ break; }
+ case AV_FRAME_DATA_S12M_TIMECODE: {
+ uint32_t *data = (uint32_t *)side_data->data;
+ int n = data[0], sz = (n+1)*sizeof(*data);
+ if( side_data->size >= sz ) {
+ av_timecode_make_smpte_tc_string(tcbuf, data[n], 0);
+ return ff_get_timecode(tcbuf, rate, secs);
+ }
+ break; }
+ default:
+ break;
+ }
+ }
+ }
+ }
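+ // step 6: look for a YYYYMMDD_HHMMSS (or YYYYMMDD-HHMMSS) date_time suffix just before the filename extension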
+ char *path = fmt_ctx->url;
+ char *bp = strrchr(path, '/');
+ if( !bp ) bp = path; else ++bp;
+ char *cp = strrchr(bp, '.');
+ if( cp && (cp-=(8+1+6)) >= bp ) {
+ char sep[BCSTRLEN];
+ int year,mon,day, hour,min,sec, frm=0;
+ if( sscanf(cp,"%4d%2d%2d%[_-]%2d%2d%2d",
+ &year,&mon,&day, sep, &hour,&min,&sec) == 7 ) {
+ int ch = sep[0];
+ // year>=1970,mon=1..12,day=1..31, hour=0..23,min=0..59,sec=0..60
+ if( (ch=='_' || ch=='-' ) &&
+ year >= 1970 && mon>=1 && mon<=12 && day>=1 && day<=31 &&
+ hour>=0 && hour<24 && min>=0 && min<60 && sec>=0 && sec<=60 ) {
+ sprintf(tcbuf,"%d:%02d:%02d:%02d", hour,min,sec, frm);
+ return ff_get_timecode(tcbuf, rate, 0);
+ }
+ }
+ }
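+ // step 7: last resort, derive the timecode from the file modification time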
+ struct stat tst;
+ if( stat(path, &tst) >= 0 ) {
+ time_t t = (time_t)tst.st_mtim.tv_sec;
+ struct tm tm;
+ localtime_r(&t, &tm);
+ int64_t us = tst.st_mtim.tv_nsec / 1000;
+ int frm = us/1000000. * frame_rate;
+ sprintf(tcbuf,"%d:%02d:%02d:%02d", tm.tm_hour, tm.tm_min, tm.tm_sec, frm);
+ return ff_get_timecode(tcbuf, rate, 0);
+ }
+ return -1;
+}
+
+double FFMPEG::ff_get_timecode(char *str, AVRational rate, double pos)
+{
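+ // parse hh:mm:ss:ff via av_timecode_init_from_string and subtract the position where it was sampled,
+ // giving the timecode at media start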
+ AVTimecode tc;
+ if( av_timecode_init_from_string(&tc, rate, str, fmt_ctx) )
+ return -1;
+ double secs = (double)tc.start / tc.fps - pos;
+ if( secs < 0 ) secs = 0;
+ return secs;
+}
+
+double FFMPEG::get_timecode(const char *path, int data_type, int channel, double rate)
+{
+ FFMPEG ffmpeg(0);
+ if( ffmpeg.init_decoder(path) ) return -1;
+ return ffmpeg.get_initial_timecode(data_type, channel, rate);
+}
+
#include "libavutil/pixdesc.h"
#include "libswresample/swresample.h"
#include "libswscale/swscale.h"
+#include "libavutil/parseutils.h"
+#include "libavutil/timecode.h"
}
class FFPacket {
int video_seek(int64_t pos);
int encode(VFrame *vframe);
int drain();
+ double get_rotation_angle();
+ void flip();
int idx;
double frame_rate;
- int width, height;
+ int width, height, transpose;
int64_t length;
float aspect_ratio;
int update(AVCodecID &codec_id, AVCodec *&decoder);
};
+// RAII wrappers for get_initial_timecode; they auto delete when they go out of scope
+class avFrame {
+ AVFrame *frm;
+public:
+ avFrame() { frm = av_frame_alloc(); }
+ ~avFrame() { av_frame_free(&frm); }
+ operator AVFrame *() { return frm; }
+ AVFrame *operator ->() { return frm; }
+};
+
+class avPacket {
+ AVPacket pkt;
+public:
+ avPacket() {
+ av_init_packet(&pkt);
+ pkt.data = 0; pkt.size = 0;
+ }
+ ~avPacket() { av_packet_unref(&pkt); }
+ operator AVPacket *() { return &pkt; }
+ AVPacket *operator ->() { return &pkt; }
+};
+
+class avCodecContext {
+ AVCodecContext *avctx;
+public:
+ avCodecContext(AVCodecContext *ctx) { avctx = ctx; }
+ ~avCodecContext() { avcodec_free_context(&avctx); }
+ operator AVCodecContext *() { return avctx; }
+ AVCodecContext *operator ->() { return avctx; }
+};
+
+
class FFMPEG : public Thread {
public:
static Mutex fflock;
static void ff_unlock() { fflock.unlock(); }
int check_sample_rate(AVCodec *codec, int sample_rate);
- AVRational check_frame_rate(AVCodec *codec, double frame_rate);
+ AVRational check_frame_rate(const AVRational *p, double frame_rate);
AVRational to_sample_aspect_ratio(Asset *asset);
AVRational to_time_base(int sample_rate);
static int get_fmt_score(AVSampleFormat dst_fmt, AVSampleFormat src_fmt);
int total_audio_channels();
int total_video_channels();
+ double get_initial_timecode(int data_type, int channel, double frame_rate);
int audio_seek(int ch, int64_t pos);
int video_seek(int layer, int64_t pos);
FFMPEG(FileBase *file_base=0);
~FFMPEG();
+ AVCodecContext *activate_decoder(AVStream *st);
int scan(IndexState *index_state, int64_t *scan_position, int *canceled);
int ff_audio_stream(int channel) { return astrm_index[channel].st_idx; }
int64_t ff_video_frames(int stream);
int ff_video_pid(int stream);
int ff_video_mpeg_color_range(int stream);
+ double ff_get_timecode(char *str, AVRational rate, double pos);
+ static double get_timecode(const char *path, int data_type, int channel, double rate);
int ff_cpus();
const char *ff_hw_dev();
lock_window("MainClock::update");
char string[BCTEXTLEN];
position += position_offset;
+ double timecode_offset =
+ mwindow->edl->session->time_format == TIME_TIMECODE ?
+ mwindow->get_timecode_offset() : 0 ;
Units::totext(string, position,
mwindow->edl->session->time_format,
mwindow->edl->session->sample_rate,
mwindow->edl->session->frame_rate,
- mwindow->edl->session->frames_per_foot);
+ mwindow->edl->session->frames_per_foot,
+ timecode_offset);
BC_Title::update(string);
unlock_window();
}
windowmenu->add_item(split_x = new SplitX(mwindow));
windowmenu->add_item(split_y = new SplitY(mwindow));
windowmenu->add_item(mixer_items = new MixerItems(mwindow));
+ windowmenu->add_item(align_timecodes = new AlignTimecodes(mwindow));
mixer_items->create_objects();
windowmenu->add_item(new TileWindows(mwindow,_("Tile left"),0));
windowmenu->add_item(new TileWindows(mwindow,_("Tile right"),1));
}
+AlignTimecodes::AlignTimecodes(MWindow *mwindow)
+ : BC_MenuItem(_("Align Timecodes"))
+{
+ this->mwindow = mwindow;
+}
+
+int AlignTimecodes::handle_event()
+{
+ mwindow->align_timecodes();
+ return 1;
+}
+
+
LoadLayoutItem::LoadLayoutItem(LoadLayout *load_layout, const char *text, int idx, int hotkey)
: BC_MenuItem(text, "", hotkey)
{
SplitX *split_x;
SplitY *split_y;
MixerItems *mixer_items;
+ AlignTimecodes *align_timecodes;
LoadLayout *load_layout;
LoadLayout *save_layout;
};
int handle_event();
};
+class AlignTimecodes : public BC_MenuItem
+{
+public:
+ AlignTimecodes(MWindow *mwindow);
+ int handle_event();
+ MWindow *mwindow;
+};
+
// ======================================== audio
class AddAudioTrack : public BC_MenuItem
class MixerViewer;
class TileMixers;
class AlignMixers;
+class AlignTimecodes;
class AddAudioTrack;
class DeleteAudioTrack;
class DefaultATransition;
1, // use_goto
0, // use_clk2play
0, // use_scope
- 1) // use_gang_tracks
+ 1, // use_gang_tracks
+ 1) // use_timecode
{
this->mwindow = mwindow;
this->mbuttons = mbuttons;
default_asset->height = mwindow->edl->session->output_h;
}
- int strategy = Render::get_strategy(mwindow->preferences->use_renderfarm, use_labels);
+ int range = File::is_image_render(default_asset->format) ?
+ RANGE_1FRAME : RANGE_SELECTION;
+ int strategy = Render::get_strategy(mwindow->preferences->use_renderfarm, use_labels, range);
// Process the total length in fragments
ArrayList<MenuEffectPacket*> packets;
if(!result)
}
case TIME_FRAMES:
+ case TIME_TIMECODE:
case TIME_HMSF:
// One frame per text mark
if(frame_seconds >= min_time)
// Set tick interval
tick_interval = text_interval;
+ double timecode_offset = 0;
switch(mwindow->edl->session->time_format)
{
- case TIME_HMSF:
+ case TIME_TIMECODE:
+ timecode_offset = mwindow->get_timecode_offset(); // fall thru
case TIME_FEET_FRAMES:
+ case TIME_HMSF:
case TIME_FRAMES:
if(frame_seconds / time_per_pixel > TICK_SPACING)
tick_interval = frame_seconds;
mwindow->edl->session->time_format,
sample_rate,
mwindow->edl->session->frame_rate,
- mwindow->edl->session->frames_per_foot);
+ mwindow->edl->session->frames_per_foot,
+ timecode_offset);
set_color(get_resources()->default_text_color);
set_font(MEDIUMFONT);
add_item(items[1] = new TimeBarPopupItem(mwindow,
this, TIME_HMSF_TEXT, TIME_HMSF));
add_item(items[2] = new TimeBarPopupItem(mwindow,
- this, TIME_FRAMES_TEXT, TIME_FRAMES));
+ this, TIME_TIMECODE_TEXT, TIME_TIMECODE));
add_item(items[3] = new TimeBarPopupItem(mwindow,
- this, TIME_SAMPLES_TEXT, TIME_SAMPLES));
+ this, TIME_FRAMES_TEXT, TIME_FRAMES));
add_item(items[4] = new TimeBarPopupItem(mwindow,
- this, TIME_SAMPLES_HEX_TEXT, TIME_SAMPLES_HEX));
+ this, TIME_SAMPLES_TEXT, TIME_SAMPLES));
add_item(items[5] = new TimeBarPopupItem(mwindow,
- this, TIME_SECONDS_TEXT, TIME_SECONDS));
+ this, TIME_SAMPLES_HEX_TEXT, TIME_SAMPLES_HEX));
add_item(items[6] = new TimeBarPopupItem(mwindow,
+ this, TIME_SECONDS_TEXT, TIME_SECONDS));
+ add_item(items[7] = new TimeBarPopupItem(mwindow,
this, TIME_FEET_FRAMES_TEXT, TIME_FEET_FRAMES));
}
if( load_mode == LOADMODE_REPLACE ||
load_mode == LOADMODE_REPLACE_CONCATENATE ) {
+ edl->session->timecode_offset = 0;
delete gui->keyvalue_popup;
gui->keyvalue_popup = 0;
gui->load_panes();
for( int i=stack.size(); --i>=0; ) {
StackItem &item = stack[i];
Indexable *idxbl = item.idxbl;
+ if( !idxbl ) continue;
if( idxbl->is_asset ) {
Asset *asset = (Asset *)idxbl;
if( asset->format == FILE_REF ) {
return 1;
}
}
- else if( item.new_edl != item.idxbl )
- item.new_edl->overwrite_clip((EDL*)item.idxbl);
+ else if( item.new_edl != idxbl )
+ item.new_edl->overwrite_clip((EDL*)idxbl);
}
EDL *new_edl = stack.size() ? stack[0].edl : edl;
save(new_edl, path, 1);
void MWindow::next_time_format()
{
- switch(edl->session->time_format)
- {
- case TIME_HMS: edl->session->time_format = TIME_HMSF; break;
- case TIME_HMSF: edl->session->time_format = TIME_SAMPLES; break;
- case TIME_SAMPLES: edl->session->time_format = TIME_SAMPLES_HEX; break;
- case TIME_SAMPLES_HEX: edl->session->time_format = TIME_FRAMES; break;
- case TIME_FRAMES: edl->session->time_format = TIME_FEET_FRAMES; break;
- case TIME_FEET_FRAMES: edl->session->time_format = TIME_SECONDS; break;
- case TIME_SECONDS: edl->session->time_format = TIME_HMS; break;
+ switch( edl->session->time_format ) {
+ case TIME_HMS: edl->session->time_format = TIME_HMSF; break;
+ case TIME_HMSF: edl->session->time_format = TIME_TIMECODE; break;
+ case TIME_TIMECODE: edl->session->time_format = TIME_FRAMES; break;
+ case TIME_FRAMES: edl->session->time_format = TIME_SAMPLES; break;
+ case TIME_SAMPLES: edl->session->time_format = TIME_SAMPLES_HEX; break;
+ case TIME_SAMPLES_HEX: edl->session->time_format = TIME_SECONDS; break;
+ case TIME_SECONDS: edl->session->time_format = TIME_FEET_FRAMES; break;
+ case TIME_FEET_FRAMES: edl->session->time_format = TIME_HMS; break;
}
-
time_format_common();
}
void MWindow::prev_time_format()
{
- switch(edl->session->time_format)
- {
- case TIME_HMS: edl->session->time_format = TIME_SECONDS; break;
- case TIME_SECONDS: edl->session->time_format = TIME_FEET_FRAMES; break;
- case TIME_FEET_FRAMES: edl->session->time_format = TIME_FRAMES; break;
- case TIME_FRAMES: edl->session->time_format = TIME_SAMPLES_HEX; break;
- case TIME_SAMPLES_HEX: edl->session->time_format = TIME_SAMPLES; break;
- case TIME_SAMPLES: edl->session->time_format = TIME_HMSF; break;
- case TIME_HMSF: edl->session->time_format = TIME_HMS; break;
+ switch( edl->session->time_format ) {
+ case TIME_HMS: edl->session->time_format = TIME_FEET_FRAMES; break;
+ case TIME_HMSF: edl->session->time_format = TIME_HMS; break;
+ case TIME_TIMECODE: edl->session->time_format = TIME_HMSF; break;
+ case TIME_FRAMES: edl->session->time_format = TIME_TIMECODE; break;
+ case TIME_SAMPLES: edl->session->time_format = TIME_FRAMES; break;
+ case TIME_SAMPLES_HEX: edl->session->time_format = TIME_SAMPLES; break;
+ case TIME_SECONDS: edl->session->time_format = TIME_SAMPLES_HEX; break;
+ case TIME_FEET_FRAMES: edl->session->time_format = TIME_SECONDS; break;
}
time_format_common();
void toggle_projector_xyz();
double get_position();
void set_position(double position);
+ double get_timecode_offset();
+ void set_timecode_offset(double offset);
+ void align_timecodes();
// seek to labels
// shift_down must be passed by the caller because different windows call
group->remove_user();
}
+void MWindow::align_timecodes()
+{
+ undo_before();
+ double offset = edl->tracks->align_timecodes();
+ set_timecode_offset(offset);
+ save_backup();
+ undo_after(_("align timecodes"), LOAD_ALL);
+ restart_brender();
+ cwindow->refresh_frame(CHANGE_EDL);
+ update_plugin_guis();
+}
+
double MWindow::get_position()
{
- return edl->local_session->get_selectionstart(1);
+ return edl->local_session->get_selectionstart(1);
}
void MWindow::set_position(double position)
{
- if( position != get_position() ) {
- if( position < 0 ) position = 0;
- edl->local_session->set_selectionstart(position);
- edl->local_session->set_selectionend(position);
- gui->lock_window();
- find_cursor();
- gui->update(1, NORMAL_DRAW, 1, 1, 1, 1, 0);
- gui->unlock_window();
- cwindow->update(1, 0, 0, 0, 0);
- }
+ if( position != get_position() ) {
+ if( position < 0 ) position = 0;
+ edl->local_session->set_selectionstart(position);
+ edl->local_session->set_selectionend(position);
+ gui->lock_window();
+ find_cursor();
+ gui->update(1, NORMAL_DRAW, 1, 1, 1, 1, 0);
+ gui->unlock_window();
+ cwindow->update(1, 0, 0, 0, 0);
+ }
+}
+
+
+double MWindow::get_timecode_offset()
+{
+ return edl->session->timecode_offset;
+}
+
+void MWindow::set_timecode_offset(double offset)
+{
+ edl->session->time_format = TIME_TIMECODE;
+ edl->session->timecode_offset = offset;
+ gui->lock_window();
+ gui->update(1, NORMAL_DRAW, 1, 1, 1, 1, 0);
+ gui->unlock_window();
+ cwindow->update(1, 0, 0, 0, 0);
}
#define LOAD_CONFIGURATION_MACRO(plugin_class, config_class) \
int plugin_class::load_configuration() \
{ \
- KeyFrame *prev_keyframe, *next_keyframe; \
- prev_keyframe = get_prev_keyframe(get_source_position()); \
- next_keyframe = get_next_keyframe(get_source_position()); \
- \
- int64_t next_position = edl_to_local(next_keyframe->position); \
+ KeyFrame * prev_keyframe = get_prev_keyframe(get_source_position()); \
int64_t prev_position = edl_to_local(prev_keyframe->position); \
- \
config_class old_config, prev_config, next_config; \
old_config.copy_from(config); \
read_data(prev_keyframe); \
prev_config.copy_from(config); \
- read_data(next_keyframe); \
- next_config.copy_from(config); \
- \
- config.interpolate(prev_config, \
- next_config, \
- (next_position == prev_position) ? \
- get_source_position() : \
- prev_position, \
- (next_position == prev_position) ? \
- get_source_position() + 1 : \
- next_position, \
- get_source_position()); \
+ KeyFrame * next_keyframe = get_next_keyframe(get_source_position()); \
+ if( next_keyframe ) { \
+ int64_t next_position = edl_to_local(next_keyframe->position); \
+ read_data(next_keyframe); \
+ next_config.copy_from(config); \
\
- if(!config.equivalent(old_config)) \
- return 1; \
- else \
- return 0; \
+ config.interpolate(prev_config, next_config, \
+ (next_position == prev_position) ? \
+ get_source_position() : prev_position, \
+ (next_position == prev_position) ? \
+ get_source_position() + 1 : next_position, \
+ get_source_position()); \
+ } \
+ return !config.equivalent(old_config) ? 1 : 0; \
}
popupmenu_btnup = 1;
grab_input_focus = 1;
textbox_focus_policy = 0;
+ auto_rotate = 1;
forward_render_displacement = 0;
dvd_yuv420p_interlace = 0;
highlight_inverse = 0xffffff;
popupmenu_btnup = that->popupmenu_btnup;
grab_input_focus = that->grab_input_focus;
textbox_focus_policy = that->textbox_focus_policy;
+ auto_rotate = that->auto_rotate;
forward_render_displacement = that->forward_render_displacement;
dvd_yuv420p_interlace = that->dvd_yuv420p_interlace;
highlight_inverse = that->highlight_inverse;
popupmenu_btnup = defaults->get("POPUPMENU_BTNUP", popupmenu_btnup);
grab_input_focus = defaults->get("GRAB_FOCUS", grab_input_focus);
textbox_focus_policy = defaults->get("TEXTBOX_FOCUS_POLICY", textbox_focus_policy);
+ auto_rotate = defaults->get("AUTO_ROTATE", auto_rotate);
forward_render_displacement = defaults->get("FORWARD_RENDER_DISPLACEMENT", forward_render_displacement);
dvd_yuv420p_interlace = defaults->get("DVD_YUV420P_INTERLACE", dvd_yuv420p_interlace);
highlight_inverse = defaults->get("HIGHLIGHT_INVERSE", highlight_inverse);
defaults->update("POPUPMENU_BTNUP", popupmenu_btnup);
defaults->update("GRAB_FOCUS", grab_input_focus);
defaults->update("TEXTBOX_FOCUS_POLICY", textbox_focus_policy);
+ defaults->update("AUTO_ROTATE", auto_rotate);
defaults->update("FORWARD_RENDER_DISPLACEMENT", forward_render_displacement);
defaults->update("DVD_YUV420P_INTERLACE", dvd_yuv420p_interlace);
defaults->update("HIGHLIGHT_INVERSE", highlight_inverse);
int popupmenu_btnup;
// textbox focus policy: click, leave
int textbox_focus_policy;
+// apply display_matrix rotation, ffmpeg only
+ int auto_rotate;
// forward playback starts next frame, not this frame
int forward_render_displacement;
// use dvd yuv420p interlace format
return 0;
}
-int Render::get_strategy(int use_renderfarm, int use_labels)
+int Render::get_strategy(int use_renderfarm, int use_labels, int range_type)
{
- return use_renderfarm ?
- (use_labels ? FILE_PER_LABEL_FARM : SINGLE_PASS_FARM) :
- (use_labels ? FILE_PER_LABEL : SINGLE_PASS ) ;
+ return range_type == RANGE_1FRAME ? SINGLE_PASS :
+ use_renderfarm ?
+ (use_labels ? FILE_PER_LABEL_FARM : SINGLE_PASS_FARM) :
+ (use_labels ? FILE_PER_LABEL : SINGLE_PASS ) ;
}
int Render::get_strategy()
{
- return get_strategy(preferences->use_renderfarm, use_labels);
+ return get_strategy(preferences->use_renderfarm, use_labels, range_type);
}
void Render::start_progress()
// This should be integrated into the Asset Class.
static int check_asset(EDL *edl, Asset &asset);
// strategy to conform with using renderfarm.
- static int get_strategy(int use_renderfarm, int use_labels);
+ static int get_strategy(int use_renderfarm, int use_labels, int range_type);
int get_strategy();
// Force filename to have a 0 padded number if rendering to a list.
int check_numbering(Asset &asset);
}
}
+double Tracks::align_timecodes()
+{
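+ // the common offset is the earliest timecode over all armed tracks; each armed track is then aligned to it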
+ double offset = -1;
+ for( Track *track=edl->tracks->first; track; track=track->next ) {
+ if( !track->is_armed() ) continue;
+ double early_offset = track->edits->early_timecode();
+ if( offset < 0 || offset > early_offset )
+ offset = early_offset;
+ }
+ if( offset >= 0 ) {
+ for( Track *track=edl->tracks->first; track; track=track->next ) {
+ if( !track->is_armed() ) continue;
+ track->edits->align_timecodes(offset);
+ }
+ }
+ return offset;
+}
+
void translate_camera(float dx, float dy, int all=0);
void crop_resize(float x, float y, float z);
void crop_shrink(float x, float y, float z);
+ double align_timecodes();
int total_of(int type);
Track* get_track_by_id(int id);
0, // use_goto
1, // use_clk2play
1, // use_scope
- 0) // use_gang_tracks
+ 0, // use_gang_tracks
+ 0) // use_timecode
{
this->mwindow = mwindow;
this->vwindow = vwindow;
mwindow->edl->session->time_format,
mwindow->edl->session->sample_rate,
mwindow->edl->session->frame_rate,
- mwindow->edl->session->frames_per_foot);
+ mwindow->edl->session->frames_per_foot,
+ mwindow->get_timecode_offset());
//printf("FromTextBox::update_position %f %s\n", new_position, string);
update(string);
return 0;
mwindow->edl->session->time_format,
mwindow->edl->session->sample_rate,
mwindow->edl->session->frame_rate,
- mwindow->edl->session->frames_per_foot);
+ mwindow->edl->session->frames_per_foot,
+ mwindow->get_timecode_offset());
update(string);
return 0;
}
mwindow->edl->session->sample_rate,
mwindow->edl->session->time_format,
mwindow->edl->session->frame_rate,
- mwindow->edl->session->frames_per_foot);
+ mwindow->edl->session->frames_per_foot,
+ mwindow->get_timecode_offset());
total_samples /= mwindow->theme->mcanvas_w -
mwindow->theme->patchbay_w -
BC_ScrollBar::get_span(SCROLL_VERT);
<td height="26" align="left"><b><font face="Liberation Serif" size=4> Drag/Drop</font></b></td>
<td align="left"><font face="Liberation Serif" size=4>Clear Select</font></td>
<td align="left"><font face="Liberation Serif" size=4>Ctrl-Shift-A</font></td>
- <td align="left"><font face="Liberation Serif" size=4>Delselect all selected edits</font></td>
+ <td align="left"><font face="Liberation Serif" size=4>De-select all selected edits</font></td>
</tr>
<tr>
<td height="26" align="left"><b><font face="Liberation Serif" size=4> Edits </font></b></td>
#xfade ###Input/output error
#arnndn ###Invalid argument
#dnn_processing ###Invalid argument
+#tonemap_vaapi ###Operation not permitted
+#afirsrc ###Operation not permitted
#; the ffmpeg system library on Arch has these errors:
#ladspa
#ass
}
if( bytes_per_line < 0 )
bytes_per_line = w * calculate_pixelsize(color_model);
- return h * bytes_per_line + 4;
+ return h * bytes_per_line + BC_COLOR_ALIGN;
}
int BC_CModels::bc_to_x(int color_model)
#define FOURCC_YUV2 0x32595559 /* YUV2 YUV422 */
#define FOURCC_UYVY 0x59565955 /* UYVY UVY422 */
#define FOURCC_I420 0x30323449 /* I420 Intel Indeo 4 */
-
+#define BC_COLOR_ALIGN 64 /* overwrite padding, ssse3 */
#endif // !BC_TRANSPARENCY
// give text representation as time
char* Units::totext(char *text, double seconds, int time_format,
- int sample_rate, float frame_rate, float frames_per_foot)
+ int sample_rate, float frame_rate, float frames_per_foot,
+ double timecode_offset)
{
int64_t hour, feet, frame;
int minute, second, thousandths;
sprintf(text, "%02d:%02d:%02d", (int)hour, minute, second);
break; }
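+	// Timecode display: add the session start offset, then format exactly like HMSF.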
+ case TIME_TIMECODE:
+ seconds += timecode_offset; // fall thru
case TIME_HMSF: {
seconds = fabs(seconds) + 1.0e-6;
hour = seconds/3600;
// give text representation as time
char* Units::totext(char *text, int64_t samples, int samplerate,
- int time_format, float frame_rate, float frames_per_foot)
+ int time_format, float frame_rate, float frames_per_foot,
+ double timecode_offset)
{
return totext(text, (double)samples/samplerate, time_format,
- samplerate, frame_rate, frames_per_foot);
+ samplerate, frame_rate, frames_per_foot, timecode_offset);
}
int64_t Units::get_int64(const char *&bp)
}
int64_t Units::fromtext(const char *text, int samplerate, int time_format,
- float frame_rate, float frames_per_foot)
+ float frame_rate, float frames_per_foot,
+ double timecode_offset)
{
int64_t hours, total_samples;
int minutes, frames, feet;
total_seconds = seconds + minutes*60 + hours*3600;
break; }
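+	// Timecode input parses like HMSF; the session start offset is subtracted below.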
+ case TIME_TIMECODE:
case TIME_HMSF: {
hours = get_int64(text); skip_seperators(text);
minutes = get_int64(text); skip_seperators(text);
seconds = get_int64(text); skip_seperators(text);
frames = get_int64(text);
total_seconds = frames/frame_rate + seconds + minutes*60 + hours*3600;
+ if( time_format == TIME_TIMECODE )
+ total_seconds -= timecode_offset;
break; }
case TIME_SAMPLES: {
}
double Units::text_to_seconds(const char *text, int samplerate, int time_format,
- float frame_rate, float frames_per_foot)
+ float frame_rate, float frames_per_foot,
+ double timecode_offset)
{
return (double)fromtext(text, samplerate, time_format,
- frame_rate, frames_per_foot) / samplerate;
+ frame_rate, frames_per_foot, timecode_offset) / samplerate;
}
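+// Illustrative only (example values, not part of the patch): round-tripping a
+// position through the timecode format with a 1 hour (3600 s) start offset:
+//   char buf[64];
+//   Units::totext(buf, 12.0, TIME_TIMECODE, 48000, 30.0, 16, 3600.0);     // ~ "1:00:12:00"
+//   Units::text_to_seconds(buf, 48000, TIME_TIMECODE, 30.0, 16, 3600.0);  // ~ 12.0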
if (!strcmp(tcf,TIME_HMS2__STR)) return(TIME_HMS2);
if (!strcmp(tcf,TIME_HMS3__STR)) return(TIME_HMS3);
if (!strcmp(tcf,TIME_HMSF__STR)) return(TIME_HMSF);
+ if (!strcmp(tcf,TIME_TIMECODE__STR)) return(TIME_TIMECODE);
if (!strcmp(tcf,TIME_SAMPLES__STR)) return(TIME_SAMPLES);
if (!strcmp(tcf,TIME_SAMPLES_HEX__STR)) return(TIME_SAMPLES_HEX);
if (!strcmp(tcf,TIME_FRAMES__STR)) return(TIME_FRAMES);
case TIME_SECONDS: fmt = TIME_SECONDS_TEXT; break;
case TIME_MS1:
case TIME_MS2: fmt = TIME_MS2_TEXT; break;
+ case TIME_TIMECODE: fmt = TIME_TIMECODE_TEXT; break;
}
return strcpy(string,fmt);
}
if(!strcmp(string, TIME_HMS3_TEXT)) return TIME_HMS3;
if(!strcmp(string, TIME_SECONDS_TEXT)) return TIME_SECONDS;
if(!strcmp(string, TIME_MS2_TEXT)) return TIME_MS2;
+ if(!strcmp(string, TIME_TIMECODE_TEXT)) return TIME_TIMECODE;
return TIME_HMS;
}
case TIME_HMS: return "0:00:00.000";
case TIME_HMS2: return "0:00:00";
case TIME_HMS3: return "00:00:00";
+ case TIME_TIMECODE:
case TIME_HMSF: return "0:00:00:00";
case TIME_SAMPLES: return 0;
case TIME_SAMPLES_HEX: return 0;
#define TOTALFREQS 1024
// slots per octave
#define OCTAVE 105
-#define TOTAL_TIMEFORMATS 7
+#define TOTAL_TIMEFORMATS 8
// h:mm:ss.sss
#define TIME_HMS 0
#define TIME_HMS2__STR "h:mm:ss"
#define TIME_HMS3__STR "hh:mm:ss"
#define TIME_HMSF__STR "h:mm:ss:ff"
+#define TIME_TIMECODE__STR "timecode"
#define TIME_SAMPLES__STR "audio samples"
#define TIME_SAMPLES_HEX__STR "audio samples (hex)"
#define TIME_FRAMES__STR "video frames"
#define TIME_MS2 10
#define TIME_MS2_TEXT _("Minutes:Seconds")
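+// h:mm:ss:ff, offset by the session start timecode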
+#define TIME_TIMECODE 11
+#define TIME_TIMECODE_TEXT _("Timecode")
+
class Units;
class DB
static int64_t tosamples(double frames, int sample_rate, float framerate);
// give text representation as time
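+// (timecode_offset, in seconds, applies only to TIME_TIMECODE: it is added when
+// formatting and subtracted by the parsing functions below)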
static char* totext(char *text, int64_t samples, int time_format,
- int samplerate, float frame_rate = 0, float frames_per_foot = 0);
+ int samplerate, float frame_rate = 0, float frames_per_foot = 0,
+ double timecode_offset = 0);
// give text representation as time
static char* totext(char *text, double seconds, int time_format,
- int sample_rate = 0, float frame_rate = 0, float frames_per_foot = 0);
+ int sample_rate = 0, float frame_rate = 0, float frames_per_foot = 0,
+ double timecode_offset = 0);
// convert time to samples
static int64_t fromtext(const char *text, int samplerate,
- int time_format, float frame_rate, float frames_per_foot);
+ int time_format, float frame_rate, float frames_per_foot,
+ double timecode_offset);
// Convert text to seconds
static double text_to_seconds(const char *text, int samplerate,
- int time_format, float frame_rate = 0, float frames_per_foot = 0);
+ int time_format, float frame_rate = 0, float frames_per_foot = 0,
+ double timecode_offset = 0);
static char* size_totext(int64_t bytes, char *text);
static float xy_to_polar(int x, int y);
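+// Usable payload size: excludes the BC_COLOR_ALIGN overwrite slack that
+// calculate_data_size() reserves.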
long VFrame::get_data_size()
{
- return calculate_data_size(w, h, bytes_per_line, color_model) - 4;
+ return calculate_data_size(w, h, bytes_per_line, color_model) - BC_COLOR_ALIGN;
}
long VFrame::calculate_data_size(int w, int h, int bytes_per_line, int color_model)
#
# Description of FFmpeg Video Plugins
#
+F_addroi: Mark a region of interest in a video frame.
F_amplify: Amplify changes between successive video frames.
F_atadenoise: Apply an Adaptive Temporal Averaging Denoiser.
F_avgblur: Apply average blur filter.
F_bbox: Compute bounding box for each frame.
F_bench: Benchmarks part of a filtergraph.
+F_bilateral: Apply a bilateral filter: spatial smoothing while
+ preserving edges.
F_bitplanenoise: Measure bit plane noise.
F_blackdetect: Detect video intervals that are (almost) black.
F_blackframe: Detect frames that are (almost) black.
able to change the power and the radius of the
boxblur applied to luma, chroma and alpha.
F_bwdif: Deinterlaces the input image.
+F_cas: Apply Contrast Adaptive Sharpen filter to video.
F_chromahold: Turns a certain color range into gray.
F_chromakey: Turns a certain color into transparency. Operates on
YUV colors.
F_cropdetect: Auto-detect crop size.
F_curves: Adjust components curves.
F_datascope: Video data analysis.
+F_dblur: Apply Directional blur filter.
F_dctdnoiz: Denoise frames using 2D DCT.
F_deband: Debands video.
F_deblock: Deblock video.
F_fspp: Applies Fast Simple Post-processing filter.
F_gblur: Apply Gaussian Blur filter.
F_gradfun: Debands video quickly using gradients.
+F_gradients: Draws a transparent gradient.
F_graphmonitor: Show various filtergraph stats.
F_greyedge: Estimates scene illumination by grey edge
assumption.
F_mandelbrot: Render a Mandelbrot fractal.
F_maskfun: Create Mask.
F_mcdeint: Applies motion compensating deinterlacing.
+F_median: Pick median pixel from rectangle defined by radius.
F_mestimate: Generate motion vectors.
F_metadata: Manipulate video frame metadata.
F_mpdecimate: Remove near-duplicate frames.
F_perms: Set permissions for the output video frame.
F_perspective: Corrects the perspective of video.
F_phase: Phase shift fields.
+F_photosensitivity: Filter out photosensitive epilepsy seizure-inducing
+ flashes.
F_pixscope: Pixel data analysis for checking color and levels.
It will display sample values of color channels.
F_pp: Filters video using libpostproc.
F_scale: Scale the input video size and/or convert the image
format.
F_scale_cuda: GPU accelerated video resizer.
+F_scdet: Detect video scene change.
+F_scroll: Scroll input video horizontally and/or vertically
+                    at a constant speed.
F_separatefields: Split input video frames into fields.
F_setparams: Force field, or color property for the output video
frame.
F_showpalette: Display frame palette.
F_shuffleframes: Shuffles video frames.
F_shuffleplanes: Shuffles video planes.
+F_sierpinski: Generate a Sierpinski carpet/triangle fractal, and
+ randomly pan around.
F_signalstats: Separates statistics from video analysis.
F_smartblur: Blurs the input video without impacting the outlines.
Through the settings you can select the radius, the
F_tblend: Blend successive frames.
F_testsrc: Generate test pattern.
F_testsrc2: Generate another test pattern.
+F_thistogram: Compute and draw a color distribution histogram for
+ the input video across time.
F_tile: Tile several successive frames together.
F_tinterlace: Performs temporal field interlacing.
F_tlut2: Compute and apply a lookup table from two
successive frames.
+F_tmedian: Pick median pixels from successive frames.
F_tmix: Mix successive video frames.
F_transpose: Transposes input video.
F_transpose_vaapi: VAAPI VPP for transpose.
F_unsharp: Sharpen or blur the input video.
+F_untile: Untile a frame into a sequence of frames.
F_uspp: Applies Ultra Simple/Slow Post-processing filter.
+F_v360: Convert 360 videos between various formats.
F_vaguedenoiser: Applies a Wavelet based Denoiser.
F_vectorscope: Video vectorscope.
F_vflip: Flips the input video vertically.
F_xbr: Scales the input using xBR algorithm.
F_yadif: Deinterlaces the input image.
F_yadif_cuda: Deinterlace CUDA frames.
+F_yaepblur: Yet another edge preserving blur filter.
F_yuvtestsrc: Generate YUV test pattern.
F_zoompan: Applies Zoom & Pan effect.
#
where signal amplitude is saturated along a smooth
curve.
F_astats: Shows time domain statistics about audio frames.
+F_asubboost: Boost subwoofer frequencies.
F_atempo: Adjusts audio tempo.
F_atrim: Pick one continuous section from the input and drop
the rest.
// To get the keyframes to work, resampling is always done in the forward
// direction with the plugin converting to reverse.
int ResampleRTResample::read_samples(Samples *buffer,
- int64_t start,
- int64_t len)
+ int64_t start, int64_t len, int direction)
{
int result = plugin->read_samples(buffer,
0,
{
public:
ResampleRTResample(ResampleRT *plugin);
- int read_samples(Samples *buffer, int64_t start, int64_t len);
+ int read_samples(Samples *buffer, int64_t start, int64_t len, int direction);
ResampleRT *plugin;
};
splice_data = new_button("splice.png", editpanel_up, editpanel_hi, editpanel_dn, "slicebutton");
new_button("toclip.png", editpanel_up, editpanel_hi, editpanel_dn, "toclip");
new_button("goto.png", editpanel_up, editpanel_hi, editpanel_dn, "goto");
+ new_button("clapper.png", editpanel_up, editpanel_hi, editpanel_dn, "clapperbutton");
new_button("top_justify.png", editpanel_up, editpanel_hi, editpanel_dn, "top_justify");
new_button("undo.png", editpanel_up, editpanel_hi, editpanel_dn, "undo");
new_button("wrench.png", editpanel_up, editpanel_hi, editpanel_dn, "wrench");
splice_data = new_button("splice.png", editpanel_up, editpanel_hi, editpanel_dn, "slicebutton");
new_button("toclip.png", editpanel_up, editpanel_hi, editpanel_dn, "toclip");
new_button("goto.png", editpanel_up, editpanel_hi, editpanel_dn, "goto");
+ new_button("clapper.png", editpanel_up, editpanel_hi, editpanel_dn, "clapperbutton");
new_button("top_justify.png", editpanel_up, editpanel_hi, editpanel_dn, "top_justify");
new_button("undo.png", editpanel_up, editpanel_hi, editpanel_dn, "undo");
//new_button("wrench.png", editpanel_up, editpanel_hi, editpanel_dn, "wrench");
splice_data = new_button("splice.png", editpanel_up, editpanel_hi, editpanel_dn, "slicebutton");
new_button("toclip.png", editpanel_up, editpanel_hi, editpanel_dn, "toclip");
new_button("goto.png", editpanel_up, editpanel_hi, editpanel_dn, "goto");
+ new_button("clapper.png", editpanel_up, editpanel_hi, editpanel_dn, "clapperbutton");
new_button("top_justify.png", editpanel_up, editpanel_hi, editpanel_dn, "top_justify");
new_button("undo.png", editpanel_up, editpanel_hi, editpanel_dn, "undo");
new_button("wrench.png", editpanel_up, editpanel_hi, editpanel_dn, "wrench");
splice_data = new_button("splice.png", editpanel_up, editpanel_hi, editpanel_dn, "slicebutton");
new_button("toclip.png", editpanel_up, editpanel_hi, editpanel_dn, "toclip");
new_button("goto.png", editpanel_up, editpanel_hi, editpanel_dn, "goto");
+ new_button("clapper.png", editpanel_up, editpanel_hi, editpanel_dn, "clapperbutton");
new_button("top_justify.png", editpanel_up, editpanel_hi, editpanel_dn, "top_justify");
new_button("undo.png", editpanel_up, editpanel_hi, editpanel_dn, "undo");
new_button("wrench.png", "editpanelW_up.png", "editpanelW_hi.png", "editpanelW_dn.png", "wrench");
splice_data = new_button("splice.png", editpanel_up, editpanel_hi, editpanel_dn, "slicebutton");
new_button("toclip.png", editpanel_up, editpanel_hi, editpanel_dn, "toclip");
new_button("goto.png", editpanel_up, editpanel_hi, editpanel_dn, "goto");
+ new_button("clapper.png", editpanel_up, editpanel_hi, editpanel_dn, "clapperbutton");
new_button("top_justify.png", editpanel_up, editpanel_hi, editpanel_dn, "top_justify");
new_button("undo.png", editpanel_up, editpanel_hi, editpanel_dn, "undo");
new_button("wrench.png", editpanel_up, editpanel_hi, editpanel_dn, "wrench");
new_button("goto.png",
editpanel_up, editpanel_hi, editpanel_dn,
"goto");
+ new_button("clapper.png",
+ editpanel_up, editpanel_hi, editpanel_dn,
+ "clapperbutton");
new_button("top_justify.png",
editpanel_up, editpanel_hi, editpanel_dn,
"top_justify");
splice_data = new_button("splice.png", editpanel_up, editpanel_hi, editpanel_dn, "slicebutton");
new_button("toclip.png", editpanel_up, editpanel_hi, editpanel_dn, "toclip");
new_button("goto.png", editpanel_up, editpanel_hi, editpanel_dn, "goto");
+ new_button("clapper.png", editpanel_up, editpanel_hi, editpanel_dn, "clapperbutton");
new_button("top_justify.png", editpanel_up, editpanel_hi, editpanel_dn, "top_justify");
new_button("undo.png", editpanel_up, editpanel_hi, editpanel_dn, "undo");
new_button("wrench.png", editpanel_up, editpanel_hi, editpanel_dn, "wrench");
new_button("goto.png",
editpanel_up, editpanel_hi, editpanel_dn,
"goto");
+ new_button("clapper.png",
+ editpanel_up, editpanel_hi, editpanel_dn,
+ "clapperbutton");
new_button("top_justify.png",
editpanel_up, editpanel_hi, editpanel_dn,
"top_justify");
splice_data = new_button("splice.png", editpanel_up, editpanel_hi, editpanel_dn, "slicebutton");
new_button("toclip.png", editpanel_up, editpanel_hi, editpanel_dn, "toclip");
new_button("goto.png", editpanel_up, editpanel_hi, editpanel_dn, "goto");
+ new_button("clapper.png", editpanel_up, editpanel_hi, editpanel_dn, "clapperbutton");
new_button("top_justify.png", editpanel_up, editpanel_hi, editpanel_dn, "top_justify");
new_button("undo.png", editpanel_up, editpanel_hi, editpanel_dn, "undo");
new_button("wrench.png", editpanel_up, editpanel_hi, editpanel_dn, "wrench");
splice_data = new_button("splice.png", editpanel_up, editpanel_hi, editpanel_dn, "slicebutton");
new_button("toclip.png", editpanel_up, editpanel_hi, editpanel_dn, "toclip");
new_button("goto.png", editpanel_up, editpanel_hi, editpanel_dn, "goto");
+ new_button("clapper.png", editpanel_up, editpanel_hi, editpanel_dn, "clapperbutton");
new_button("top_justify.png", editpanel_up, editpanel_hi, editpanel_dn, "top_justify");
new_button("undo.png", editpanel_up, editpanel_hi, editpanel_dn, "undo");
new_button("wrench.png", editpanel_up, editpanel_hi, editpanel_dn, "wrench");
splice_data = new_button("splice.png", editpanel_up, editpanel_hi, editpanel_dn,"splicebutton");
new_button("toclip.png", editpanel_up, editpanel_hi, editpanel_dn, "toclip");
new_button("goto.png", editpanel_up, editpanel_hi, editpanel_dn, "goto");
+ new_button("clapper.png", editpanel_up, editpanel_hi, editpanel_dn, "clapperbutton");
new_button("top_justify.png", editpanel_up, editpanel_hi, editpanel_dn, "top_justify");
new_button("undo.png", editpanel_up, editpanel_hi, editpanel_dn, "undo");
new_button("wrench.png", editpanel_up, editpanel_hi, editpanel_dn, "wrench");
--- /dev/null
+diff --git a/libswscale/x86/yuv_2_rgb.asm b/libswscale/x86/yuv_2_rgb.asm
+index 575a84d921..003dff1f25 100644
+--- a/libswscale/x86/yuv_2_rgb.asm
++++ b/libswscale/x86/yuv_2_rgb.asm
+@@ -268,9 +268,9 @@ cglobal %1_420_%2%3, GPR_num, GPR_num, reg_num, parameters
+ por m2, m7
+ por m1, m6 ; g5 b5 r6 g6 b6 r7 g7 b7 r8 g8 b8 r9 g9 b9 r10 g10
+ por m2, m3 ; b10 r11 g11 b11 r12 g12 b12 r13 g13 b13 r14 g14 b14 r15 g15 b15
+- mova [imageq], m0
+- mova [imageq + 16], m1
+- mova [imageq + 32], m2
++ movu [imageq], m0
++ movu [imageq + 16], m1
++ movu [imageq + 32], m2
+ %endif ; mmsize = 16
+ %else ; PACK RGB15/16/32
+ packuswb m0, m1
+@@ -300,10 +300,10 @@ cglobal %1_420_%2%3, GPR_num, GPR_num, reg_num, parameters
+ punpckhwd m_green, m_red
+ punpcklwd m5, m6
+ punpckhwd m_alpha, m6
+- mova [imageq + 0], m_blue
+- mova [imageq + 8 * time_num], m_green
+- mova [imageq + 16 * time_num], m5
+- mova [imageq + 24 * time_num], m_alpha
++ movu [imageq + 0], m_blue
++ movu [imageq + 8 * time_num], m_green
++ movu [imageq + 16 * time_num], m5
++ movu [imageq + 24 * time_num], m_alpha
+ %else ; PACK RGB15/16
+ %define depth 2
+ %if cpuflag(ssse3)
+@@ -342,8 +342,8 @@ cglobal %1_420_%2%3, GPR_num, GPR_num, reg_num, parameters
+ mova m2, m0
+ punpcklbw m0, m1
+ punpckhbw m2, m1
+- mova [imageq], m0
+- mova [imageq + 8 * time_num], m2
++ movu [imageq], m0
++ movu [imageq + 8 * time_num], m2
+ %endif ; PACK RGB15/16
+ %endif ; PACK RGB15/16/32
+