Use "frame" for video-frames, fixes video-monitor state.
author     Robin Gareus <robin@gareus.org>
           Sun, 21 Jan 2018 10:27:26 +0000 (11:27 +0100)
committer  Robin Gareus <robin@gareus.org>
           Sun, 21 Jan 2018 10:27:26 +0000 (11:27 +0100)
revert/amend/undo 30b087ab for video-related units
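
Context for the unit distinction this commit restores: Ardour timeline positions are audio samples (ARDOUR::samplepos_t), while xjadeo and the video server address individual video frames. Below is a minimal standalone sketch of that conversion; the helper names and free-function form are hypothetical, only the arithmetic mirrors VideoTimeLine::update_video_timeline() (rint of (position - video_offset) / apv) and the negative-frame clamp in VideoMonitor::manual_seek() shown in the diff.

    // Sketch only (not part of this commit): hypothetical helpers showing the
    // audio-sample -> video-frame conversion behind the renamed members.
    #include <algorithm>
    #include <cmath>
    #include <cstdint>

    typedef int64_t samplepos_t; // timeline position, unit: audio samples

    // audio samples per video frame, cf. VideoTimeLine::get_apv()
    static double apv (double session_sample_rate, double video_file_fps)
    {
            return session_sample_rate / video_file_fps;
    }

    // timeline position -> video-frame number to display
    static int64_t video_frame_at (samplepos_t when, samplepos_t video_offset,
                                   double session_sample_rate, double video_file_fps)
    {
            int64_t vf = (int64_t) rint ((when - video_offset) / apv (session_sample_rate, video_file_fps));
            return std::max<int64_t> (vf, 0); // clamp, as manual_seek() does for negative positions
    }

For example, at a 48 kHz session rate and a 25 fps video file, apv() is 1920 samples per frame, so a playhead at sample 96000 with zero video offset maps to video frame 50.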

gtk2_ardour/export_video_dialog.cc
gtk2_ardour/video_image_frame.cc
gtk2_ardour/video_monitor.cc
gtk2_ardour/video_monitor.h
gtk2_ardour/video_timeline.cc
gtk2_ardour/video_timeline.h

diff --git a/gtk2_ardour/export_video_dialog.cc b/gtk2_ardour/export_video_dialog.cc
index eefd8def9076256243cfcaa59baac3d843c3dfc1..5ba9fb37baa857a2cc2a989e0be5971e54fba1f6 100644
@@ -96,7 +96,7 @@ ExportVideoDialog::ExportVideoDialog ()
        , optimizations_checkbox (_("Codec Optimizations:"))
        , optimizations_label ("-")
        , deinterlace_checkbox (_("Deinterlace"))
-       , bframes_checkbox (_("Use [2] B-samples (MPEG 2 or 4 only)"))
+       , bframes_checkbox (_("Use [2] B-frames (MPEG 2 or 4 only)"))
        , fps_checkbox (_("Override FPS (Default is to retain FPS from the input video file):"))
        , meta_checkbox (_("Include Session Metadata"))
 #if 1 /* tentative debug mode */
diff --git a/gtk2_ardour/video_image_frame.cc b/gtk2_ardour/video_image_frame.cc
index cb3fe733b06e855a5abebaa45f275482167d66e1..5d0aea3407de707bf3dd55e0918b04fd2c7d1dc1 100644
@@ -250,7 +250,7 @@ VideoImageFrame::http_download_done (char *data){
        }
 
        exposeimg();
-       /* don't request samples too quickly, wait after user has zoomed */
+       /* don't request frames too quickly, wait after user has zoomed */
        Glib::usleep(40000);
 
        if (queued_request) {
diff --git a/gtk2_ardour/video_monitor.cc b/gtk2_ardour/video_monitor.cc
index cb146e4ea5f6ac0d10da13b6b12fffde70e9c98b..f67aabca9ed5fe4d9dcf1fc18bee023548a92f1e 100644
@@ -37,7 +37,7 @@ using namespace ARDOUR_UI_UTILS;
 VideoMonitor::VideoMonitor (PublicEditor *ed, std::string xjadeo_bin_path)
        : editor (ed)
 {
-       manually_seeked_sample = 0;
+       manually_seeked_frame = 0;
        fps =0.0; // = _session->timecode_frames_per_second();
        sync_by_manual_seek = true;
        _restore_settings_mask = 0;
@@ -72,7 +72,7 @@ VideoMonitor::start ()
                return true;
        }
 
-       manually_seeked_sample = 0;
+       manually_seeked_frame = 0;
        sync_by_manual_seek = false;
        if (clock_connection.connected()) { clock_connection.disconnect(); }
 
@@ -126,7 +126,7 @@ void
 VideoMonitor::open (std::string filename)
 {
        if (!is_started()) return;
-       manually_seeked_sample = 0;
+       manually_seeked_frame = 0;
        osdmode = 10; // 1: frameno, 2: timecode, 8: box
        starting = 15;
        process->write_to_stdin("load " + filename + "\n");
@@ -384,8 +384,8 @@ VideoMonitor::parse_output (std::string d, size_t /*s*/)
                                                osdmode = atoi(value);
                                                if (starting || atoi(xjadeo_settings["osd mode"]) != osdmode) {
                                                        if (!starting && _session) _session->set_dirty ();
-                                                       if ((osdmode & 1) == 1) { UiState("xjadeo-window-osd-sample-on"); }
-                                                       if ((osdmode & 1) == 0) { UiState("xjadeo-window-osd-sample-off"); }
+                                                       if ((osdmode & 1) == 1) { UiState("xjadeo-window-osd-frame-on"); }
+                                                       if ((osdmode & 1) == 0) { UiState("xjadeo-window-osd-frame-off"); }
                                                        if ((osdmode & 2) == 2) { UiState("xjadeo-window-osd-timecode-on"); }
                                                        if ((osdmode & 2) == 0) { UiState("xjadeo-window-osd-timecode-off"); }
                                                        if ((osdmode & 8) == 8) { UiState("xjadeo-window-osd-box-on"); }
@@ -526,8 +526,8 @@ VideoMonitor::manual_seek (samplepos_t when, bool /*force*/, ARDOUR::sampleoffse
        }
        if (video_frame < 0 ) video_frame = 0;
 
-       if (video_frame == manually_seeked_sample) { return; }
-       manually_seeked_sample = video_frame;
+       if (video_frame == manually_seeked_frame) { return; }
+       manually_seeked_frame = video_frame;
 
 #if 0 /* DEBUG */
        std::cout <<"seek: " << video_frame << std::endl;
diff --git a/gtk2_ardour/video_monitor.h b/gtk2_ardour/video_monitor.h
index 1abd47231224df9406a05f90322d716d619a0513..ec804ba01cd92f34c7de53fd3f5c532626c8e7e2 100644
@@ -98,7 +98,7 @@ class VideoMonitor : public sigc::trackable , public ARDOUR::SessionHandlePtr, p
        XJSettings xjadeo_settings;
 
        void xjadeo_sync_setup ();
-       ARDOUR::samplepos_t manually_seeked_sample;
+       ARDOUR::samplepos_t manually_seeked_frame;
        ARDOUR::sampleoffset_t video_offset;
        bool sync_by_manual_seek;
        sigc::connection clock_connection;
diff --git a/gtk2_ardour/video_timeline.cc b/gtk2_ardour/video_timeline.cc
index e336d59534ab648975c57af9f8b6bb83a1c4da94..0c5b0801248512a3714c089fd1f9e37b984fcbce 100644
@@ -48,8 +48,8 @@ using namespace VideoUtils;
 
 VideoTimeLine::VideoTimeLine (PublicEditor *ed, ArdourCanvas::Container *vbg, int initial_height)
        : editor (ed)
-               , videotl_group(vbg)
-               , bar_height(initial_height)
+       , videotl_group(vbg)
+       , bar_height(initial_height)
 {
        video_start_offset = 0L;
        video_offset = 0L;
@@ -62,7 +62,7 @@ VideoTimeLine::VideoTimeLine (PublicEditor *ed, ArdourCanvas::Container *vbg, in
        video_filename = "";
        local_file = true;
        video_file_fps = 25.0;
-       flush_samples = false;
+       _flush_frames = false;
        vmonitor=0;
        reopen_vmonitor=false;
        find_xjadeo();
@@ -119,7 +119,7 @@ VideoTimeLine::close_session ()
        sessionsave.disconnect();
        close_video_monitor();
 
-       remove_samples();
+       remove_frames ();
        video_filename = "";
        video_duration = 0;
        GuiUpdate("set-xjadeo-sensitive-off");
@@ -231,24 +231,24 @@ VideoTimeLine::get_state ()
 }
 
 void
-VideoTimeLine::remove_samples ()
+VideoTimeLine::remove_frames ()
 {
-       for (VideoSamples::iterator i = video_frames.begin(); i != video_frames.end(); ++i ) {
-               VideoImageFrame *sample = (*i);
-               delete sample;
+       for (VideoFrames::iterator i = video_frames.begin(); i != video_frames.end(); ++i ) {
+               VideoImageFrame* frame = (*i);
+               delete frame;
                (*i) = 0;
        }
        video_frames.clear();
 }
 
-VideoImageFrame *
+VideoImageFrame*
 VideoTimeLine::get_video_frame (samplepos_t vfn, int cut, int rightend)
 {
        if (vfn==0) cut=0;
-       for (VideoSamples::iterator i = video_frames.begin(); i != video_frames.end(); ++i) {
-               VideoImageFrame *sample = (*i);
-               if (abs(sample->get_video_frame_number()-vfn)<=cut
-                   && sample->get_rightend() == rightend) { return sample; }
+       for (VideoFrames::iterator i = video_frames.begin(); i != video_frames.end(); ++i) {
+               VideoImageFrame* frame = (*i);
+               if (abs(frame->get_video_frame_number()-vfn)<=cut
+                   && frame->get_rightend() == rightend) { return frame; }
        }
        return 0;
 }
@@ -294,11 +294,11 @@ VideoTimeLine::update_video_timeline()
        const samplepos_t leftmost_sample =  editor->leftmost_sample();
 
        /* Outline:
-        * 1) calculate how many samples there should be in current zoom (plus 1 page on each side)
-        * 2) calculate first sample and distance between video-frames (according to zoom)
-        * 3) destroy/add samples
-        * 4) reposition existing samples
-        * 5) assign framenumber to samples -> request/decode video.
+        * 1) calculate how many frames there should be in current zoom (plus 1 page on each side)
+        * 2) calculate first frame and distance between video-frames (according to zoom)
+        * 3) destroy/add frames
+        * 4) reposition existing frames
+        * 5) assign framenumber to frames -> request/decode video.
         */
 
        /* video-file and session properties */
@@ -309,7 +309,7 @@ VideoTimeLine::update_video_timeline()
        /* variables needed to render videotimeline -- what needs to computed first */
        samplepos_t vtl_start; /* unit: audio-samples ; first displayed video-frame */
        samplepos_t vtl_dist;  /* unit: audio-samples ; distance between displayed video-frames */
-       unsigned int visible_video_frames; /* number of samples that fit on current canvas */
+       unsigned int visible_video_frames; /* number of frames that fit on current canvas */
 
        if (_session->config.get_videotimeline_pullup()) {
                apv = _session->sample_rate();
@@ -338,43 +338,43 @@ VideoTimeLine::update_video_timeline()
        leftmost_video_frame = floor (floor((long double)(leftmost_sample - video_start_offset - video_offset ) / vtl_dist) * vtl_dist / apv);
 
        vtl_start = rint (video_offset + video_start_offset + leftmost_video_frame * apv);
-       visible_video_frames = 2 + ceil((double)editor->current_page_samples() / vtl_dist); /* +2 left+right partial samples */
+       visible_video_frames = 2 + ceil((double)editor->current_page_samples() / vtl_dist); /* +2 left+right partial frames */
 
        /* expand timeline (cache next/prev page images) */
        vtl_start -= visible_video_frames * vtl_dist;
        visible_video_frames *=3;
 
-       /* don't request samples that are too far to the right */
+       /* don't request frames that are too far to the right */
        if (vtl_start < video_offset) {
                visible_video_frames = std::max((double)0.0, (double)visible_video_frames + ceil((double)(vtl_start - video_offset)/vtl_dist));
                vtl_start = video_offset;
        }
 
        /* apply video-file constraints
-        * (first sample in video is at video_start_offset) */
+        * (first frame in video is at video_start_offset) */
        if (vtl_start > video_start_offset + video_duration + video_offset ) {
                visible_video_frames = 0;
        }
        /* trim end.
-        * end = position on timeline (video-offset)  minus  video-file's first sample position
+        * end = position on timeline (video-offset)  minus  video-file's first frame position
         * TODO optimize: compute rather than iterate */
        while (visible_video_frames > 0 && vtl_start + (visible_video_frames-1) * vtl_dist >= video_start_offset + video_duration + video_offset) {
                --visible_video_frames;
        }
 
-       if (flush_samples) {
-               remove_samples();
-               flush_samples=false;
+       if (_flush_frames) {
+               remove_frames ();
+               _flush_frames = false;
        }
 
        while (video_frames.size() < visible_video_frames) {
-               VideoImageFrame *sample;
-               sample = new VideoImageFrame(*editor, *videotl_group, display_vframe_width, bar_height, video_server_url, translated_filename());
-               sample->ImgChanged.connect (*this, invalidator (*this), boost::bind (&PublicEditor::queue_visual_videotimeline_update, editor), gui_context());
-               video_frames.push_back(sample);
+               VideoImageFrame *frame;
+               frame = new VideoImageFrame(*editor, *videotl_group, display_vframe_width, bar_height, video_server_url, translated_filename());
+               frame->ImgChanged.connect (*this, invalidator (*this), boost::bind (&PublicEditor::queue_visual_videotimeline_update, editor), gui_context());
+               video_frames.push_back(frame);
        }
 
-       VideoSamples outdated_video_frames;
+       VideoFrames outdated_video_frames;
        std::list<int> remaining;
 
        outdated_video_frames = video_frames;
@@ -400,21 +400,21 @@ VideoTimeLine::update_video_timeline()
                        rightend = display_vframe_width * (video_start_offset + video_duration + video_offset - vfpos) / vtl_dist;
                        //printf("lf(e): %lu\n", vframeno); // XXX
                }
-               VideoImageFrame * sample = get_video_frame(vframeno, cut, rightend);
-               if (sample) {
-                 sample->set_position(vfpos);
-                       outdated_video_frames.remove(sample);
+               VideoImageFrame* frame = get_video_frame(vframeno, cut, rightend);
+               if (frame) {
+                 frame->set_position(vfpos);
+                       outdated_video_frames.remove (frame);
                } else {
                        remaining.push_back(vfcount);
                }
        }
 
-       for (VideoSamples::iterator i = outdated_video_frames.begin(); i != outdated_video_frames.end(); ++i ) {
-               VideoImageFrame *sample = (*i);
+       for (VideoFrames::iterator i = outdated_video_frames.begin(); i != outdated_video_frames.end(); ++i ) {
+               VideoImageFrame* frame = (*i);
                if (remaining.empty()) {
-                 sample->set_position(-2 * vtl_dist + leftmost_sample); /* move off screen */
+                 frame->set_position(-2 * vtl_dist + leftmost_sample); /* move off screen */
                } else {
-                       int vfcount=remaining.front();
+                       int vfcount = remaining.front();
                        remaining.pop_front();
                        samplepos_t vfpos = vtl_start + vfcount * vtl_dist; /* unit: audio-samples */
                        samplepos_t vframeno = rint ((vfpos - video_offset) / apv);  /* unit: video-frames */
@@ -423,8 +423,8 @@ VideoTimeLine::update_video_timeline()
                                rightend = display_vframe_width * (video_start_offset + video_duration + video_offset - vfpos) / vtl_dist;
                                //printf("lf(n): %lu\n", vframeno); // XXX
                        }
-                       sample->set_position(vfpos);
-                       sample->set_videoframe(vframeno, rightend);
+                       frame->set_position(vfpos);
+                       frame->set_videoframe(vframeno, rightend);
                }
        }
 }
@@ -609,9 +609,9 @@ VideoTimeLine::gui_update(std::string const & t) {
                editor->toggle_xjadeo_viewoption(2, 1);
        } else if (t == "xjadeo-window-osd-timecode-off") {
                editor->toggle_xjadeo_viewoption(2, 0);
-       } else if (t == "xjadeo-window-osd-sample-on") {
+       } else if (t == "xjadeo-window-osd-frame-on") {
                editor->toggle_xjadeo_viewoption(3, 1);
-       } else if (t == "xjadeo-window-osd-sample-off") {
+       } else if (t == "xjadeo-window-osd-frame-off") {
                editor->toggle_xjadeo_viewoption(3, 0);
        } else if (t == "xjadeo-window-osd-box-on") {
                editor->toggle_xjadeo_viewoption(4, 1);
@@ -650,7 +650,7 @@ VideoTimeLine::vmon_update () {
 
 void
 VideoTimeLine::flush_local_cache () {
-       flush_samples = true;
+       _flush_frames = true;
        vmon_update();
 }
 
diff --git a/gtk2_ardour/video_timeline.h b/gtk2_ardour/video_timeline.h
index 55dd29a89720f4341553bfa3b6d3d0d7121de8a6..fd882adb3eec1223541bf6f2f5ca05a7a9b0f5ec 100644
@@ -45,7 +45,7 @@ class PublicEditor;
  *  creates \ref VideoImageFrame as neccesary (which
  *  query the server for image-data).
  *
- *  This class contains the algorithm to position the single samples
+ *  This class contains the algorithm to position the single frames
  *  on the timeline according to current-zoom level and video-file
  *  attributes. see \ref update_video_timeline()
  *
@@ -94,8 +94,8 @@ class VideoTimeLine : public sigc::trackable, public ARDOUR::SessionHandlePtr, p
        void close_session ();
        void sync_session_state (); /* video-monitor does not actively report window/pos changes, query it */
        float get_apv(); /* audio samples per video frame; */
-       ARDOUR::samplecnt_t get_duration () { return video_duration;}
-       ARDOUR::sampleoffset_t get_offset () { return video_offset;}
+       ARDOUR::samplecnt_t    get_duration () { return video_duration;}
+       ARDOUR::sampleoffset_t get_offset ()   { return video_offset;}
        ARDOUR::sampleoffset_t quantify_samples_to_apv (ARDOUR::sampleoffset_t offset) { return rint(offset/get_apv())*get_apv(); }
        void set_offset (ARDOUR::sampleoffset_t offset) { video_offset = quantify_samples_to_apv(offset); } // this function does not update video_offset_p, call save_undo() to finalize changes to this! - this fn is currently only used from editor_drag.cc
 
@@ -129,11 +129,12 @@ class VideoTimeLine : public sigc::trackable, public ARDOUR::SessionHandlePtr, p
        std::string xjadeo_version;
        std::string harvid_version;
 
-       typedef std::list<VideoImageFrame*> VideoSamples;
-       VideoSamples video_frames;
-       VideoImageFrame *get_video_frame (samplepos_t vfn, int cut=0, int rightend = -1);
-       bool        flush_samples;
-       void        remove_samples ();
+       typedef std::list<VideoImageFrame*> VideoFrames;
+       VideoFrames video_frames;
+       VideoImageFrame* get_video_frame (samplepos_t vfn, int cut=0, int rightend = -1);
+
+       void remove_frames ();
+       bool _flush_frames;
 
        std::string translated_filename ();