/*
- Copyright (C) 2012-2019 Carl Hetherington <cth@carlh.net>
+ Copyright (C) 2012-2021 Carl Hetherington <cth@carlh.net>
This file is part of DCP-o-matic.
*/
+
/** @file src/film_viewer.cc
* @brief A wx widget to view a preview of a Film.
*/
+
+#include "closed_captions_dialog.h"
#include "film_viewer.h"
-#include "playhead_to_timecode_dialog.h"
+#include "gl_video_view.h"
+#include "nag_dialog.h"
#include "playhead_to_frame_dialog.h"
+#include "playhead_to_timecode_dialog.h"
+#include "simple_video_view.h"
#include "wx_util.h"
-#include "closed_captions_dialog.h"
#include "lib/film.h"
#include "lib/ratio.h"
#include "lib/util.h"
#include "lib/config.h"
#include "lib/compose.hpp"
#include "lib/dcpomatic_log.h"
+#include "lib/text_content.h"
extern "C" {
#include <libavutil/pixfmt.h>
}
#include <iostream>
#include <iomanip>
-using std::string;
-using std::pair;
-using std::min;
-using std::max;
-using std::cout;
-using std::list;
+
using std::bad_alloc;
-using std::make_pair;
-using std::exception;
-using boost::shared_ptr;
-using boost::dynamic_pointer_cast;
-using boost::weak_ptr;
+using std::cout;
+using std::dynamic_pointer_cast;
+using std::make_shared;
+using std::max;
+using std::shared_ptr;
+using std::string;
+using std::vector;
using boost::optional;
+#if BOOST_VERSION >= 106100
+using namespace boost::placeholders;
+#endif
using dcp::Size;
+using namespace dcpomatic;
+
static
int
return reinterpret_cast<FilmViewer*>(data)->audio_callback (out, frames);
}
+
FilmViewer::FilmViewer (wxWindow* p)
- : _panel (new wxPanel (p))
- , _coalesce_player_changes (false)
- , _audio (DCPOMATIC_RTAUDIO_API)
- , _audio_channels (0)
- , _audio_block_size (1024)
- , _playing (false)
- , _latency_history_count (0)
- , _dropped (0)
+ : _audio (DCPOMATIC_RTAUDIO_API)
, _closed_captions_dialog (new ClosedCaptionsDialog(p, this))
- , _outline_content (false)
- , _eyes (EYES_LEFT)
- , _pad_black (false)
-#ifdef DCPOMATIC_VARIANT_SWAROOP
- , _in_watermark (false)
- , _background_image (false)
-#endif
{
-#ifndef __WXOSX__
- _panel->SetDoubleBuffered (true);
+#if wxCHECK_VERSION(3, 1, 0)
+ switch (Config::instance()->video_view_type()) {
+ case Config::VIDEO_VIEW_OPENGL:
+ _video_view = std::make_shared<GLVideoView>(this, p);
+ break;
+ case Config::VIDEO_VIEW_SIMPLE:
+ _video_view = std::make_shared<SimpleVideoView>(this, p);
+ break;
+ }
+#else
+ _video_view = std::make_shared<SimpleVideoView>(this, p);
#endif
- _panel->SetBackgroundStyle (wxBG_STYLE_PAINT);
- _panel->SetBackgroundColour (*wxBLACK);
-
- _panel->Bind (wxEVT_PAINT, boost::bind (&FilmViewer::paint_panel, this));
- _panel->Bind (wxEVT_SIZE, boost::bind (&FilmViewer::panel_sized, this, _1));
- _timer.Bind (wxEVT_TIMER, boost::bind (&FilmViewer::timer, this));
+ _video_view->Sized.connect (boost::bind(&FilmViewer::video_view_sized, this));
+ _video_view->TooManyDropped.connect (boost::bind(boost::ref(TooManyDropped)));
- set_film (shared_ptr<Film> ());
+ set_film (shared_ptr<Film>());
- _config_changed_connection = Config::instance()->Changed.connect (bind (&FilmViewer::config_changed, this, _1));
+ _config_changed_connection = Config::instance()->Changed.connect(bind(&FilmViewer::config_changed, this, _1));
config_changed (Config::SOUND_OUTPUT);
}
+
FilmViewer::~FilmViewer ()
{
stop ();
}
+
+/** Ask for the next frame to be fetched and displayed the next time the
+ *  UI is idle.  Idempotent: if a request is already pending (_idle_get is
+ *  set) this does nothing, so repeated calls coalesce into a single
+ *  idle_handler() invocation.
+ */
+void
+FilmViewer::request_idle_display_next_frame ()
+{
+ if (_idle_get) {
+ return;
+ }
+
+ _idle_get = true;
+ DCPOMATIC_ASSERT (signal_manager);
+ signal_manager->when_idle (boost::bind(&FilmViewer::idle_handler, this));
+}
+
+
+/** Idle-time handler scheduled by request_idle_display_next_frame().
+ *  Tries to display the next frame; if the video view reports
+ *  VideoView::AGAIN (the frame could not be fetched quickly) it
+ *  re-queues itself for the next idle period, otherwise the pending
+ *  request flag (_idle_get) is cleared.
+ */
+void
+FilmViewer::idle_handler ()
+{
+ if (!_idle_get) {
+ return;
+ }
+
+ if (_video_view->display_next_frame(true) == VideoView::AGAIN) {
+ /* get() could not complete quickly so we'll try again later */
+ signal_manager->when_idle (boost::bind(&FilmViewer::idle_handler, this));
+ } else {
+ _idle_get = false;
+ }
+}
+
+
void
FilmViewer::set_film (shared_ptr<Film> film)
{
}
_film = film;
- _video_position = DCPTime ();
- _player_video.first.reset ();
- _player_video.second = DCPTime ();
- _frame.reset ();
+ _video_view->clear ();
_closed_captions_dialog->clear ();
if (!_film) {
_player.reset ();
recreate_butler ();
- _frame.reset ();
- refresh_panel ();
+ _video_view->update ();
return;
}
try {
- _player.reset (new Player (_film, _film->playlist ()));
+ _player = make_shared<Player>(_film, _optimise_for_j2k ? Image::Alignment::COMPACT : Image::Alignment::PADDED);
_player->set_fast ();
if (_dcp_decode_reduction) {
_player->set_dcp_decode_reduction (_dcp_decode_reduction);
}
} catch (bad_alloc &) {
- error_dialog (_panel, _("There is not enough free memory to do that."));
+ error_dialog (_video_view->get(), _("There is not enough free memory to do that."));
_film.reset ();
return;
}
_player->set_play_referenced ();
_film->Change.connect (boost::bind (&FilmViewer::film_change, this, _1, _2));
+ _film->LengthChange.connect (boost::bind(&FilmViewer::film_length_change, this));
_player->Change.connect (boost::bind (&FilmViewer::player_change, this, _1, _2, _3));
+ film_change (ChangeType::DONE, Film::Property::VIDEO_FRAME_RATE);
+ film_change (ChangeType::DONE, Film::Property::THREE_D);
+ film_length_change ();
+
/* Keep about 1 second's worth of history samples */
_latency_history_count = _film->audio_frame_rate() / _audio_block_size;
+ _closed_captions_dialog->update_tracks (_film);
+
recreate_butler ();
calculate_sizes ();
slow_refresh ();
}
+
void
FilmViewer::recreate_butler ()
{
- bool const was_running = stop ();
+ suspend ();
_butler.reset ();
if (!_film) {
+ resume ();
return;
}
- AudioMapping map = AudioMapping (_film->audio_channels(), _audio_channels);
+#if wxCHECK_VERSION(3, 1, 0)
+ auto const j2k_gl_optimised = dynamic_pointer_cast<GLVideoView>(_video_view) && _optimise_for_j2k;
+#else
+ auto const j2k_gl_optimised = false;
+#endif
- if (_audio_channels != 2 || _film->audio_channels() < 3) {
- for (int i = 0; i < min (_film->audio_channels(), _audio_channels); ++i) {
- map.set (i, i, 1);
- }
- } else {
- /* Special case: stereo output, at least 3 channel input.
- Map so that Lt = L(-3dB) + Ls(-3dB) + C(-6dB) + Lfe(-10dB)
- Rt = R(-3dB) + Rs(-3dB) + C(-6dB) + Lfe(-10dB)
- */
- map.set (dcp::LEFT, 0, 1 / sqrt(2)); // L -> Lt
- map.set (dcp::RIGHT, 1, 1 / sqrt(2)); // R -> Rt
- map.set (dcp::CENTRE, 0, 1 / 2.0); // C -> Lt
- map.set (dcp::CENTRE, 1, 1 / 2.0); // C -> Rt
- map.set (dcp::LFE, 0, 1 / sqrt(10)); // Lfe -> Lt
- map.set (dcp::LFE, 1, 1 / sqrt(10)); // Lfe -> Rt
- map.set (dcp::LS, 0, 1 / sqrt(2)); // Ls -> Lt
- map.set (dcp::RS, 1, 1 / sqrt(2)); // Rs -> Rt
- }
+ _butler = std::make_shared<Butler>(
+ _film,
+ _player,
+ Config::instance()->audio_mapping(_audio_channels),
+ _audio_channels,
+ bind(&PlayerVideo::force, _1, AV_PIX_FMT_RGB24),
+ VideoRange::FULL,
+ j2k_gl_optimised ? Image::Alignment::COMPACT : Image::Alignment::PADDED,
+ true,
+ j2k_gl_optimised
+ );
- _butler.reset (new Butler(_player, map, _audio_channels, bind(&PlayerVideo::force, _1, AV_PIX_FMT_RGB24), false, true));
if (!Config::instance()->sound() && !_audio.isStreamOpen()) {
_butler->disable_audio ();
}
- _closed_captions_dialog->set_film_and_butler (_film, _butler);
-
- if (was_running) {
- start ();
- }
-}
+ _closed_captions_dialog->set_butler (_butler);
-void
-FilmViewer::refresh_panel ()
-{
- _panel->Refresh ();
- _panel->Update ();
+ resume ();
}
-void
-FilmViewer::get ()
-{
- DCPOMATIC_ASSERT (_butler);
-
- do {
- Butler::Error e;
- _player_video = _butler->get_video (&e);
- if (!_player_video.first && e == Butler::AGAIN) {
- signal_manager->when_idle (boost::bind(&FilmViewer::get, this));
- return;
- }
- } while (
- _player_video.first &&
- _film->three_d() &&
- _eyes != _player_video.first->eyes() &&
- _player_video.first->eyes() != EYES_BOTH
- );
-
- _butler->rethrow ();
-
- display_player_video ();
-}
void
-FilmViewer::display_player_video ()
-{
- if (!_player_video.first) {
- _frame.reset ();
- refresh_panel ();
- return;
- }
-
- if (_playing && (time() - _player_video.second) > one_video_frame()) {
- /* Too late; just drop this frame before we try to get its image (which will be the time-consuming
- part if this frame is J2K).
- */
- _video_position = _player_video.second;
- ++_dropped;
- return;
- }
-
- /* In an ideal world, what we would do here is:
- *
- * 1. convert to XYZ exactly as we do in the DCP creation path.
- * 2. convert back to RGB for the preview display, compensating
- * for the monitor etc. etc.
- *
- * but this is inefficient if the source is RGB. Since we don't
- * (currently) care too much about the precise accuracy of the preview's
- * colour mapping (and we care more about its speed) we try to short-
- * circuit this "ideal" situation in some cases.
- *
- * The content's specified colour conversion indicates the colourspace
- * which the content is in (according to the user).
- *
- * PlayerVideo::image (bound to PlayerVideo::force) will take the source
- * image and convert it (from whatever the user has said it is) to RGB.
- */
-
- _frame = _player_video.first->image (bind(&PlayerVideo::force, _1, AV_PIX_FMT_RGB24), false, true);
-
- ImageChanged (_player_video.first);
-
- _video_position = _player_video.second;
- _inter_position = _player_video.first->inter_position ();
- _inter_size = _player_video.first->inter_size ();
-
- refresh_panel ();
-
- _closed_captions_dialog->update (time());
-}
-
-void
-FilmViewer::timer ()
+FilmViewer::set_outline_content (bool o)
{
- if (!_film || !_playing) {
- return;
- }
-
- get ();
- PositionChanged ();
- DCPTime const next = _video_position + one_video_frame();
-
- if (next >= _film->length()) {
- stop ();
- Finished ();
- return;
- }
-
- LOG_DEBUG_PLAYER("%1 -> %2; delay %3", next.seconds(), time().seconds(), max((next.seconds() - time().seconds()) * 1000, 1.0));
- _timer.Start (max ((next.seconds() - time().seconds()) * 1000, 1.0), wxTIMER_ONE_SHOT);
-
- if (_butler) {
- _butler->rethrow ();
- }
+ _outline_content = o;
+ _video_view->update ();
}
-bool
-#ifdef DCPOMATIC_VARIANT_SWAROOP
-FilmViewer::maybe_draw_background_image (wxPaintDC& dc)
-{
- optional<boost::filesystem::path> bg = Config::instance()->player_background_image();
- if (bg) {
- wxImage image (std_to_wx(bg->string()));
- wxBitmap bitmap (image);
- dc.DrawBitmap (bitmap, max(0, (_panel_size.width - image.GetSize().GetWidth()) / 2), max(0, (_panel_size.height - image.GetSize().GetHeight()) / 2));
- return true;
- }
-
- return false;
-}
-#else
-FilmViewer::maybe_draw_background_image (wxPaintDC &)
-{
- return false;
-}
-#endif
void
-FilmViewer::paint_panel ()
+FilmViewer::set_outline_subtitles (optional<dcpomatic::Rect<double>> rect)
{
- wxPaintDC dc (_panel);
-
-#ifdef DCPOMATIC_VARIANT_SWAROOP
- if (_background_image) {
- dc.Clear ();
- maybe_draw_background_image (dc);
- return;
- }
-#endif
-
- if (!_out_size.width || !_out_size.height || !_film || !_frame || _out_size != _frame->size()) {
- dc.Clear ();
- } else {
-
- wxImage frame (_out_size.width, _out_size.height, _frame->data()[0], true);
- wxBitmap frame_bitmap (frame);
- dc.DrawBitmap (frame_bitmap, 0, max(0, (_panel_size.height - _out_size.height) / 2));
-
-#ifdef DCPOMATIC_VARIANT_SWAROOP
- DCPTime const period = DCPTime::from_seconds(Config::instance()->player_watermark_period() * 60);
- int64_t n = _video_position.get() / period.get();
- DCPTime from(n * period.get());
- DCPTime to = from + DCPTime::from_seconds(Config::instance()->player_watermark_duration() / 1000.0);
- if (from <= _video_position && _video_position <= to) {
- if (!_in_watermark) {
- _in_watermark = true;
- _watermark_x = rand() % _panel_size.width;
- _watermark_y = rand() % _panel_size.height;
- }
- dc.SetTextForeground(*wxWHITE);
- string wm = Config::instance()->player_watermark_theatre();
- boost::posix_time::ptime t = boost::posix_time::second_clock::local_time();
- wm += "\n" + boost::posix_time::to_iso_extended_string(t);
- dc.DrawText(std_to_wx(wm), _watermark_x, _watermark_y);
- } else {
- _in_watermark = false;
- }
-#endif
- }
-
- if (_out_size.width < _panel_size.width) {
- /* XXX: these colours are right for GNOME; may need adjusting for other OS */
- wxPen p (_pad_black ? wxColour(0, 0, 0) : wxColour(240, 240, 240));
- wxBrush b (_pad_black ? wxColour(0, 0, 0) : wxColour(240, 240, 240));
- dc.SetPen (p);
- dc.SetBrush (b);
- dc.DrawRectangle (_out_size.width, 0, _panel_size.width - _out_size.width, _panel_size.height);
- }
-
- if (_out_size.height < _panel_size.height) {
- wxPen p (_pad_black ? wxColour(0, 0, 0) : wxColour(240, 240, 240));
- wxBrush b (_pad_black ? wxColour(0, 0, 0) : wxColour(240, 240, 240));
- dc.SetPen (p);
- dc.SetBrush (b);
- int const gap = (_panel_size.height - _out_size.height) / 2;
- dc.DrawRectangle (0, 0, _panel_size.width, gap);
- dc.DrawRectangle (0, gap + _out_size.height + 1, _panel_size.width, gap + 1);
- }
-
- if (_outline_content) {
- wxPen p (wxColour (255, 0, 0), 2);
- dc.SetPen (p);
- dc.SetBrush (*wxTRANSPARENT_BRUSH);
- dc.DrawRectangle (_inter_position.x, _inter_position.y + (_panel_size.height - _out_size.height) / 2, _inter_size.width, _inter_size.height);
- }
+ _outline_subtitles = rect;
+ _video_view->update ();
}
-void
-FilmViewer::set_outline_content (bool o)
-{
- _outline_content = o;
- refresh_panel ();
-}
void
FilmViewer::set_eyes (Eyes e)
{
- _eyes = e;
+ _video_view->set_eyes (e);
slow_refresh ();
}
+
void
-FilmViewer::panel_sized (wxSizeEvent& ev)
+FilmViewer::video_view_sized ()
{
- _panel_size.width = ev.GetSize().GetWidth();
- _panel_size.height = ev.GetSize().GetHeight();
-
calculate_sizes ();
if (!quick_refresh()) {
slow_refresh ();
}
- PositionChanged ();
}
+
void
FilmViewer::calculate_sizes ()
{
return;
}
- Ratio const * container = _film->container ();
+ auto const container = _film->container ();
- float const panel_ratio = _panel_size.ratio ();
- float const film_ratio = container ? container->ratio () : 1.78;
+ auto const view_ratio = float(_video_view->get()->GetSize().x) / _video_view->get()->GetSize().y;
+ auto const film_ratio = container ? container->ratio () : 1.78;
- if (panel_ratio < film_ratio) {
+ dcp::Size out_size;
+ if (view_ratio < film_ratio) {
/* panel is less widscreen than the film; clamp width */
- _out_size.width = _panel_size.width;
- _out_size.height = lrintf (_out_size.width / film_ratio);
+ out_size.width = _video_view->get()->GetSize().x;
+ out_size.height = lrintf (out_size.width / film_ratio);
} else {
/* panel is more widescreen than the film; clamp height */
- _out_size.height = _panel_size.height;
- _out_size.width = lrintf (_out_size.height * film_ratio);
+ out_size.height = _video_view->get()->GetSize().y;
+ out_size.width = lrintf (out_size.height * film_ratio);
}
/* Catch silly values */
- _out_size.width = max (64, _out_size.width);
- _out_size.height = max (64, _out_size.height);
+ out_size.width = max (64, out_size.width);
+ out_size.height = max (64, out_size.height);
+
+ _player->set_video_container_size (out_size);
+}
+
+
+/** Temporarily halt playback activity, incrementing the suspension
+ *  count; each suspend() must be balanced by a resume().  Any running
+ *  audio stream is aborted immediately.
+ */
+void
+FilmViewer::suspend ()
+{
+ ++_suspended;
+ if (_audio.isStreamRunning()) {
+ _audio.abortStream();
+ }
+}
- _player->set_video_container_size (_out_size);
+
+/** Undo one suspend().  When the last suspension is lifted and we were
+ *  playing, restart the audio stream at the video view's current
+ *  position and start the video view again.
+ *  NOTE(review): unlike start(), the startStream() call here is not
+ *  wrapped in a try/catch for RtAudioError — confirm this is intentional.
+ */
+void
+FilmViewer::resume ()
+{
+ DCPOMATIC_ASSERT (_suspended > 0);
+ --_suspended;
+ if (_playing && !_suspended) {
+ if (_audio.isStreamOpen()) {
+ _audio.setStreamTime (_video_view->position().seconds());
+ _audio.startStream ();
+ }
+ _video_view->start ();
+ }
}
+
void
FilmViewer::start ()
{
return;
}
- optional<bool> v = PlaybackPermitted ();
+ auto v = PlaybackPermitted ();
if (v && !*v) {
/* Computer says no */
return;
}
+ /* We are about to set up the audio stream from the position of the video view.
+ If there is `lazy' seek in progress we need to wait for it to go through so that
+ _video_view->position() gives us a sensible answer.
+ */
+ while (_idle_get) {
+ idle_handler ();
+ }
+
+ /* Take the video view's idea of position as our `playhead' and start the
+ audio stream (which is the timing reference) there.
+ */
if (_audio.isStreamOpen()) {
- _audio.setStreamTime (_video_position.seconds());
- _audio.startStream ();
+ _audio.setStreamTime (_video_view->position().seconds());
+ try {
+ _audio.startStream ();
+ } catch (RtAudioError& e) {
+ _audio_channels = 0;
+ error_dialog (
+ _video_view->get(),
+ _("There was a problem starting audio playback. Please try another audio output device in Preferences."), std_to_wx(e.what())
+ );
+ }
}
_playing = true;
- _dropped = 0;
- timer ();
+ /* Calling start() below may directly result in Stopped being emitted, and if that
+ * happens we want it to come after the Started signal, so do that first.
+ */
Started (position());
+ _video_view->start ();
}
+
bool
FilmViewer::stop ()
{
}
_playing = false;
+ _video_view->stop ();
Stopped (position());
+
+ _video_view->rethrow ();
return true;
}
+
void
FilmViewer::player_change (ChangeType type, int property, bool frequent)
{
- if (type != CHANGE_TYPE_DONE || frequent) {
+ if (type != ChangeType::DONE || frequent) {
return;
}
return;
}
+ player_change ({property});
+}
+
+
+void
+FilmViewer::player_change (vector<int> properties)
+{
calculate_sizes ();
- bool refreshed = false;
- if (
- property == VideoContentProperty::CROP ||
- property == VideoContentProperty::SCALE ||
- property == VideoContentProperty::FADE_IN ||
- property == VideoContentProperty::FADE_OUT ||
- property == VideoContentProperty::COLOUR_CONVERSION ||
- property == PlayerProperty::VIDEO_CONTAINER_SIZE ||
- property == PlayerProperty::FILM_CONTAINER
- ) {
- refreshed = quick_refresh ();
- }
-
- if (!refreshed) {
+
+ bool try_quick_refresh = false;
+ bool update_ccap_tracks = false;
+
+ for (auto i: properties) {
+ if (
+ i == VideoContentProperty::CROP ||
+ i == VideoContentProperty::CUSTOM_RATIO ||
+ i == VideoContentProperty::CUSTOM_SIZE ||
+ i == VideoContentProperty::FADE_IN ||
+ i == VideoContentProperty::FADE_OUT ||
+ i == VideoContentProperty::COLOUR_CONVERSION ||
+ i == PlayerProperty::VIDEO_CONTAINER_SIZE ||
+ i == PlayerProperty::FILM_CONTAINER
+ ) {
+ try_quick_refresh = true;
+ }
+
+ if (i == TextContentProperty::USE || i == TextContentProperty::TYPE || i == TextContentProperty::DCP_TRACK) {
+ update_ccap_tracks = true;
+ }
+ }
+
+ if (!try_quick_refresh || !quick_refresh()) {
slow_refresh ();
}
- PositionChanged ();
+
+ if (update_ccap_tracks) {
+ _closed_captions_dialog->update_tracks (_film);
+ }
}
+
void
FilmViewer::film_change (ChangeType type, Film::Property p)
{
- if (type == CHANGE_TYPE_DONE && p == Film::AUDIO_CHANNELS) {
+ if (type != ChangeType::DONE) {
+ return;
+ }
+
+ if (p == Film::Property::AUDIO_CHANNELS) {
recreate_butler ();
+ } else if (p == Film::Property::VIDEO_FRAME_RATE) {
+ _video_view->set_video_frame_rate (_film->video_frame_rate());
+ } else if (p == Film::Property::THREE_D) {
+ _video_view->set_three_d (_film->three_d());
+ } else if (p == Film::Property::CONTENT) {
+ _closed_captions_dialog->update_tracks (_film);
}
}
+
+/** Called when the film's length changes: pass the new length on to the
+ *  video view.
+ */
+void
+FilmViewer::film_length_change ()
+{
+ _video_view->set_length (_film->length());
+}
+
+
/** Re-get the current frame slowly by seeking */
void
FilmViewer::slow_refresh ()
{
- seek (_video_position, true);
+ seek (_video_view->position(), true);
}
+
/** Try to re-get the current frame quickly by resetting the metadata
* in the PlayerVideo that we used last time.
* @return true if this was possible, false if not.
bool
FilmViewer::quick_refresh ()
{
- if (!_player_video.first) {
- return false;
- }
-
- if (!_player_video.first->reset_metadata (_film, _player->video_container_size(), _film->frame_size())) {
- return false;
+ if (!_video_view || !_film || !_player) {
+ return true;
}
-
- display_player_video ();
- return true;
+ return _video_view->reset_metadata (_film, _player->video_container_size());
}
+
void
FilmViewer::seek (shared_ptr<Content> content, ContentTime t, bool accurate)
{
- optional<DCPTime> dt = _player->content_time_to_dcp (content, t);
+ auto dt = _player->content_time_to_dcp (content, t);
if (dt) {
seek (*dt, accurate);
}
}
+
void
FilmViewer::set_coalesce_player_changes (bool c)
{
_coalesce_player_changes = c;
if (!c) {
- BOOST_FOREACH (int i, _pending_player_changes) {
- player_change (CHANGE_TYPE_DONE, i, false);
- }
+ player_change (_pending_player_changes);
_pending_player_changes.clear ();
}
}
+
void
FilmViewer::seek (DCPTime t, bool accurate)
{
return;
}
- if (t < DCPTime ()) {
+ if (t < DCPTime()) {
t = DCPTime ();
}
- if (t >= _film->length ()) {
- t = _film->length ();
+ if (t >= _film->length()) {
+ t = _film->length() - one_video_frame();
}
- bool const was_running = stop ();
+ suspend ();
_closed_captions_dialog->clear ();
_butler->seek (t, accurate);
- get ();
- if (was_running) {
- start ();
+ if (!_playing) {
+ /* We're not playing, so let the GUI thread get on and
+ come back later to get the next frame after the seek.
+ */
+ request_idle_display_next_frame ();
+ } else {
+ /* We're going to start playing again straight away
+ so wait for the seek to finish.
+ */
+ while (_video_view->display_next_frame(false) == VideoView::AGAIN) {}
}
- PositionChanged ();
+ resume ();
}
+
void
FilmViewer::config_changed (Config::Property p)
{
-#ifdef DCPOMATIC_VARIANT_SWAROOP
- if (p == Config::PLAYER_BACKGROUND_IMAGE) {
- refresh_panel ();
+ if (p == Config::AUDIO_MAPPING) {
+ recreate_butler ();
return;
}
-#endif
if (p != Config::SOUND && p != Config::SOUND_OUTPUT) {
return;
unsigned int st = 0;
if (Config::instance()->sound_output()) {
while (st < _audio.getDeviceCount()) {
- if (_audio.getDeviceInfo(st).name == Config::instance()->sound_output().get()) {
- break;
+ try {
+ if (_audio.getDeviceInfo(st).name == Config::instance()->sound_output().get()) {
+ break;
+ }
+ } catch (RtAudioError&) {
+ /* Something went wrong with that device so we don't want to use it anyway */
}
++st;
}
st = _audio.getDefaultOutputDevice();
}
- _audio_channels = _audio.getDeviceInfo(st).outputChannels;
-
- RtAudio::StreamParameters sp;
- sp.deviceId = st;
- sp.nChannels = _audio_channels;
- sp.firstChannel = 0;
try {
+ _audio_channels = _audio.getDeviceInfo(st).outputChannels;
+ RtAudio::StreamParameters sp;
+ sp.deviceId = st;
+ sp.nChannels = _audio_channels;
+ sp.firstChannel = 0;
_audio.openStream (&sp, 0, RTAUDIO_FLOAT32, 48000, &_audio_block_size, &rtaudio_callback, this);
-#ifdef DCPOMATIC_USE_RTERROR
- } catch (RtError& e) {
-#else
} catch (RtAudioError& e) {
-#endif
+ _audio_channels = 0;
error_dialog (
- _panel,
+ _video_view->get(),
_("Could not set up audio output. There will be no audio during the preview."), std_to_wx(e.what())
);
}
}
}
+
DCPTime
FilmViewer::uncorrected_time () const
{
- if (_audio.isStreamRunning ()) {
+ if (_audio.isStreamRunning()) {
return DCPTime::from_seconds (const_cast<RtAudio*>(&_audio)->getStreamTime());
}
- return _video_position;
+ return _video_view->position();
}
-DCPTime
-FilmViewer::time () const
+
+optional<DCPTime>
+FilmViewer::audio_time () const
{
- if (_audio.isStreamRunning ()) {
- return DCPTime::from_seconds (const_cast<RtAudio*>(&_audio)->getStreamTime ()) -
- DCPTime::from_frames (average_latency(), _film->audio_frame_rate());
+ if (!_audio.isStreamRunning()) {
+ return {};
}
- return _video_position;
+ return DCPTime::from_seconds (const_cast<RtAudio*>(&_audio)->getStreamTime ()) -
+ DCPTime::from_frames (average_latency(), _film->audio_frame_rate());
+}
+
+
+DCPTime
+FilmViewer::time () const
+{
+ return audio_time().get_value_or(_video_view->position());
}
+
int
FilmViewer::audio_callback (void* out_p, unsigned int frames)
{
while (true) {
- optional<DCPTime> t = _butler->get_audio (reinterpret_cast<float*> (out_p), frames);
+ auto t = _butler->get_audio (Butler::Behaviour::NON_BLOCKING, reinterpret_cast<float*> (out_p), frames);
if (!t || DCPTime(uncorrected_time() - *t) < one_video_frame()) {
/* There was an underrun or this audio is on time; carry on */
break;
return 0;
}
+
Frame
FilmViewer::average_latency () const
{
}
Frame total = 0;
- BOOST_FOREACH (Frame i, _latency_history) {
+ for (auto i: _latency_history) {
total += i;
}
return total / _latency_history.size();
}
+
void
FilmViewer::set_dcp_decode_reduction (optional<int> reduction)
{
}
}
+
optional<int>
FilmViewer::dcp_decode_reduction () const
{
return _dcp_decode_reduction;
}
+
DCPTime
FilmViewer::one_video_frame () const
{
- return DCPTime::from_frames (1, _film->video_frame_rate());
+ return DCPTime::from_frames (1, _film ? _film->video_frame_rate() : 24);
}
+
/** Open a dialog box showing our film's closed captions */
void
FilmViewer::show_closed_captions ()
_closed_captions_dialog->Show();
}
+
void
FilmViewer::seek_by (DCPTime by, bool accurate)
{
- seek (_video_position + by, accurate);
+ seek (_video_view->position() + by, accurate);
}
+
void
FilmViewer::set_pad_black (bool p)
{
_pad_black = p;
}
+
+
+/** Called when a player has finished the current film.
+ * May be called from a non-UI thread, so the real work is marshalled
+ * to the UI thread via emit() -> ui_finished().
+ */
+void
+FilmViewer::finished ()
+{
+ emit (boost::bind(&FilmViewer::ui_finished, this));
+}
+
+
+/** Called by finished() in the UI thread: stop playback and emit the
+ *  public Finished signal.
+ */
+void
+FilmViewer::ui_finished ()
+{
+ stop ();
+ Finished ();
+}
+
+
+/** @return number of dropped frames as reported by the video view */
+int
+FilmViewer::dropped () const
+{
+ return _video_view->dropped ();
+}
+
+
+/** @return error count as reported by the video view */
+int
+FilmViewer::errored () const
+{
+ return _video_view->errored ();
+}
+
+
+/** @return number of frame `gets' as reported by the video view */
+int
+FilmViewer::gets () const
+{
+ return _video_view->gets ();
+}
+
+
+/** Relay a new PlayerVideo to listeners by emitting the ImageChanged
+ *  signal.  emit() is used, presumably to marshal the signal to the UI
+ *  thread (as in finished()) — confirm against SignalManager.
+ */
+void
+FilmViewer::image_changed (shared_ptr<PlayerVideo> pv)
+{
+ emit (boost::bind(boost::ref(ImageChanged), pv));
+}
+
+
+/** Set whether to optimise the pipeline for J2K content, passing the
+ *  setting on to the video view.  The stored flag is also consulted when
+ *  the Player and Butler are (re)created.
+ */
+void
+FilmViewer::set_optimise_for_j2k (bool o)
+{
+ _optimise_for_j2k = o;
+ _video_view->set_optimise_for_j2k (o);
+}
+