}
_video_view->Sized.connect (boost::bind(&FilmViewer::video_view_sized, this));
- _timer.Bind (wxEVT_TIMER, boost::bind(&FilmViewer::timer, this));
set_film (shared_ptr<Film> ());
/** Ask for the next frame to be displayed the next time we are idle */
void
-FilmViewer::request_idle_get ()
+FilmViewer::request_idle_display_next_frame ()
{
if (_idle_get) {
return;
}
- if (get(true)) {
+ if (_video_view->display_next_frame(true)) {
_idle_get = false;
} else {
/* get() could not complete quickly so we'll try again later */
}
_film = film;
- _video_position = DCPTime ();
- _player_video.first.reset ();
- _player_video.second = DCPTime ();
- _video_view->set_image (shared_ptr<Image>());
+ _video_view->clear ();
_closed_captions_dialog->clear ();
if (!_film) {
_player->set_play_referenced ();
_film->Change.connect (boost::bind (&FilmViewer::film_change, this, _1, _2));
+ _film->LengthChange.connect (boost::bind(&FilmViewer::film_length_change, this));
_player->Change.connect (boost::bind (&FilmViewer::player_change, this, _1, _2, _3));
/* Keep about 1 second's worth of history samples */
return;
}
- AudioMapping map = AudioMapping (_film->audio_channels(), _audio_channels);
-
- if (_audio_channels != 2 || _film->audio_channels() < 3) {
- for (int i = 0; i < min (_film->audio_channels(), _audio_channels); ++i) {
- map.set (i, i, 1);
- }
- } else {
- /* Special case: stereo output, at least 3 channel input.
- Map so that Lt = L(-3dB) + Ls(-3dB) + C(-6dB) + Lfe(-10dB)
- Rt = R(-3dB) + Rs(-3dB) + C(-6dB) + Lfe(-10dB)
- */
- if (_film->audio_channels() > 0) {
- map.set (dcp::LEFT, 0, 1 / sqrt(2)); // L -> Lt
- }
- if (_film->audio_channels() > 1) {
- map.set (dcp::RIGHT, 1, 1 / sqrt(2)); // R -> Rt
- }
- if (_film->audio_channels() > 2) {
- map.set (dcp::CENTRE, 0, 1 / 2.0); // C -> Lt
- map.set (dcp::CENTRE, 1, 1 / 2.0); // C -> Rt
- }
- if (_film->audio_channels() > 3) {
- map.set (dcp::LFE, 0, 1 / sqrt(10)); // Lfe -> Lt
- map.set (dcp::LFE, 1, 1 / sqrt(10)); // Lfe -> Rt
- }
- if (_film->audio_channels() > 4) {
- map.set (dcp::LS, 0, 1 / sqrt(2)); // Ls -> Lt
- }
- if (_film->audio_channels() > 5) {
- map.set (dcp::RS, 1, 1 / sqrt(2)); // Rs -> Rt
- }
- }
+ _butler.reset(
+ new Butler(
+ _player,
+ Config::instance()->audio_mapping(_audio_channels),
+ _audio_channels,
+ bind(&PlayerVideo::force, _1, AV_PIX_FMT_RGB24),
+ false,
+ true
+ )
+ );
- _butler.reset (new Butler(_player, map, _audio_channels, bind(&PlayerVideo::force, _1, AV_PIX_FMT_RGB24), false, true));
if (!Config::instance()->sound() && !_audio.isStreamOpen()) {
_butler->disable_audio ();
}
_state_timer.unset ();
}
-/** Try to get a frame from the butler and display it.
- * @param lazy true to return false quickly if no video is available quickly (i.e. we are waiting for the butler).
- * false to ask the butler to block until it has video (unless it is suspended).
- * @return true on success, false if we did nothing because it would have taken too long.
- */
-bool
-FilmViewer::get (bool lazy)
-{
- DCPOMATIC_ASSERT (_butler);
- ++_gets;
-
- /* In 3D, keep fetching until we have a frame for the eye we are showing (or one marked EYES_BOTH) */
- do {
- Butler::Error e;
- _player_video = _butler->get_video (!lazy, &e);
- if (!_player_video.first && e == Butler::AGAIN) {
- if (lazy) {
- /* No video available; return saying we failed */
- return false;
- } else {
- /* Player was suspended; come back later */
- signal_manager->when_idle (boost::bind(&FilmViewer::get, this, false));
- return false;
- }
- }
- } while (
- _player_video.first &&
- _film->three_d() &&
- _eyes != _player_video.first->eyes() &&
- _player_video.first->eyes() != EYES_BOTH
- );
-
- /* Surface any decode error raised on the butler's thread as a dialog rather than losing it */
- try {
- _butler->rethrow ();
- } catch (DecodeError& e) {
- error_dialog (_video_view->get(), e.what());
- }
-
- display_player_video ();
- PositionChanged ();
-
- return true;
-}
-
-/** Hand the current _player_video frame to the video view, or drop it if it is already too late */
-void
-FilmViewer::display_player_video ()
-{
- /* No frame available: blank the view */
- if (!_player_video.first) {
- _video_view->set_image (shared_ptr<Image>());
- refresh_view ();
- return;
- }
-
- if (_playing && !_suspended && (time() - _player_video.second) > one_video_frame()) {
- /* Too late; just drop this frame before we try to get its image (which will be the time-consuming
- part if this frame is J2K).
- */
- _video_position = _player_video.second;
- ++_dropped;
- return;
- }
-
- /* In an ideal world, what we would do here is:
- *
- * 1. convert to XYZ exactly as we do in the DCP creation path.
- * 2. convert back to RGB for the preview display, compensating
- * for the monitor etc. etc.
- *
- * but this is inefficient if the source is RGB. Since we don't
- * (currently) care too much about the precise accuracy of the preview's
- * colour mapping (and we care more about its speed) we try to short-
- * circuit this "ideal" situation in some cases.
- *
- * The content's specified colour conversion indicates the colourspace
- * which the content is in (according to the user).
- *
- * PlayerVideo::image (bound to PlayerVideo::force) will take the source
- * image and convert it (from whatever the user has said it is) to RGB.
- */
-
- _state_timer.set ("get image");
-
- _video_view->set_image (
- _player_video.first->image(bind(&PlayerVideo::force, _1, AV_PIX_FMT_RGB24), false, true)
- );
-
- _state_timer.set ("ImageChanged");
- ImageChanged (_player_video.first);
- _state_timer.unset ();
-
- _video_position = _player_video.second;
- _inter_position = _player_video.first->inter_position ();
- _inter_size = _player_video.first->inter_size ();
-
- refresh_view ();
-
- /* Tell the closed captions dialog the time we have just displayed */
- _closed_captions_dialog->update (time());
-}
-
-/** One-shot timer tick while playing: display a frame, then re-arm the timer for the next one */
-void
-FilmViewer::timer ()
-{
- if (!_film || !_playing || _suspended) {
- return;
- }
-
- get (false);
- DCPTime const next = _video_position + one_video_frame();
-
- /* Reached the end of the film */
- if (next >= _film->length()) {
- stop ();
- Finished ();
- return;
- }
-
- LOG_DEBUG_PLAYER("%1 -> %2; delay %3", next.seconds(), time().seconds(), max((next.seconds() - time().seconds()) * 1000, 1.0));
- _timer.Start (max ((next.seconds() - time().seconds()) * 1000, 1.0), wxTIMER_ONE_SHOT);
-
- /* Surface any error raised on the butler's thread */
- if (_butler) {
- _butler->rethrow ();
- }
-}
-
void
FilmViewer::set_outline_content (bool o)
{
if (!quick_refresh()) {
slow_refresh ();
}
- PositionChanged ();
}
void
--_suspended;
if (_playing && !_suspended) {
if (_audio.isStreamOpen()) {
- _audio.setStreamTime (_video_position.seconds());
+ _audio.setStreamTime (_video_view->position().seconds());
_audio.startStream ();
}
- timer ();
+ _video_view->start ();
}
}
}
if (_audio.isStreamOpen()) {
- _audio.setStreamTime (_video_position.seconds());
+ _audio.setStreamTime (_video_view->position().seconds());
_audio.startStream ();
}
- _playing = true;
_dropped = 0;
- timer ();
+ _playing = true;
+ _video_view->start ();
Started (position());
}
}
_playing = false;
+ _video_view->stop ();
Stopped (position());
return true;
}
if (!refreshed) {
slow_refresh ();
}
- PositionChanged ();
}
void
FilmViewer::film_change (ChangeType type, Film::Property p)
{
- if (type == CHANGE_TYPE_DONE && p == Film::AUDIO_CHANNELS) {
+ /* Only act once a change has actually completed */
+ if (type != CHANGE_TYPE_DONE) {
+ return;
+ }
+
+ if (p == Film::AUDIO_CHANNELS) {
recreate_butler ();
+ } else if (p == Film::VIDEO_FRAME_RATE) {
+ _video_view->set_video_frame_rate (_film->video_frame_rate());
}
}
+/** Handler for the film's LengthChange signal; pass the new length on to the video view */
+void
+FilmViewer::film_length_change ()
+{
+ _video_view->set_length (_film->length());
+}
+
/** Re-get the current frame slowly by seeking */
void
FilmViewer::slow_refresh ()
{
- seek (_video_position, true);
+ /* Accurate seek to the current position so that this exact frame is decoded again */
+ seek (_video_view->position(), true);
}
/** Try to re-get the current frame quickly by resetting the metadata
bool
FilmViewer::quick_refresh ()
{
- if (!_player_video.first) {
+ /* NOTE(review): this reaches into VideoView's underscore-prefixed _player_video member —
+ presumably FilmViewer is a friend of VideoView; consider an accessor instead — confirm */
+ if (!_video_view->_player_video.first) {
return false;
}
- if (!_player_video.first->reset_metadata (_film, _player->video_container_size(), _film->frame_size())) {
+ if (!_video_view->_player_video.first->reset_metadata (_film, _player->video_container_size(), _film->frame_size())) {
return false;
}
- display_player_video ();
+ _video_view->display_player_video ();
return true;
}
_butler->seek (t, accurate);
if (!_playing) {
- request_idle_get ();
+ request_idle_display_next_frame ();
} else {
- /* Make sure we get a frame so that _video_position is set up before we resume */
- while (!get(true)) {}
+ while (!_video_view->display_next_frame(false)) {}
}
resume ();
}
#endif
+ if (p == Config::AUDIO_MAPPING) {
+ recreate_butler ();
+ return;
+ }
+
if (p != Config::SOUND && p != Config::SOUND_OUTPUT) {
return;
}
return DCPTime::from_seconds (const_cast<RtAudio*>(&_audio)->getStreamTime());
}
- return _video_position;
+ return _video_view->position();
+}
+
+/** @return the DCP time of the audio currently being heard (stream time corrected for
+ * output latency), or none if the audio stream is not running.
+ */
+optional<DCPTime>
+FilmViewer::audio_time () const
+{
+ if (!_audio.isStreamRunning()) {
+ return optional<DCPTime>();
+ }
+
+ /* const_cast presumably because RtAudio::getStreamTime() is not const — confirm */
+ return DCPTime::from_seconds (const_cast<RtAudio*>(&_audio)->getStreamTime ()) -
+ DCPTime::from_frames (average_latency(), _film->audio_frame_rate());
}
DCPTime
DCPTime::from_frames (average_latency(), _film->audio_frame_rate());
}
- return _video_position;
+ return _video_view->position();
}
int
/** Seek by a given amount relative to the current position */
void
FilmViewer::seek_by (DCPTime by, bool accurate)
{
- seek (_video_position + by, accurate);
+ seek (_video_view->position() + by, accurate);
}
void
{
_pad_black = p;
}
+
+/** Emit our Finished signal via emit(), so this is safe to call from a non-UI thread */
+void
+FilmViewer::emit_finished ()
+{
+ emit (boost::bind(boost::ref(Finished)));
+}
+