X-Git-Url: https://main.carlh.net/gitweb/?p=dcpomatic.git;a=blobdiff_plain;f=src%2Flib%2Fplayer.cc;h=5de089ba91aa9ad297bbe299a2b2ad9b076ec76e;hp=465fcad6d7fc342eb66f78dc318b89a550d062fc;hb=da44da6f31f97d39ca91c35955e573e76371f2c2;hpb=ce2de2f4229d75c0a473d13413c7c553b0980055

diff --git a/src/lib/player.cc b/src/lib/player.cc
index 465fcad6d..5de089ba9 100644
--- a/src/lib/player.cc
+++ b/src/lib/player.cc
@@ -134,12 +134,6 @@ Player::construct ()
 }
 
 
-Player::~Player ()
-{
-	delete _shuffler;
-}
-
-
 void
 Player::setup_pieces ()
 {
@@ -170,8 +164,7 @@ Player::setup_pieces_unlocked ()
 	auto old_pieces = _pieces;
 	_pieces.clear ();
 
-	delete _shuffler;
-	_shuffler = new Shuffler();
+	_shuffler.reset (new Shuffler());
 	_shuffler->Video.connect(bind(&Player::video, this, _1, _2));
 
 	for (auto i: playlist()->content()) {
@@ -226,7 +219,7 @@ Player::setup_pieces_unlocked ()
 		if (decoder->video) {
 			if (i->video->frame_type() == VideoFrameType::THREE_D_LEFT || i->video->frame_type() == VideoFrameType::THREE_D_RIGHT) {
 				/* We need a Shuffler to cope with 3D L/R video data arriving out of sequence */
-				decoder->video->Data.connect (bind(&Shuffler::video, _shuffler, weak_ptr<Piece>(piece), _1));
+				decoder->video->Data.connect (bind(&Shuffler::video, _shuffler.get(), weak_ptr<Piece>(piece), _1));
 			} else {
 				decoder->video->Data.connect (bind(&Player::video, this, weak_ptr<Piece>(piece), _1));
 			}
@@ -266,12 +259,28 @@ Player::setup_pieces_unlocked ()
 		}
 	}
 
+	for (auto i = _pieces.begin(); i != _pieces.end(); ++i) {
+		if (auto video = (*i)->content->video) {
+			if (video->use() && video->frame_type() != VideoFrameType::THREE_D_LEFT && video->frame_type() != VideoFrameType::THREE_D_RIGHT) {
+				/* Look for content later in the content list with in-use video that overlaps this */
+				auto period = DCPTimePeriod((*i)->content->position(), (*i)->content->end(_film));
+				auto j = i;
+				++j;
+				for (; j != _pieces.end(); ++j) {
+					if ((*j)->content->video && (*j)->content->video->use()) {
+						(*i)->ignore_video = DCPTimePeriod((*j)->content->position(), (*j)->content->end(_film)).overlap(period);
+					}
+				}
+			}
+		}
+	}
+
 	_black = Empty (_film, playlist(), bind(&have_video, _1), _playback_length);
 	_silent = Empty (_film, playlist(), bind(&have_audio, _1), _playback_length);
 
-	_last_video_time = {};
+	_last_video_time = boost::optional<DCPTime>();
 	_last_video_eyes = Eyes::BOTH;
-	_last_audio_time = {};
+	_last_audio_time = boost::optional<DCPTime>();
 }
 
 
@@ -825,6 +834,10 @@ Player::open_subtitles_for_frame (DCPTime time) const
 void
 Player::video (weak_ptr<Piece> wp, ContentVideo video)
 {
+	if (_suspended) {
+		return;
+	}
+
 	auto piece = wp.lock ();
 	if (!piece) {
 		return;
 	}
@@ -851,6 +864,10 @@ Player::video (weak_ptr<Piece> wp, ContentVideo video)
 		return;
 	}
 
+	if (piece->ignore_video && piece->ignore_video->contains(time)) {
+		return;
+	}
+
 	/* Fill gaps that we discover now that we have some video which needs to be emitted.
 	   This is where we need to fill to.
 	*/
@@ -931,6 +948,10 @@
 void
 Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_audio)
 {
+	if (_suspended) {
+		return;
+	}
+
 	DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);
 
 	auto piece = wp.lock ();
@@ -1001,6 +1022,10 @@ Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_a
 void
 Player::bitmap_text_start (weak_ptr<Piece> wp, weak_ptr<const TextContent> wc, ContentBitmapText subtitle)
 {
+	if (_suspended) {
+		return;
+	}
+
 	auto piece = wp.lock ();
 	auto text = wc.lock ();
 	if (!piece || !text) {
 		return;
 	}
@@ -1040,6 +1065,10 @@ Player::bitmap_text_start (weak_ptr<Piece> wp, weak_ptr<const TextContent> wc, C
 void
 Player::plain_text_start (weak_ptr<Piece> wp, weak_ptr<const TextContent> wc, ContentStringText subtitle)
 {
+	if (_suspended) {
+		return;
+	}
+
 	auto piece = wp.lock ();
 	auto text = wc.lock ();
 	if (!piece || !text) {
 		return;
 	}
@@ -1085,6 +1114,10 @@ Player::plain_text_start (weak_ptr<Piece> wp, weak_ptr<const TextContent> wc, Co
 void
 Player::subtitle_stop (weak_ptr<Piece> wp, weak_ptr<const TextContent> wc, ContentTime to)
 {
+	if (_suspended) {
+		return;
+	}
+
 	auto text = wc.lock ();
 	if (!text) {
 		return;
 	}
@@ -1179,6 +1212,16 @@ Player::seek (DCPTime time, bool accurate)
 void
 Player::emit_video (shared_ptr<PlayerVideo> pv, DCPTime time)
 {
+	if (!_film->three_d()) {
+		if (pv->eyes() == Eyes::LEFT) {
+			/* Use left-eye images for both eyes... */
+			pv->set_eyes (Eyes::BOTH);
+		} else if (pv->eyes() == Eyes::RIGHT) {
+			/* ...and discard the right */
+			return;
+		}
+	}
+
 	/* We need a delay to give a little wiggle room to ensure that relevent
 	   subtitles arrive at the player before the video that requires them.
 	*/
@@ -1324,6 +1367,10 @@ Player::playlist () const
 void
 Player::atmos (weak_ptr<Piece>, ContentAtmos data)
 {
+	if (_suspended) {
+		return;
+	}
+
 	Atmos (data.data, DCPTime::from_frames(data.frame, _film->video_frame_rate()), data.metadata);
 }
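
A note on the two related changes at the top of this diff: the explicit Player destructor (which did "delete _shuffler") is removed, and setup_pieces_unlocked() now calls _shuffler.reset(new Shuffler()) and passes _shuffler.get() to bind(). The corresponding change to player.h is not part of this diff, so the exact member type is not shown here; the sketch below assumes a std::unique_ptr member, which is consistent with the reset()/get() calls (a shared_ptr would serve the same purpose). It illustrates the ownership pattern only and is not dcpomatic code.

#include <memory>

struct Shuffler {};                  /* stand-in for the real Shuffler */

class Player {
public:
	void setup_pieces_unlocked() {
		/* Replaces "delete _shuffler; _shuffler = new Shuffler();":
		   reset() destroys any previous Shuffler and takes ownership
		   of the new one. */
		_shuffler.reset(new Shuffler());
	}

	Shuffler* raw_shuffler() const {
		/* Code that needs a plain pointer, like the bind() to
		   Shuffler::video in the diff, uses get(). */
		return _shuffler.get();
	}

	/* No ~Player() needed any more: the smart pointer deletes the
	   Shuffler automatically. */

private:
	std::unique_ptr<Shuffler> _shuffler;
};

int main()
{
	Player p;
	p.setup_pieces_unlocked();
	p.setup_pieces_unlocked();   /* the first Shuffler is freed here, with no manual delete */
}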
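
The new loop in setup_pieces_unlocked() and the matching check in Player::video() implement a simple priority rule: where two pieces of in-use 2D video overlap on the timeline, the piece that comes later in the content list wins, and the earlier piece records the overlapping period in ignore_video so its frames are dropped there. The following self-contained sketch shows the same overlap()/contains() idea with toy Period and Piece types; it is a simplified illustration under those assumptions, not the real DCPTimePeriod or Piece API.

#include <algorithm>
#include <cstdio>
#include <iterator>
#include <optional>
#include <vector>

struct Period {
	long from;   /* inclusive start */
	long to;     /* exclusive end */

	bool contains(long t) const {
		return t >= from && t < to;
	}

	std::optional<Period> overlap(Period const& other) const {
		auto const f = std::max(from, other.from);
		auto const t = std::min(to, other.to);
		if (f < t) {
			return Period{f, t};
		}
		return {};
	}
};

struct Piece {
	Period period;
	std::optional<Period> ignore_video;
};

int main()
{
	std::vector<Piece> pieces = {
		{ {0, 100}, {} },    /* earlier content */
		{ {60, 160}, {} },   /* later content, overlapping 60..100 */
	};

	/* As in the setup_pieces_unlocked() loop: each piece looks at the
	   pieces after it and records any overlap, so the later piece takes
	   priority over that period. */
	for (auto i = pieces.begin(); i != pieces.end(); ++i) {
		for (auto j = std::next(i); j != pieces.end(); ++j) {
			i->ignore_video = j->period.overlap(i->period);
		}
	}

	/* As in the check added to Player::video(): drop a frame that falls
	   inside the ignored period. */
	long const frame_time = 80;
	if (pieces[0].ignore_video && pieces[0].ignore_video->contains(frame_time)) {
		std::printf("piece 0 is skipped at %ld; piece 1 supplies the frame\n", frame_time);
	}
}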