}
-Player::~Player ()
-{
- delete _shuffler;
-}
-
-
void
Player::setup_pieces ()
{
auto old_pieces = _pieces;
_pieces.clear ();
- delete _shuffler;
- _shuffler = new Shuffler();
+ _shuffler.reset (new Shuffler());
_shuffler->Video.connect(bind(&Player::video, this, _1, _2));
for (auto i: playlist()->content()) {
if (decoder->video) {
if (i->video->frame_type() == VideoFrameType::THREE_D_LEFT || i->video->frame_type() == VideoFrameType::THREE_D_RIGHT) {
/* We need a Shuffler to cope with 3D L/R video data arriving out of sequence */
- decoder->video->Data.connect (bind(&Shuffler::video, _shuffler, weak_ptr<Piece>(piece), _1));
+ decoder->video->Data.connect (bind(&Shuffler::video, _shuffler.get(), weak_ptr<Piece>(piece), _1));
} else {
decoder->video->Data.connect (bind(&Player::video, this, weak_ptr<Piece>(piece), _1));
}
}
}
+ for (auto i = _pieces.begin(); i != _pieces.end(); ++i) {
+ if (auto video = (*i)->content->video) {
+ if (video->use() && video->frame_type() != VideoFrameType::THREE_D_LEFT && video->frame_type() != VideoFrameType::THREE_D_RIGHT) {
+ /* Look for content later in the content list with in-use video that overlaps this */
+ auto period = DCPTimePeriod((*i)->content->position(), (*i)->content->end(_film));
+ auto j = i;
+ ++j;
+ for (; j != _pieces.end(); ++j) {
+ if ((*j)->content->video && (*j)->content->video->use()) {
+ (*i)->ignore_video = DCPTimePeriod((*j)->content->position(), (*j)->content->end(_film)).overlap(period);
+ }
+ }
+ }
+ }
+ }
+
_black = Empty (_film, playlist(), bind(&have_video, _1), _playback_length);
_silent = Empty (_film, playlist(), bind(&have_audio, _1), _playback_length);
- _last_video_time = {};
+ _last_video_time = boost::optional<dcpomatic::DCPTime>();
_last_video_eyes = Eyes::BOTH;
- _last_audio_time = {};
+ _last_audio_time = boost::optional<dcpomatic::DCPTime>();
}
void
Player::video (weak_ptr<Piece> wp, ContentVideo video)
{
+ if (_suspended) {
+ return;
+ }
+
auto piece = wp.lock ();
if (!piece) {
		return;
	}
+ if (piece->ignore_video && piece->ignore_video->contains(time)) {
+ return;
+ }
+
/* Fill gaps that we discover now that we have some video which needs to be emitted.
This is where we need to fill to.
*/
void
Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_audio)
{
+ if (_suspended) {
+ return;
+ }
+
DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);
auto piece = wp.lock ();
void
Player::bitmap_text_start (weak_ptr<Piece> wp, weak_ptr<const TextContent> wc, ContentBitmapText subtitle)
{
+ if (_suspended) {
+ return;
+ }
+
auto piece = wp.lock ();
auto text = wc.lock ();
if (!piece || !text) {
void
Player::plain_text_start (weak_ptr<Piece> wp, weak_ptr<const TextContent> wc, ContentStringText subtitle)
{
+ if (_suspended) {
+ return;
+ }
+
auto piece = wp.lock ();
auto text = wc.lock ();
if (!piece || !text) {
void
Player::subtitle_stop (weak_ptr<Piece> wp, weak_ptr<const TextContent> wc, ContentTime to)
{
+ if (_suspended) {
+ return;
+ }
+
auto text = wc.lock ();
if (!text) {
return;
void
Player::emit_video (shared_ptr<PlayerVideo> pv, DCPTime time)
{
+ if (!_film->three_d()) {
+ if (pv->eyes() == Eyes::LEFT) {
+ /* Use left-eye images for both eyes... */
+ pv->set_eyes (Eyes::BOTH);
+ } else if (pv->eyes() == Eyes::RIGHT) {
+ /* ...and discard the right */
+ return;
+ }
+ }
+
/* We need a delay to give a little wiggle room to ensure that relevent subtitles arrive at the
player before the video that requires them.
*/
void
Player::atmos (weak_ptr<Piece>, ContentAtmos data)
{
+ if (_suspended) {
+ return;
+ }
+
Atmos (data.data, DCPTime::from_frames(data.frame, _film->video_frame_rate()), data.metadata);
}