diff --git a/src/lib/player.cc b/src/lib/player.cc
index 465fcad6d7fc342eb66f78dc318b89a550d062fc..5de089ba91aa9ad297bbe299a2b2ad9b076ec76e 100644
--- a/src/lib/player.cc
+++ b/src/lib/player.cc
@@ -134,12 +134,6 @@ Player::construct ()
 }
 
 
-Player::~Player ()
-{
-       delete _shuffler;
-}
-
-
 void
 Player::setup_pieces ()
 {
@@ -170,8 +164,7 @@ Player::setup_pieces_unlocked ()
        auto old_pieces = _pieces;
        _pieces.clear ();
 
-       delete _shuffler;
-       _shuffler = new Shuffler();
+       _shuffler.reset (new Shuffler());
        _shuffler->Video.connect(bind(&Player::video, this, _1, _2));
 
        for (auto i: playlist()->content()) {
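
The manual delete/new pair becomes a single reset() call. Here is a minimal sketch of the ownership change, assuming player.h now declares _shuffler as a std::unique_ptr<Shuffler> (the header diff is not shown here); reset() destroys the previous Shuffler before taking ownership of the new one, which is also why the explicit ~Player() destructor in the first hunk could go.

	#include <memory>

	class Shuffler {};

	class Player
	{
	public:
		void setup_pieces_unlocked ()
		{
			/* Frees any previous Shuffler, then takes ownership of the new one */
			_shuffler.reset (new Shuffler());
		}

		/* No user-declared ~Player(): the unique_ptr destroys the Shuffler itself */

	private:
		std::unique_ptr<Shuffler> _shuffler;
	};
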
@@ -226,7 +219,7 @@ Player::setup_pieces_unlocked ()
                if (decoder->video) {
                        if (i->video->frame_type() == VideoFrameType::THREE_D_LEFT || i->video->frame_type() == VideoFrameType::THREE_D_RIGHT) {
                                /* We need a Shuffler to cope with 3D L/R video data arriving out of sequence */
-                               decoder->video->Data.connect (bind(&Shuffler::video, _shuffler, weak_ptr<Piece>(piece), _1));
+                               decoder->video->Data.connect (bind(&Shuffler::video, _shuffler.get(), weak_ptr<Piece>(piece), _1));
                        } else {
                                decoder->video->Data.connect (bind(&Player::video, this, weak_ptr<Piece>(piece), _1));
                        }
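
Binding &Shuffler::video now passes _shuffler.get() rather than the smart pointer itself: bind() stores copies of its bound arguments, and a unique_ptr cannot be copied. A small sketch of the idea, using std::bind in place of the diff's boost::bind; binding the raw pointer is safe here only because the Shuffler and the connections into it are replaced together in setup_pieces_unlocked().

	#include <functional>
	#include <memory>

	struct Shuffler
	{
		void video (int frame) { (void) frame; }
	};

	int main ()
	{
		auto shuffler = std::make_unique<Shuffler>();
		/* bind (&Shuffler::video, shuffler, _1) would not compile, since
		   the unique_ptr cannot be copied into the bind expression; the
		   raw pointer can. */
		auto callback = std::bind (&Shuffler::video, shuffler.get(), std::placeholders::_1);
		callback (42);
		return 0;
	}
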
@@ -266,12 +259,28 @@ Player::setup_pieces_unlocked ()
                }
        }
 
+       for (auto i = _pieces.begin(); i != _pieces.end(); ++i) {
+               if (auto video = (*i)->content->video) {
+                       if (video->use() && video->frame_type() != VideoFrameType::THREE_D_LEFT && video->frame_type() != VideoFrameType::THREE_D_RIGHT) {
+                               /* Look for content later in the content list with in-use video that overlaps this */
+                               auto period = DCPTimePeriod((*i)->content->position(), (*i)->content->end(_film));
+                               auto j = i;
+                               ++j;
+                               for (; j != _pieces.end(); ++j) {
+                                       if ((*j)->content->video && (*j)->content->video->use()) {
+                                               (*i)->ignore_video = DCPTimePeriod((*j)->content->position(), (*j)->content->end(_film)).overlap(period);
+                                       }
+                               }
+                       }
+               }
+       }
+
        _black = Empty (_film, playlist(), bind(&have_video, _1), _playback_length);
        _silent = Empty (_film, playlist(), bind(&have_audio, _1), _playback_length);
 
-       _last_video_time = {};
+       _last_video_time = boost::optional<dcpomatic::DCPTime>();
        _last_video_eyes = Eyes::BOTH;
-       _last_audio_time = {};
+       _last_audio_time = boost::optional<dcpomatic::DCPTime>();
 }
 
 
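
The new loop gives each piece with in-use 2D video an ignore_video period wherever a later piece with in-use video overlaps it; Player::video() below then drops any frame whose time falls inside that period. Note that the inner loop assigns on every later video piece it finds, so the last candidate examined determines ignore_video. A standalone sketch of the scan, with simplified stand-ins for dcpomatic's DCPTimePeriod and Piece:

	#include <algorithm>
	#include <optional>
	#include <vector>

	/* Simplified stand-in for DCPTimePeriod */
	struct Period
	{
		long from = 0;
		long to = 0;

		bool contains (long time) const { return time >= from && time < to; }
	};

	std::optional<Period> overlap (Period a, Period b)
	{
		auto const from = std::max (a.from, b.from);
		auto const to = std::min (a.to, b.to);
		if (from < to) {
			return Period{from, to};
		}
		return std::nullopt;
	}

	/* Simplified stand-in for Piece */
	struct Piece
	{
		Period period;
		bool use_video = true;
		std::optional<Period> ignore_video;
	};

	/* Mirror of the loop above: a piece ignores its video wherever a later
	   piece with in-use video overlaps it.  As in the diff, each later
	   candidate overwrites the previous result. */
	void mark_overlaps (std::vector<Piece>& pieces)
	{
		for (auto i = pieces.begin(); i != pieces.end(); ++i) {
			for (auto j = std::next(i); j != pieces.end(); ++j) {
				if (j->use_video) {
					i->ignore_video = overlap (j->period, i->period);
				}
			}
		}
	}
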
@@ -825,6 +834,10 @@ Player::open_subtitles_for_frame (DCPTime time) const
 void
 Player::video (weak_ptr<Piece> wp, ContentVideo video)
 {
+       if (_suspended) {
+               return;
+       }
+
        auto piece = wp.lock ();
        if (!piece) {
                return;
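
This is the first of several identical guards added in this commit: video, audio, both text-start handlers, subtitle_stop and atmos all get one. The decoders feed the player through signal connections, so an emission can still arrive while the pieces are being rebuilt; the guard drops it instead of applying it to half-rebuilt state. A minimal sketch of the pattern, with the suspend/resume pair assumed (it is not part of this diff):

	class Player
	{
	public:
		void suspend () { _suspended = true; }
		void resume () { _suspended = false; }

		/* Every decoder-facing sink starts with the same early-out */
		void video (int frame)
		{
			(void) frame;
			if (_suspended) {
				return;  /* emission raced with a rebuild; drop it */
			}
			/* ... normal handling ... */
		}

	private:
		bool _suspended = false;
	};
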
@@ -851,6 +864,10 @@ Player::video (weak_ptr<Piece> wp, ContentVideo video)
                return;
        }
 
+       if (piece->ignore_video && piece->ignore_video->contains(time)) {
+               return;
+       }
+
        /* Fill gaps that we discover now that we have some video which needs to be emitted.
           This is where we need to fill to.
        */
@@ -931,6 +948,10 @@ Player::video (weak_ptr<Piece> wp, ContentVideo video)
 void
 Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_audio)
 {
+       if (_suspended) {
+               return;
+       }
+
        DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);
 
        auto piece = wp.lock ();
@@ -1001,6 +1022,10 @@ Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_a
 void
 Player::bitmap_text_start (weak_ptr<Piece> wp, weak_ptr<const TextContent> wc, ContentBitmapText subtitle)
 {
+       if (_suspended) {
+               return;
+       }
+
        auto piece = wp.lock ();
        auto text = wc.lock ();
        if (!piece || !text) {
@@ -1040,6 +1065,10 @@ Player::bitmap_text_start (weak_ptr<Piece> wp, weak_ptr<const TextContent> wc, C
 void
 Player::plain_text_start (weak_ptr<Piece> wp, weak_ptr<const TextContent> wc, ContentStringText subtitle)
 {
+       if (_suspended) {
+               return;
+       }
+
        auto piece = wp.lock ();
        auto text = wc.lock ();
        if (!piece || !text) {
@@ -1085,6 +1114,10 @@ Player::plain_text_start (weak_ptr<Piece> wp, weak_ptr<const TextContent> wc, Co
 void
 Player::subtitle_stop (weak_ptr<Piece> wp, weak_ptr<const TextContent> wc, ContentTime to)
 {
+       if (_suspended) {
+               return;
+       }
+
        auto text = wc.lock ();
        if (!text) {
                return;
@@ -1179,6 +1212,16 @@ Player::seek (DCPTime time, bool accurate)
 void
 Player::emit_video (shared_ptr<PlayerVideo> pv, DCPTime time)
 {
+       if (!_film->three_d()) {
+               if (pv->eyes() == Eyes::LEFT) {
+                       /* Use left-eye images for both eyes... */
+                       pv->set_eyes (Eyes::BOTH);
+               } else if (pv->eyes() == Eyes::RIGHT) {
+                       /* ...and discard the right */
+                       return;
+               }
+       }
+
 	/* We need a delay to give a little wiggle room to ensure that relevant subtitles arrive at the
           player before the video that requires them.
        */
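
With a 2D film playing 3D content, emit_video() now collapses the stereo pair: the left-eye image is relabelled Eyes::BOTH and emitted as the single 2D frame, and the right-eye image is discarded. A sketch of that mapping in isolation, with Eyes standing in for the enum used in the diff:

	#include <optional>

	enum class Eyes { LEFT, RIGHT, BOTH };

	/* Returns the eyes tag to emit for a 2D film, or nullopt to drop the frame */
	std::optional<Eyes> eyes_for_2d (Eyes eyes)
	{
		switch (eyes) {
		case Eyes::LEFT:
			return Eyes::BOTH;    /* the left image serves both eyes */
		case Eyes::RIGHT:
			return std::nullopt;  /* ...and the right is discarded */
		default:
			return eyes;          /* already 2D */
		}
	}
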
@@ -1324,6 +1367,10 @@ Player::playlist () const
 void
 Player::atmos (weak_ptr<Piece>, ContentAtmos data)
 {
+       if (_suspended) {
+               return;
+       }
+
        Atmos (data.data, DCPTime::from_frames(data.frame, _film->video_frame_rate()), data.metadata);
 }