Remove in-place translations support.
[dcpomatic.git] / src / lib / player.cc
index c7dd83d6daa152a47ad294ffcf80bf077059589d..c03cb97a59f50ad8549f8142279e48a236a801dc 100644 (file)
@@ -143,7 +143,7 @@ Player::construct ()
        connect();
        set_video_container_size(film->frame_size());
 
-       film_change (ChangeType::DONE, Film::Property::AUDIO_PROCESSOR);
+       film_change(ChangeType::DONE, FilmProperty::AUDIO_PROCESSOR);
 
        setup_pieces ();
        seek (DCPTime (), true);
@@ -191,6 +191,7 @@ Player::Player(Player&& other)
        , _silent(std::move(other._silent))
        , _active_texts(std::move(other._active_texts))
        , _audio_processor(std::move(other._audio_processor))
+       , _disable_audio_processor(other._disable_audio_processor)
        , _playback_length(other._playback_length.load())
        , _subtitle_alignment(other._subtitle_alignment)
 {
@@ -230,6 +231,7 @@ Player::operator=(Player&& other)
        _silent = std::move(other._silent);
        _active_texts = std::move(other._active_texts);
        _audio_processor = std::move(other._audio_processor);
+       _disable_audio_processor = other._disable_audio_processor;
        _playback_length = other._playback_length.load();
        _subtitle_alignment = other._subtitle_alignment;
 
@@ -378,13 +380,29 @@ Player::setup_pieces ()
                return v && v->use() && v->frame_type() != VideoFrameType::THREE_D_LEFT && v->frame_type() != VideoFrameType::THREE_D_RIGHT;
        };
 
-       for (auto i = _pieces.begin(); i != _pieces.end(); ++i) {
-               if (ignore_overlap((*i)->content->video)) {
+       for (auto piece = _pieces.begin(); piece != _pieces.end(); ++piece) {
+               if (ignore_overlap((*piece)->content->video)) {
                        /* Look for content later in the content list with in-use video that overlaps this */
-                       auto const period = DCPTimePeriod((*i)->content->position(), (*i)->content->end(film));
-                       for (auto j = std::next(i); j != _pieces.end(); ++j) {
-                               if ((*j)->content->video && ignore_overlap((*j)->content->video)) {
-                                       (*i)->ignore_video = DCPTimePeriod((*j)->content->position(), (*j)->content->end(film)).overlap(period);
+                       auto const period = (*piece)->content->period(film);
+                       for (auto later_piece = std::next(piece); later_piece != _pieces.end(); ++later_piece) {
+                               if (ignore_overlap((*later_piece)->content->video)) {
+                                       if (auto overlap = (*later_piece)->content->period(film).overlap(period)) {
+                                               (*piece)->ignore_video.push_back(*overlap);
+                                       }
+                               }
+                       }
+               }
+       }
+
+       for (auto piece = _pieces.begin(); piece != _pieces.end(); ++piece) {
+               if ((*piece)->content->atmos) {
+                       /* Look for content later in the content list with ATMOS that overlaps this */
+                       auto const period = (*piece)->content->period(film);
+                       for (auto later_piece = std::next(piece); later_piece != _pieces.end(); ++later_piece) {
+                               if ((*later_piece)->content->atmos) {
+                                       if (auto overlap = (*later_piece)->content->period(film).overlap(period)) {
+                                               (*piece)->ignore_atmos.push_back(*overlap);
+                                       }
                                }
                        }
                }
@@ -465,7 +483,7 @@ Player::playlist_change (ChangeType type)
 
 
 void
-Player::film_change (ChangeType type, Film::Property p)
+Player::film_change(ChangeType type, FilmProperty p)
 {
        /* Here we should notice Film properties that affect our output, and
           alert listeners that our output now would be different to how it was
@@ -477,9 +495,9 @@ Player::film_change (ChangeType type, Film::Property p)
                return;
        }
 
-       if (p == Film::Property::CONTAINER) {
+       if (p == FilmProperty::CONTAINER) {
                Change (type, PlayerProperty::FILM_CONTAINER, false);
-       } else if (p == Film::Property::VIDEO_FRAME_RATE) {
+       } else if (p == FilmProperty::VIDEO_FRAME_RATE) {
                /* Pieces contain a FrameRateChange which contains the DCP frame rate,
                   so we need new pieces here.
                */
@@ -487,12 +505,12 @@ Player::film_change (ChangeType type, Film::Property p)
                        setup_pieces ();
                }
                Change (type, PlayerProperty::FILM_VIDEO_FRAME_RATE, false);
-       } else if (p == Film::Property::AUDIO_PROCESSOR) {
+       } else if (p == FilmProperty::AUDIO_PROCESSOR) {
                if (type == ChangeType::DONE && film->audio_processor ()) {
                        boost::mutex::scoped_lock lm (_mutex);
                        _audio_processor = film->audio_processor()->clone(film->audio_frame_rate());
                }
-       } else if (p == Film::Property::AUDIO_CHANNELS) {
+       } else if (p == FilmProperty::AUDIO_CHANNELS) {
                if (type == ChangeType::DONE) {
                        boost::mutex::scoped_lock lm (_mutex);
                        _audio_merger.clear ();
@@ -754,7 +772,12 @@ Player::pass ()
        }
        case BLACK:
                LOG_DEBUG_PLAYER ("Emit black for gap at %1", to_string(_black.position()));
-               emit_video (black_player_video_frame(Eyes::BOTH), _black.position());
+               if (film->three_d()) {
+                       emit_video(black_player_video_frame(Eyes::LEFT), _black.position());
+                       emit_video(black_player_video_frame(Eyes::RIGHT), _black.position());
+               } else {
+                       emit_video(black_player_video_frame(Eyes::BOTH), _black.position());
+               }
                _black.set_position (_black.position() + one_video_frame());
                break;
        case SILENT:
@@ -811,17 +834,18 @@ Player::pass ()
                [](state_pair const& a, state_pair const& b) { return a.second.last_push_end.get() < b.second.last_push_end.get(); }
                );
 
+       std::map<AudioStreamPtr, StreamState> alive_stream_states;
+
        if (latest_last_push_end != have_pushed.end()) {
                LOG_DEBUG_PLAYER("Leading audio stream is in %1 at %2", latest_last_push_end->second.piece->content->path(0), to_string(latest_last_push_end->second.last_push_end.get()));
-       }
 
-       /* Now make a list of those streams that are less than ignore_streams_behind behind the leader */
-       std::map<AudioStreamPtr, StreamState> alive_stream_states;
-       for (auto const& i: _stream_states) {
-               if (!i.second.last_push_end || (latest_last_push_end->second.last_push_end.get() - i.second.last_push_end.get()) < dcpomatic::DCPTime::from_seconds(ignore_streams_behind)) {
-                       alive_stream_states.insert(i);
-               } else {
-                       LOG_DEBUG_PLAYER("Ignoring stream %1 because it is too far behind", i.second.piece->content->path(0));
+               /* Now make a list of those streams that are less than ignore_streams_behind behind the leader */
+               for (auto const& i: _stream_states) {
+                       if (!i.second.last_push_end || (latest_last_push_end->second.last_push_end.get() - i.second.last_push_end.get()) < dcpomatic::DCPTime::from_seconds(ignore_streams_behind)) {
+                               alive_stream_states.insert(i);
+                       } else {
+                               LOG_DEBUG_PLAYER("Ignoring stream %1 because it is too far behind", i.second.piece->content->path(0));
+                       }
                }
        }
 
@@ -899,7 +923,7 @@ Player::open_subtitles_for_frame (DCPTime time) const
 
                /* Bitmap subtitles */
                for (auto i: j.bitmap) {
-                       if (!i.image) {
+                       if (!i.image || i.image->size().width == 0 || i.image->size().height == 0) {
                                continue;
                        }
 
@@ -920,7 +944,10 @@ Player::open_subtitles_for_frame (DCPTime time) const
                /* String subtitles (rendered to an image) */
                if (!j.string.empty()) {
                        auto s = render_text(j.string, _video_container_size, time, vfr);
-                       copy (s.begin(), s.end(), back_inserter (captions));
+                       copy_if(s.begin(), s.end(), back_inserter(captions), [](PositionImage const& image) {
+                               return image.image->size().width && image.image->size().height;
+                       });
+
                }
        }
 
@@ -970,6 +997,28 @@ Player::video (weak_ptr<Piece> weak_piece, ContentVideo video)
                return;
        }
 
+       vector<Eyes> eyes_to_emit;
+
+       if (!film->three_d()) {
+               if (video.eyes == Eyes::RIGHT) {
+                       /* 2D film, 3D content: discard right */
+                       return;
+               } else if (video.eyes == Eyes::LEFT) {
+                       /* 2D film, 3D content: emit left as "both" */
+                       video.eyes = Eyes::BOTH;
+                       eyes_to_emit = { Eyes::BOTH };
+               }
+       } else {
+               if (video.eyes == Eyes::BOTH) {
+                       /* 3D film, 2D content: emit "both" for left and right */
+                       eyes_to_emit = { Eyes::LEFT, Eyes::RIGHT };
+               }
+       }
+
+       if (eyes_to_emit.empty()) {
+               eyes_to_emit = { video.eyes };
+       }
+
        /* Time of the first frame we will emit */
        DCPTime const time = content_video_to_dcp (piece, video.frame);
        LOG_DEBUG_PLAYER("Received video frame %1 at %2", video.frame, to_string(time));
@@ -982,7 +1031,12 @@ Player::video (weak_ptr<Piece> weak_piece, ContentVideo video)
                return;
        }
 
-       if (piece->ignore_video && piece->ignore_video->contains(time)) {
+       auto ignore_video = std::find_if(
+               piece->ignore_video.begin(),
+               piece->ignore_video.end(),
+               [time](DCPTimePeriod period) { return period.contains(time); }
+               );
+       if (ignore_video != piece->ignore_video.end()) {
                return;
        }
 
@@ -998,7 +1052,7 @@ Player::video (weak_ptr<Piece> weak_piece, ContentVideo video)
                if ((fill_to - fill_from) > one_video_frame() / 2) {
                        auto last = _last_video.find (weak_piece);
                        if (film->three_d()) {
-                               auto fill_to_eyes = video.eyes;
+                               auto fill_to_eyes = eyes_to_emit[0];
                                if (fill_to_eyes == Eyes::BOTH) {
                                        fill_to_eyes = Eyes::LEFT;
                                }
@@ -1040,32 +1094,37 @@ Player::video (weak_ptr<Piece> weak_piece, ContentVideo video)
 
        auto const content_video = piece->content->video;
 
-       _last_video[weak_piece] = std::make_shared<PlayerVideo>(
-               video.image,
-               content_video->actual_crop(),
-               content_video->fade(film, video.frame),
-               scale_for_display(
-                       content_video->scaled_size(film->frame_size()),
+       auto scaled_size = content_video->scaled_size(film->frame_size());
+       DCPOMATIC_ASSERT(scaled_size);
+
+       for (auto eyes: eyes_to_emit) {
+               _last_video[weak_piece] = std::make_shared<PlayerVideo>(
+                       video.image,
+                       content_video->actual_crop(),
+                       content_video->fade(film, video.frame),
+                       scale_for_display(
+                               *scaled_size,
+                               _video_container_size,
+                               film->frame_size(),
+                               content_video->pixel_quanta()
+                               ),
                        _video_container_size,
-                       film->frame_size(),
-                       content_video->pixel_quanta()
-                       ),
-               _video_container_size,
-               video.eyes,
-               video.part,
-               content_video->colour_conversion(),
-               content_video->range(),
-               piece->content,
-               video.frame,
-               false
-               );
-
-       DCPTime t = time;
-       for (int i = 0; i < frc.repeat; ++i) {
-               if (t < piece->content->end(film)) {
-                       emit_video (_last_video[weak_piece], t);
+                       eyes,
+                       video.part,
+                       content_video->colour_conversion(),
+                       content_video->range(),
+                       piece->content,
+                       video.frame,
+                       false
+                       );
+
+               DCPTime t = time;
+               for (int i = 0; i < frc.repeat; ++i) {
+                       if (t < piece->content->end(film)) {
+                               emit_video (_last_video[weak_piece], t);
+                       }
+                       t += one_video_frame ();
                }
-               t += one_video_frame ();
        }
 }
 
@@ -1153,7 +1212,7 @@ Player::audio (weak_ptr<Piece> weak_piece, AudioStreamPtr stream, ContentAudio c
 
        /* Process */
 
-       if (_audio_processor) {
+       if (_audio_processor && !_disable_audio_processor) {
                content_audio.audio = _audio_processor->run(content_audio.audio, film->audio_channels());
        }
 
@@ -1361,7 +1420,7 @@ Player::seek (DCPTime time, bool accurate)
        _last_video.clear ();
 
        for (auto& state: _stream_states) {
-               state.second.last_push_end = {};
+               state.second.last_push_end = boost::none;
        }
 }
 
@@ -1372,16 +1431,6 @@ Player::emit_video (shared_ptr<PlayerVideo> pv, DCPTime time)
        auto film = _film.lock();
        DCPOMATIC_ASSERT(film);
 
-       if (!film->three_d()) {
-               if (pv->eyes() == Eyes::LEFT) {
-                       /* Use left-eye images for both eyes... */
-                       pv->set_eyes (Eyes::BOTH);
-               } else if (pv->eyes() == Eyes::RIGHT) {
-                       /* ...and discard the right */
-                       return;
-               }
-       }
-
        /* We need a delay to give a little wiggle room to ensure that relevant subtitles arrive at the
           player before the video that requires them.
        */
@@ -1568,6 +1617,15 @@ Player::atmos (weak_ptr<Piece> weak_piece, ContentAtmos data)
                return;
        }
 
+       auto ignore_atmos = std::find_if(
+               piece->ignore_atmos.begin(),
+               piece->ignore_atmos.end(),
+               [dcp_time](DCPTimePeriod period) { return period.contains(dcp_time); }
+               );
+       if (ignore_atmos != piece->ignore_atmos.end()) {
+               return;
+       }
+
        Atmos (data.data, dcp_time, data.metadata);
 }
 
@@ -1578,3 +1636,11 @@ Player::signal_change(ChangeType type, int property)
        Change(type, property, false);
 }
 
+
+/** Must be called from the same thread that calls ::pass() */
+void
+Player::set_disable_audio_processor()
+{
+       _disable_audio_processor = true;
+}
+