Add DCPContent::resolution().
diff --git a/src/lib/player.cc b/src/lib/player.cc
index 62527e3ebbf3b60408bd2cc967ffb568f529ddf8..ab05d42ada522c25627cf5c7381bf13a07c649fb 100644
--- a/src/lib/player.cc
+++ b/src/lib/player.cc
@@ -1,5 +1,5 @@
 /*
-    Copyright (C) 2013-2019 Carl Hetherington <cth@carlh.net>
+    Copyright (C) 2013-2020 Carl Hetherington <cth@carlh.net>
 
     This file is part of DCP-o-matic.
 
@@ -84,7 +84,7 @@ int const PlayerProperty::FILM_CONTAINER = 702;
 int const PlayerProperty::FILM_VIDEO_FRAME_RATE = 703;
 int const PlayerProperty::DCP_DECODE_REDUCTION = 704;
 
-Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
+Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist, DCPTime playback_length)
        : _film (film)
        , _playlist (playlist)
        , _suspended (0)
@@ -97,6 +97,7 @@ Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist
        , _play_referenced (false)
        , _audio_merger (_film->audio_frame_rate())
        , _shuffler (0)
+       , _playback_length (playback_length)
 {
        _film_changed_connection = _film->Change.connect (bind (&Player::film_change, this, _1, _2));
        /* The butler must hear about this first, so since we are proxying this through to the butler we must
@@ -125,15 +126,15 @@ Player::setup_pieces ()
 }
 
 bool
-have_video (shared_ptr<Piece> piece)
+have_video (shared_ptr<const Content> content)
 {
-       return piece->decoder && piece->decoder->video;
+       return static_cast<bool>(content->video);
 }
 
 bool
-have_audio (shared_ptr<Piece> piece)
+have_audio (shared_ptr<const Content> content)
 {
-       return piece->decoder && piece->decoder->audio;
+       return static_cast<bool>(content->audio);
 }
 
 void
@@ -237,15 +238,12 @@ Player::setup_pieces_unlocked ()
                }
        }
 
-       _black = Empty (_film, _pieces, bind(&have_video, _1));
-       _silent = Empty (_film, _pieces, bind(&have_audio, _1));
+       _black = Empty (_film, _playlist, bind(&have_video, _1), _playback_length);
+       _silent = Empty (_film, _playlist, bind(&have_audio, _1), _playback_length);
 
        _last_video_time = DCPTime ();
        _last_video_eyes = EYES_BOTH;
        _last_audio_time = DCPTime ();
-
-       /* Cached value to save recalculating it on every ::pass */
-       _film_length = _film->length ();
 }
 
 void
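The have_video()/have_audio() predicates now test the Content itself rather than a Piece's decoder, so _black and _silent can be worked out from the playlist and the supplied playback length before any decoders exist. A rough sketch of the Empty constructor these call sites assume (the signature is inferred from the calls above, not quoted from empty.cc):

    /* Sketch only: the member name _periods and the subtract() helper are
       assumptions.  Gather the periods covered by content matching `part',
       then keep whatever remains of [0, length) as the empty periods. */
    Empty::Empty (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist, boost::function<bool (shared_ptr<const Content>)> part, DCPTime length)
    {
            list<DCPTimePeriod> full;
            BOOST_FOREACH (shared_ptr<Content> i, playlist->content()) {
                    if (part (i)) {
                            full.push_back (DCPTimePeriod(i->position(), i->end(film)));
                    }
            }
            _periods = subtract (DCPTimePeriod(DCPTime(), length), full);
    }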
@@ -346,7 +344,8 @@ Player::black_player_video_frame (Eyes eyes) const
                        PresetColourConversion::all().front().conversion,
                        VIDEO_RANGE_FULL,
                        boost::weak_ptr<Content>(),
-                       boost::optional<Frame>()
+                       boost::optional<Frame>(),
+                       false
                )
        );
 }
@@ -565,15 +564,14 @@ bool
 Player::pass ()
 {
        boost::mutex::scoped_lock lm (_mutex);
-       DCPOMATIC_ASSERT (_film_length);
 
        if (_suspended) {
                /* We can't pass in this state */
                return false;
        }
 
-       if (*_film_length == DCPTime()) {
-               /* Special case of an empty Film; just give one black frame */
+       if (_playback_length == DCPTime()) {
+               /* Special case of a zero-length playback; just give one black frame */
                emit_video (black_player_video_frame(EYES_BOTH), DCPTime());
                return true;
        }
@@ -679,7 +677,7 @@ Player::pass ()
        /* Work out the time before which the audio is definitely all here.  This is the earliest last_push_end of one
           of our streams, or the position of the _silent.
        */
-       DCPTime pull_to = *_film_length;
+       DCPTime pull_to = _playback_length;
        for (map<AudioStreamPtr, StreamState>::const_iterator i = _stream_states.begin(); i != _stream_states.end(); ++i) {
                if (!i->second.piece->done && i->second.last_push_end < pull_to) {
                        pull_to = i->second.last_push_end;
@@ -850,7 +848,8 @@ Player::video (weak_ptr<Piece> wp, ContentVideo video)
                        piece->content->video->colour_conversion(),
                        piece->content->video->range(),
                        piece->content,
-                       video.frame
+                       video.frame,
+                       false
                        )
                );
 
@@ -900,9 +899,7 @@ Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_a
                if (remaining_frames == 0) {
                        return;
                }
-               shared_ptr<AudioBuffers> cut (new AudioBuffers (content_audio.audio->channels(), remaining_frames));
-               cut->copy_from (content_audio.audio.get(), remaining_frames, 0, 0);
-               content_audio.audio = cut;
+               content_audio.audio.reset (new AudioBuffers(content_audio.audio, remaining_frames, 0));
        }
 
        DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);
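Both this hunk and the discard_audio() change at the end of the diff switch from a manual allocate-and-copy_from to an AudioBuffers constructor that copies a slice of another buffer. A sketch of what that constructor is assumed to do (its real definition is in audio_buffers.cc, not shown here):

    /* Assumed behaviour: copy `frames' frames starting at `read_offset'
       from `other' into a fresh buffer with the same channel count. */
    AudioBuffers::AudioBuffers (boost::shared_ptr<const AudioBuffers> other, int32_t frames, int32_t read_offset)
    {
            allocate (other->channels(), frames);
            copy_from (other.get(), frames, read_offset, 0);
    }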
@@ -955,8 +952,15 @@ Player::bitmap_text_start (weak_ptr<Piece> wp, weak_ptr<const TextContent> wc, C
 
        PlayerText ps;
        shared_ptr<Image> image = subtitle.sub.image;
+
        /* We will scale the subtitle up to fit _video_container_size */
-       dcp::Size scaled_size (subtitle.sub.rectangle.width * _video_container_size.width, subtitle.sub.rectangle.height * _video_container_size.height);
+       int const width = subtitle.sub.rectangle.width * _video_container_size.width;
+       int const height = subtitle.sub.rectangle.height * _video_container_size.height;
+       if (width == 0 || height == 0) {
+               return;
+       }
+
+       dcp::Size scaled_size (width, height);
        ps.bitmap.push_back (BitmapText(image->scale(scaled_size, dcp::YUV_TO_RGB_REC601, image->pixel_format(), true, _fast), subtitle.sub.rectangle));
        DCPTime from (content_time_to_dcp (piece, subtitle.from()));
 
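The new width/height guard handles the degenerate case where a very thin subtitle rectangle truncates to a zero-pixel dimension, presumably because asking Image::scale() for a zero-sized image is not meaningful. Illustrative numbers (assumed, not from the source):

    double const rect_width = 0.0002;                 /* rectangle width as a proportion of the container */
    int const container_width = 1998;                 /* e.g. a 2K flat container */
    int const width = rect_width * container_width;   /* 0.3996 truncates to 0, so the subtitle is skipped */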
@@ -1188,8 +1192,7 @@ Player::discard_audio (shared_ptr<const AudioBuffers> audio, DCPTime time, DCPTi
        if (remaining_frames <= 0) {
                return make_pair(shared_ptr<AudioBuffers>(), DCPTime());
        }
-       shared_ptr<AudioBuffers> cut (new AudioBuffers (audio->channels(), remaining_frames));
-       cut->copy_from (audio.get(), remaining_frames, discard_frames, 0);
+       shared_ptr<AudioBuffers> cut (new AudioBuffers(audio, remaining_frames, discard_frames));
        return make_pair(cut, time + discard_time);
 }