X-Git-Url: https://main.carlh.net/gitweb/?a=blobdiff_plain;ds=sidebyside;f=src%2Flib%2Fplayer.cc;h=0a4eb54e7aa09367b374011d5c7a490ee10405eb;hb=44c918a83e1e64922448ba20b5dfdcf30d026f84;hp=361f41c92cf07263a083bdd872136cfb68e4ce3c;hpb=89ee4cc6019036fa4fc0a6e07e052ffdc3b136ac;p=dcpomatic.git

diff --git a/src/lib/player.cc b/src/lib/player.cc
index 361f41c92..0a4eb54e7 100644
--- a/src/lib/player.cc
+++ b/src/lib/player.cc
@@ -45,6 +45,7 @@
 #include "dcp_subtitle_content.h"
 #include "dcp_subtitle_decoder.h"
 #include "audio_processor.h"
+#include "playlist.h"
 #include
 #include
 #include
@@ -68,14 +69,16 @@ using boost::weak_ptr;
 using boost::dynamic_pointer_cast;
 using boost::optional;
 
-Player::Player (shared_ptr<const Film> film)
+Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
 	: _film (film)
+	, _playlist (playlist)
 	, _have_valid_pieces (false)
 	, _ignore_video (false)
 	, _always_burn_subtitles (false)
 {
-	_film_content_changed_connection = _film->ContentChanged.connect (bind (&Player::content_changed, this, _1, _2, _3));
 	_film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
+	_playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
+	_playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::playlist_content_changed, this, _1, _2, _3));
 	set_video_container_size (_film->frame_size ());
 
 	film_changed (Film::AUDIO_PROCESSOR);
@@ -87,11 +90,9 @@ Player::setup_pieces ()
 	list<shared_ptr<Piece> > old_pieces = _pieces;
 	_pieces.clear ();
 
-	ContentList content = _film->content ();
+	BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
 
-	for (ContentList::iterator i = content.begin(); i != content.end(); ++i) {
-
-		if (!(*i)->paths_valid ()) {
+		if (!i->paths_valid ()) {
 			continue;
 		}
 
@@ -101,13 +102,13 @@ Player::setup_pieces ()
 		/* Work out a FrameRateChange for the best overlap video for this content, in case we need it below */
 		DCPTime best_overlap_t;
 		shared_ptr<VideoContent> best_overlap;
-		for (ContentList::iterator j = content.begin(); j != content.end(); ++j) {
-			shared_ptr<VideoContent> vc = dynamic_pointer_cast<VideoContent> (*j);
+		BOOST_FOREACH (shared_ptr<Content> j, _playlist->content ()) {
+			shared_ptr<VideoContent> vc = dynamic_pointer_cast<VideoContent> (j);
 			if (!vc) {
 				continue;
 			}
 
-			DCPTime const overlap = max (vc->position(), (*i)->position()) - min (vc->end(), (*i)->end());
+			DCPTime const overlap = max (vc->position(), i->position()) - min (vc->end(), i->end());
 			if (overlap > best_overlap_t) {
 				best_overlap = vc;
 				best_overlap_t = overlap;
@@ -123,20 +124,20 @@
 		}
 
 		/* FFmpeg */
-		shared_ptr<FFmpegContent> fc = dynamic_pointer_cast<FFmpegContent> (*i);
+		shared_ptr<FFmpegContent> fc = dynamic_pointer_cast<FFmpegContent> (i);
 		if (fc) {
 			decoder.reset (new FFmpegDecoder (fc, _film->log()));
 			frc = FrameRateChange (fc->video_frame_rate(), _film->video_frame_rate());
 		}
 
-		shared_ptr<DCPContent> dc = dynamic_pointer_cast<DCPContent> (*i);
+		shared_ptr<DCPContent> dc = dynamic_pointer_cast<DCPContent> (i);
 		if (dc) {
 			decoder.reset (new DCPDecoder (dc));
 			frc = FrameRateChange (dc->video_frame_rate(), _film->video_frame_rate());
 		}
 
 		/* ImageContent */
-		shared_ptr<ImageContent> ic = dynamic_pointer_cast<ImageContent> (*i);
+		shared_ptr<ImageContent> ic = dynamic_pointer_cast<ImageContent> (i);
 		if (ic) {
 			/* See if we can re-use an old ImageDecoder */
 			for (list<shared_ptr<Piece> >::const_iterator j = old_pieces.begin(); j != old_pieces.end(); ++j) {
@@ -154,21 +155,21 @@
 		}
 
 		/* SndfileContent */
-		shared_ptr<SndfileContent> sc = dynamic_pointer_cast<SndfileContent> (*i);
+		shared_ptr<SndfileContent> sc = dynamic_pointer_cast<SndfileContent> (i);
 		if (sc) {
 			decoder.reset (new SndfileDecoder (sc));
 			frc = best_overlap_frc;
 		}
 
 		/* SubRipContent */
-		shared_ptr<SubRipContent> rc = dynamic_pointer_cast<SubRipContent> (*i);
+		shared_ptr<SubRipContent> rc = dynamic_pointer_cast<SubRipContent> (i);
 		if (rc) {
 			decoder.reset (new SubRipDecoder (rc));
 			frc = best_overlap_frc;
 		}
 
 		/* DCPSubtitleContent */
-		shared_ptr<DCPSubtitleContent> dsc = dynamic_pointer_cast<DCPSubtitleContent> (*i);
+		shared_ptr<DCPSubtitleContent> dsc = dynamic_pointer_cast<DCPSubtitleContent> (i);
 		if (dsc) {
 			decoder.reset (new DCPSubtitleDecoder (dsc));
 			frc = best_overlap_frc;
@@ -179,14 +180,14 @@
 			vd->set_ignore_video ();
 		}
 
-		_pieces.push_back (shared_ptr<Piece> (new Piece (*i, decoder, frc.get ())));
+		_pieces.push_back (shared_ptr<Piece> (new Piece (i, decoder, frc.get ())));
 	}
 
 	_have_valid_pieces = true;
 }
 
 void
-Player::content_changed (weak_ptr<Content> w, int property, bool frequent)
+Player::playlist_content_changed (weak_ptr<Content> w, int property, bool frequent)
 {
 	shared_ptr<Content> c = w.lock ();
 	if (!c) {
@@ -232,6 +233,13 @@ Player::set_video_container_size (dcp::Size s)
 	_black_image->make_black ();
 }
 
+void
+Player::playlist_changed ()
+{
+	_have_valid_pieces = false;
+	Changed (false);
+}
+
 void
 Player::film_changed (Film::Property p)
 {
@@ -240,10 +248,7 @@ Player::film_changed (Film::Property p)
 	   last time we were run.
 	*/
 
-	if (p == Film::CONTENT) {
-		_have_valid_pieces = false;
-		Changed (false);
-	} else if (p == Film::CONTAINER || p == Film::VIDEO_FRAME_RATE) {
+	if (p == Film::CONTAINER || p == Film::VIDEO_FRAME_RATE) {
 		Changed (false);
 	} else if (p == Film::AUDIO_PROCESSOR) {
 		if (_film->audio_processor ()) {
@@ -356,38 +361,57 @@ Player::get_video (DCPTime time, bool accurate)
 		/* No video content at this time */
 		pvf.push_back (black_player_video_frame (time));
 	} else {
-		/* Create a PlayerVideo from the content's video at this time */
-
-		shared_ptr<Piece> piece = ov.back ();
-		shared_ptr<VideoDecoder> decoder = dynamic_pointer_cast<VideoDecoder> (piece->decoder);
-		DCPOMATIC_ASSERT (decoder);
-		shared_ptr<VideoContent> video_content = dynamic_pointer_cast<VideoContent> (piece->content);
-		DCPOMATIC_ASSERT (video_content);
-
-		list<ContentVideo> content_video = decoder->get_video (dcp_to_content_video (piece, time), accurate);
-		if (content_video.empty ()) {
-			pvf.push_back (black_player_video_frame (time));
-			return pvf;
+		/* Decide which pieces of content to use */
+		list<shared_ptr<Piece> > ov_to_use;
+
+		/* Always use the last one */
+		list<shared_ptr<Piece> >::reverse_iterator i = ov.rbegin ();
+		ov_to_use.push_back (*i);
+		VideoFrameType const first_type = dynamic_pointer_cast<VideoContent> ((*i)->content)->video_frame_type ();
+
+		++i;
+		if (i != ov.rend ()) {
+			shared_ptr<VideoContent> vc = dynamic_pointer_cast<VideoContent> ((*i)->content);
+			/* Use the second to last if it's the other part of a 3D content pair */
+			if (
+				(first_type == VIDEO_FRAME_TYPE_3D_LEFT && vc->video_frame_type() == VIDEO_FRAME_TYPE_3D_RIGHT) ||
+				(first_type == VIDEO_FRAME_TYPE_3D_RIGHT && vc->video_frame_type() == VIDEO_FRAME_TYPE_3D_LEFT)
+				) {
+				/* Other part of a pair of 3D content */
+				ov_to_use.push_back (*i);
+			}
 		}
 
-		dcp::Size image_size = video_content->scale().size (video_content, _video_container_size, _film->frame_size ());
-
-		for (list<ContentVideo>::const_iterator i = content_video.begin(); i != content_video.end(); ++i) {
-			pvf.push_back (
-				shared_ptr<PlayerVideo> (
-					new PlayerVideo (
-						i->image,
-						content_video_to_dcp (piece, i->frame),
-						video_content->crop (),
-						video_content->fade (i->frame),
-						image_size,
-						_video_container_size,
-						i->eyes,
-						i->part,
-						video_content->colour_conversion ()
-					)
-				)
-			);
+		BOOST_FOREACH (shared_ptr<Piece> piece, ov_to_use) {
+			shared_ptr<VideoDecoder> decoder = dynamic_pointer_cast<VideoDecoder> (piece->decoder);
+			DCPOMATIC_ASSERT (decoder);
+			shared_ptr<VideoContent> video_content = dynamic_pointer_cast<VideoContent> (piece->content);
+			DCPOMATIC_ASSERT (video_content);
+
+			list<ContentVideo> content_video = decoder->get_video (dcp_to_content_video (piece, time), accurate);
+			if (content_video.empty ()) {
+				pvf.push_back (black_player_video_frame (time));
+			} else {
+				dcp::Size image_size = video_content->scale().size (video_content, _video_container_size, _film->frame_size ());
+
+				for (list<ContentVideo>::const_iterator i = content_video.begin(); i != content_video.end(); ++i) {
+					pvf.push_back (
+						shared_ptr<PlayerVideo> (
+							new PlayerVideo (
+								i->image,
+								content_video_to_dcp (piece, i->frame),
+								video_content->crop (),
+								video_content->fade (i->frame),
+								image_size,
+								_video_container_size,
+								i->eyes,
+								i->part,
+								video_content->colour_conversion ()
+							)
+						)
+					);
+				}
+			}
 		}
 	}
 
@@ -440,10 +464,15 @@ Player::get_audio (DCPTime time, DCPTime length, bool accurate)
 			request = DCPTime ();
 		}
 
-		Frame const content_frame = dcp_to_content_audio (*i, request);
+		Frame const content_frame = dcp_to_resampled_audio (*i, request);
 
 		BOOST_FOREACH (AudioStreamPtr j, content->audio_streams ()) {
 
+			if (j->channels() == 0) {
+				/* Some content (e.g. DCPs) can have streams with no channels */
+				continue;
+			}
+
 			/* Audio from this piece's decoder stream (which might be more or less than what we asked for) */
 			ContentAudio all = decoder->get_audio (j, content_frame, request_frames, accurate);
 
@@ -492,60 +521,34 @@
 Frame
 Player::dcp_to_content_video (shared_ptr<Piece> piece, DCPTime t) const
 {
-	/* s is the offset of t from the start position of this content */
+	shared_ptr<VideoContent> vc = dynamic_pointer_cast<VideoContent> (piece->content);
 	DCPTime s = t - piece->content->position ();
-	s = DCPTime (max (DCPTime::Type (0), s.get ()));
-	s = DCPTime (min (piece->content->length_after_trim().get(), s.get()));
-
-	/* Convert this to the content frame */
-	return DCPTime (s + piece->content->trim_start()).frames (_film->video_frame_rate()) / piece->frc.factor ();
+	s = min (piece->content->length_after_trim(), s);
+	return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start ()).frames (vc->video_frame_rate ());
 }
 
 DCPTime
 Player::content_video_to_dcp (shared_ptr<Piece> piece, Frame f) const
 {
-	DCPTime t = DCPTime::from_frames (f * piece->frc.factor (), _film->video_frame_rate()) - piece->content->trim_start () + piece->content->position ();
-	if (t < DCPTime ()) {
-		t = DCPTime ();
-	}
-
-	return t;
+	shared_ptr<VideoContent> vc = dynamic_pointer_cast<VideoContent> (piece->content);
+	ContentTime const c = ContentTime::from_frames (f, vc->video_frame_rate ()) - piece->content->trim_start ();
+	return max (DCPTime (), DCPTime (c, piece->frc) + piece->content->position ());
 }
 
 Frame
-Player::dcp_to_content_audio (shared_ptr<Piece> piece, DCPTime t) const
+Player::dcp_to_resampled_audio (shared_ptr<Piece> piece, DCPTime t) const
 {
-	/* s is the offset of t from the start position of this content */
-	DCPTime s = t - piece->content->position ();
-	s = DCPTime (max (DCPTime::Type (0), s.get ()));
-	s = DCPTime (min (piece->content->length_after_trim().get(), s.get()));
-
-	/* Convert this to the content frame */
-	return DCPTime (s + piece->content->trim_start()).frames (_film->audio_frame_rate());
+	DCPTime s = t - piece->content->position () + DCPTime (piece->content->trim_start (), piece->frc);
+	s = max (DCPTime (), min (piece->content->length_after_trim(), s));
+	return s.frames (_film->audio_frame_rate ());
 }
 
 ContentTime
 Player::dcp_to_content_subtitle (shared_ptr<Piece> piece, DCPTime t) const
 {
-	/* s is the offset of t from the start position of this content */
 	DCPTime s = t - piece->content->position ();
-	s = DCPTime (max (DCPTime::Type (0), s.get ()));
-	s = DCPTime (min (piece->content->length_after_trim().get(), s.get()));
-
-	return ContentTime (s + piece->content->trim_start(), piece->frc);
-}
-
-void
-PlayerStatistics::dump (shared_ptr<Log> log) const
-{
-	log->log (String::compose ("Video: %1 good %2 skipped %3 black %4 repeat", video.good, video.skip, video.black, video.repeat), Log::TYPE_GENERAL);
-	log->log (String::compose ("Audio: %1 good %2 skipped %3 silence", audio.good, audio.skip, audio.silence.seconds()), Log::TYPE_GENERAL);
-}
-
-PlayerStatistics const &
-Player::statistics () const
-{
-	return _statistics;
+	s = min (piece->content->length_after_trim(), s);
+	return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
 }
 
 /** @param burnt true to return only subtitles to be burnt, false to return only