X-Git-Url: https://main.carlh.net/gitweb/?a=blobdiff_plain;f=src%2Flib%2Fplayer.cc;h=64fcfc93e7682efcfef68394b6106920aa53bd05;hb=bdbe925a467f9b7149322ad8d1c090d4c1e6d5c3;hp=3ceaac8c1b667fe39781dc027f5b21a2289e128e;hpb=bd0fbdae25424a491b30427443a0ce2b338522b8;p=dcpomatic.git

diff --git a/src/lib/player.cc b/src/lib/player.cc
index 3ceaac8c1..64fcfc93e 100644
--- a/src/lib/player.cc
+++ b/src/lib/player.cc
@@ -46,9 +46,14 @@
 #include "dcp_subtitle_decoder.h"
 #include "audio_processor.h"
 #include "playlist.h"
+#include
+#include
+#include
+#include
 #include
 #include
 #include
+#include

 #include "i18n.h"

@@ -74,7 +79,10 @@ Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist
 	, _playlist (playlist)
 	, _have_valid_pieces (false)
 	, _ignore_video (false)
+	, _ignore_audio (false)
 	, _always_burn_subtitles (false)
+	, _fast (false)
+	, _play_referenced (false)
 {
 	_film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
 	_playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
@@ -126,13 +134,13 @@ Player::setup_pieces ()
 		/* FFmpeg */
 		shared_ptr<FFmpegContent> fc = dynamic_pointer_cast<FFmpegContent> (i);
 		if (fc) {
-			decoder.reset (new FFmpegDecoder (fc, _film->log()));
+			decoder.reset (new FFmpegDecoder (fc, _film->log(), _fast));
 			frc = FrameRateChange (fc->video_frame_rate(), _film->video_frame_rate());
 		}

 		shared_ptr<DCPContent> dc = dynamic_pointer_cast<DCPContent> (i);
 		if (dc) {
-			decoder.reset (new DCPDecoder (dc));
+			decoder.reset (new DCPDecoder (dc, _fast));
 			frc = FrameRateChange (dc->video_frame_rate(), _film->video_frame_rate());
 		}

@@ -157,7 +165,7 @@ Player::setup_pieces ()
 		/* SndfileContent */
 		shared_ptr<SndfileContent> sc = dynamic_pointer_cast<SndfileContent> (i);
 		if (sc) {
-			decoder.reset (new SndfileDecoder (sc));
+			decoder.reset (new SndfileDecoder (sc, _fast));
 			frc = best_overlap_frc;
 		}

@@ -180,6 +188,11 @@ Player::setup_pieces ()
 			vd->set_ignore_video ();
 		}

+		shared_ptr<AudioDecoder> ad = dynamic_pointer_cast<AudioDecoder> (decoder);
+		if (ad && _ignore_audio) {
+			ad->set_ignore_audio ();
+		}
+
 		_pieces.push_back (shared_ptr<Piece> (new Piece (i, decoder, frc.get ())));
 	}

@@ -293,8 +306,8 @@ Player::transform_image_subtitles (list<ImageSubtitle> subs) const
 					true
 					),
 				Position<int> (
-					rint (_video_container_size.width * i->rectangle.x),
-					rint (_video_container_size.height * i->rectangle.y)
+					lrint (_video_container_size.width * i->rectangle.x),
+					lrint (_video_container_size.height * i->rectangle.y)
 					)
 				)
 			);
@@ -321,7 +334,10 @@ Player::black_player_video_frame (DCPTime time) const
 		);
 }

-/** @return All PlayerVideos at the given time (there may be two frames for 3D) */
+/** @return All PlayerVideos at the given time.  There may be none if the content
+ *  at `time' is a DCP which we are passing through (i.e. referring to by reference)
+ *  or 2 if we have 3D.
+ */
 list<shared_ptr<PlayerVideo> >
 Player::get_video (DCPTime time, bool accurate)
 {
@@ -375,6 +391,11 @@ Player::get_video (DCPTime time, bool accurate)
 		shared_ptr<VideoContent> video_content = dynamic_pointer_cast<VideoContent> (piece->content);
 		DCPOMATIC_ASSERT (video_content);

+		shared_ptr<DCPContent> dcp_content = dynamic_pointer_cast<DCPContent> (video_content);
+		if (dcp_content && dcp_content->reference_video () && !_play_referenced) {
+			continue;
+		}
+
 		bool const use =
 			/* always use the last video */
 			piece == last ||
@@ -424,6 +445,7 @@ Player::get_video (DCPTime time, bool accurate)
 	return pvf;
 }

+/** @return Audio data or 0 if the only audio data here is referenced DCP data */
 shared_ptr<AudioBuffers>
 Player::get_audio (DCPTime time, DCPTime length, bool accurate)
 {
@@ -441,11 +463,25 @@ Player::get_audio (DCPTime time, DCPTime length, bool accurate)
 		return audio;
 	}

-	for (list<shared_ptr<Piece> >::iterator i = ov.begin(); i != ov.end(); ++i) {
+	bool all_referenced = true;
+	BOOST_FOREACH (shared_ptr<Piece> i, ov) {
+		shared_ptr<AudioContent> audio_content = dynamic_pointer_cast<AudioContent> (i->content);
+		shared_ptr<DCPContent> dcp_content = dynamic_pointer_cast<DCPContent> (i->content);
+		if (audio_content && (!dcp_content || !dcp_content->reference_audio ())) {
+			/* There is audio content which is not from a DCP or not set to be referenced */
+			all_referenced = false;
+		}
+	}
+
+	if (all_referenced && !_play_referenced) {
+		return shared_ptr<AudioBuffers> ();
+	}
+
+	BOOST_FOREACH (shared_ptr<Piece> i, ov) {

-		shared_ptr<AudioContent> content = dynamic_pointer_cast<AudioContent> ((*i)->content);
+		shared_ptr<AudioContent> content = dynamic_pointer_cast<AudioContent> (i->content);
 		DCPOMATIC_ASSERT (content);
-		shared_ptr<AudioDecoder> decoder = dynamic_pointer_cast<AudioDecoder> ((*i)->decoder);
+		shared_ptr<AudioDecoder> decoder = dynamic_pointer_cast<AudioDecoder> (i->decoder);
 		DCPOMATIC_ASSERT (decoder);

 		/* The time that we should request from the content */
@@ -464,7 +500,7 @@ Player::get_audio (DCPTime time, DCPTime length, bool accurate)
 			request = DCPTime ();
 		}

-		Frame const content_frame = dcp_to_resampled_audio (*i, request);
+		Frame const content_frame = dcp_to_resampled_audio (i, request);

 		BOOST_FOREACH (AudioStreamPtr j, content->audio_streams ()) {

@@ -501,7 +537,7 @@ Player::get_audio (DCPTime time, DCPTime length, bool accurate)
 			}

 			if (_audio_processor) {
-				dcp_mapped = _audio_processor->run (dcp_mapped);
+				dcp_mapped = _audio_processor->run (dcp_mapped, _film->audio_channels ());
 			}

 			all.audio = dcp_mapped;
@@ -524,18 +560,25 @@ Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
 	shared_ptr<VideoContent> vc = dynamic_pointer_cast<VideoContent> (piece->content);
 	DCPTime s = t - piece->content->position ();
 	s = min (piece->content->length_after_trim(), s);
-	/* We're returning a frame index here so we need to floor() the conversion since we want to know the frame
-	   that contains t, I think
+	s = max (DCPTime(), s + DCPTime (piece->content->trim_start(), piece->frc));
+
+	/* It might seem more logical here to convert s to a ContentTime (using the FrameRateChange)
+	   then convert that ContentTime to frames at the content's rate.  However this fails for
+	   situations like content at 29.9978733fps, DCP at 30fps.  The accuracy of the Time type is not
+	   enough to distinguish between the two with low values of time (e.g. 3200 in Time units).
+
+	   Instead we convert the DCPTime using the DCP video rate then account for any skip/repeat.
 	*/
-	return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start ()).frames_floor (vc->video_frame_rate ());
+	return s.frames_floor (piece->frc.dcp) / piece->frc.factor ();
 }

 DCPTime
 Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
 {
 	shared_ptr<VideoContent> vc = dynamic_pointer_cast<VideoContent> (piece->content);
-	ContentTime const c = ContentTime::from_frames (f, vc->video_frame_rate ()) - piece->content->trim_start ();
-	return max (DCPTime (), DCPTime (c, piece->frc) + piece->content->position ());
+	/* See comment in dcp_to_content_video */
+	DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime (piece->content->trim_start (), piece->frc);
+	return max (DCPTime (), d + piece->content->position ());
 }

 Frame
@@ -572,6 +615,11 @@ Player::get_subtitles (DCPTime time, DCPTime length, bool starting, bool burnt)
 			continue;
 		}

+		shared_ptr<DCPContent> dcp_content = dynamic_pointer_cast<DCPContent> (subtitle_content);
+		if (dcp_content && dcp_content->reference_subtitle () && !_play_referenced) {
+			continue;
+		}
+
 		shared_ptr<SubtitleDecoder> subtitle_decoder = dynamic_pointer_cast<SubtitleDecoder> ((*j)->decoder);
 		ContentTime const from = dcp_to_content_subtitle (*j, time);
 		/* XXX: this video_frame_rate() should be the rate that the subtitle content has been prepared for */
@@ -645,6 +693,13 @@ Player::set_ignore_video ()
 	_ignore_video = true;
 }
+/** Set this player never to produce any audio data */
+void
+Player::set_ignore_audio ()
+{
+	_ignore_audio = true;
+}
+
 /** Set whether or not this player should always burn text subtitles into the image,
  *  regardless of the content settings.
  *  @param burn true to always burn subtitles, false to obey content settings.
  */
@@ -654,3 +709,43 @@ Player::set_always_burn_subtitles (bool burn)
 {
 	_always_burn_subtitles = burn;
 }
+
+void
+Player::set_fast ()
+{
+	_fast = true;
+	_have_valid_pieces = false;
+}
+
+void
+Player::set_play_referenced ()
+{
+	_play_referenced = true;
+	_have_valid_pieces = false;
+}
+
+list<shared_ptr<dcp::ReelAsset> >
+Player::get_reel_assets ()
+{
+	list<shared_ptr<dcp::ReelAsset> > a;
+
+	BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
+		shared_ptr<DCPContent> j = dynamic_pointer_cast<DCPContent> (i);
+		if (!j) {
+			continue;
+		}
+		/* XXX: hack hack hack */
+		DCPDecoder decoder (j, false);
+		if (j->reference_video ()) {
+			a.push_back (decoder.reels().front()->main_picture ());
+		}
+		if (j->reference_audio ()) {
+			a.push_back (decoder.reels().front()->main_sound ());
+		}
+		if (j->reference_subtitle ()) {
+			a.push_back (decoder.reels().front()->main_subtitle ());
+		}
+	}
+
+	return a;
+}
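
Note on the reworked dcp_to_content_video() / content_video_to_dcp() above: the following standalone sketch illustrates the same arithmetic, namely converting the DCPTime to a frame count at the DCP rate and then dividing out (or multiplying back in) the skip/repeat factor, rather than going via a ContentTime at the content rate. FrameRateChangeSketch, the 96000 time base and the function bodies are simplified stand-ins written for this note under those assumptions; they are not the real DCP-o-matic classes.

// Standalone sketch of the frame conversion described in the patch above.
// The time base of 96000 units per second is an assumption for illustration.

#include <cstdint>
#include <cmath>
#include <iostream>

namespace {

int64_t const HZ = 96000;  // assumed time units per second (illustrative only)

struct FrameRateChangeSketch
{
	double source;  // content video frame rate
	int dcp;        // DCP video frame rate
	bool skip;      // drop every other content frame
	bool repeat;    // show each content frame twice

	double factor () const {
		if (skip) {
			return 0.5;
		}
		if (repeat) {
			return 2;
		}
		return 1;
	}
};

/* DCP time (in HZ units, already offset and trimmed) -> content video frame index */
int64_t
dcp_to_content_video (int64_t t, FrameRateChangeSketch const & frc)
{
	/* Floor to the DCP frame that contains t, then account for skip/repeat */
	int64_t const dcp_frame = (t * frc.dcp) / HZ;
	return static_cast<int64_t> (dcp_frame / frc.factor ());
}

/* Content video frame index -> DCP time in HZ units */
int64_t
content_video_to_dcp (int64_t f, FrameRateChangeSketch const & frc)
{
	return std::llround (f * frc.factor ()) * HZ / frc.dcp;
}

}

int
main ()
{
	/* Content at ~29.9978733fps in a 30fps DCP: no skip or repeat, so factor() is 1
	   and small times such as 3200 (the example in the comment above) round-trip
	   exactly because the conversion is done purely at the DCP rate. */
	FrameRateChangeSketch const frc = { 29.9978733, 30, false, false };
	std::cout << dcp_to_content_video (3200, frc) << "\n";  // prints 1
	std::cout << content_video_to_dcp (1, frc) << "\n";     // prints 3200
	return 0;
}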