shared_ptr<VideoContent> vc = dynamic_pointer_cast<VideoContent> ((*i)->content);
/* Use the second to last if it's the other part of a 3D content pair */
if (
- (first_type == EYES_LEFT && vc->video_frame_type() == EYES_RIGHT) ||
- (first_type == EYES_RIGHT && vc->video_frame_type() == EYES_LEFT)
+ (first_type == VIDEO_FRAME_TYPE_3D_LEFT && vc->video_frame_type() == VIDEO_FRAME_TYPE_3D_RIGHT) ||
+ (first_type == VIDEO_FRAME_TYPE_3D_RIGHT && vc->video_frame_type() == VIDEO_FRAME_TYPE_3D_LEFT)
) {
/* Other part of a pair of 3D content */
ov_to_use.push_back (*i);
request = DCPTime ();
}
- Frame const content_frame = dcp_to_content_audio (*i, request);
+ Frame const content_frame = dcp_to_resampled_audio (*i, request);
BOOST_FOREACH (AudioStreamPtr j, content->audio_streams ()) {
/** Convert a DCP time to a frame index within a piece's video content.
 *  @param piece Piece whose content is video (or contains video).
 *  @param t Time on the DCP timeline.
 *  @return Frame index into the content's video, clamped so that t before the
 *  piece's start maps to frame 0 and t after its trimmed end maps to the last
 *  usable position.
 */
Frame
Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
{
	shared_ptr<const VideoContent> vc = dynamic_pointer_cast<const VideoContent> (piece->content);
	/* s is the offset of t from the start position of this content */
	DCPTime s = t - piece->content->position ();
	/* Clamp to the trimmed length of the content */
	s = min (piece->content->length_after_trim(), s);
	/* Convert to content time (via the piece's frame-rate change), add back the
	   start trim, and clamp below at zero before converting to a frame count. */
	return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start ()).frames (vc->video_frame_rate ());
}
/** Convert a frame index within a piece's video content to a DCP time.
 *  Inverse of dcp_to_content_video.
 *  @param piece Piece whose content is video (or contains video).
 *  @param f Frame index into the content's video.
 *  @return Time on the DCP timeline, clamped below at zero.
 */
DCPTime
Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
{
	shared_ptr<const VideoContent> vc = dynamic_pointer_cast<const VideoContent> (piece->content);
	/* Remove the start trim to get the offset from the first frame actually used */
	ContentTime const c = ContentTime::from_frames (f, vc->video_frame_rate ()) - piece->content->trim_start ();
	/* Map through the frame-rate change onto the DCP timeline and add the piece's position */
	return max (DCPTime (), DCPTime (c, piece->frc) + piece->content->position ());
}
/** Convert a DCP time to a frame index in a piece's resampled audio.
 *  @param piece Piece whose content has audio.
 *  @param t Time on the DCP timeline.
 *  @return Frame index (at the film's audio frame rate) corresponding to t,
 *  clamped to [0, trimmed length of the content].
 */
Frame
Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
{
	/* Offset of t from the piece's position, with the start trim (converted
	   through the frame-rate change) added back on. */
	DCPTime s = t - piece->content->position () + DCPTime (piece->content->trim_start (), piece->frc);
	/* Clamp to the playable range of the content */
	s = max (DCPTime (), min (piece->content->length_after_trim(), s));
	/* Audio has been resampled to the film's rate, so count frames at that rate */
	return s.frames (_film->audio_frame_rate ());
}
/** Convert a DCP time to a time within a piece's subtitle content.
 *  @param piece Piece whose content has subtitles.
 *  @param t Time on the DCP timeline.
 *  @return Content time corresponding to t, clamped below at zero and above
 *  at the trimmed length of the content.
 */
ContentTime
Player::dcp_to_content_subtitle (shared_ptr<const Piece> piece, DCPTime t) const
{
	/* s is the offset of t from the start position of this content */
	DCPTime s = t - piece->content->position ();
	/* Clamp to the trimmed length of the content */
	s = min (piece->content->length_after_trim(), s);
	/* Convert via the frame-rate change, add back the start trim and clamp below at zero */
	return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
}
/** @param burnt true to return only subtitles to be burnt, false to return only