void
Player::setup_pieces ()
{
- list<shared_ptr<ImageDecoder> > old_image_decoders;
- BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
- shared_ptr<ImageDecoder> imd = dynamic_pointer_cast<ImageDecoder> (i->decoder);
- if (imd) {
- old_image_decoders.push_back (imd);
- }
- }
-
_pieces.clear ();
BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
continue;
}
- shared_ptr<Decoder> decoder = decoder_factory (i, old_image_decoders, _film->log(), _fast);
+ shared_ptr<Decoder> decoder = decoder_factory (i, _film->log());
FrameRateChange frc (i->active_video_frame_rate(), _film->video_frame_rate());
if (!decoder) {
decoder->audio->set_ignore ();
}
+ if (decoder->audio && _fast) {
+ decoder->audio->set_fast ();
+ }
+
+ shared_ptr<DCPDecoder> dcp = dynamic_pointer_cast<DCPDecoder> (decoder);
+ if (dcp && _play_referenced) {
+ dcp->set_decode_referenced ();
+ }
+
_pieces.push_back (shared_ptr<Piece> (new Piece (i, decoder, frc)));
}
property == DCPContentProperty::CAN_BE_PLAYED ||
property == SubtitleContentProperty::COLOUR ||
property == SubtitleContentProperty::OUTLINE ||
- property == SubtitleContentProperty::OUTLINE_COLOUR ||
- property == FFmpegContentProperty::SUBTITLE_STREAM
+ property == SubtitleContentProperty::SHADOW ||
+ property == SubtitleContentProperty::EFFECT_COLOUR ||
+ property == FFmpegContentProperty::SUBTITLE_STREAM ||
+ property == VideoContentProperty::COLOUR_CONVERSION
) {
_have_valid_pieces = false;
Changed (frequent);
+ } else if (
+ property == SubtitleContentProperty::LINE_SPACING ||
+ property == SubtitleContentProperty::OUTLINE_WIDTH ||
+ property == SubtitleContentProperty::Y_SCALE ||
+ property == SubtitleContentProperty::FADE_IN ||
+ property == SubtitleContentProperty::FADE_OUT
+ ) {
+
+ /* These changes just need the pieces' decoders to be reset.
+ It's quite possible that other changes could be handled by
+ this branch rather than the _have_valid_pieces = false branch
+ above. This would make things a lot faster.
+ */
+
+ reset_pieces ();
+ Changed (frequent);
+
} else if (
property == ContentProperty::VIDEO_FRAME_RATE ||
property == SubtitleContentProperty::USE ||
property == SubtitleContentProperty::X_OFFSET ||
property == SubtitleContentProperty::Y_OFFSET ||
property == SubtitleContentProperty::X_SCALE ||
- property == SubtitleContentProperty::Y_SCALE ||
property == SubtitleContentProperty::FONTS ||
property == VideoContentProperty::CROP ||
property == VideoContentProperty::SCALE ||
property == VideoContentProperty::FADE_IN ||
- property == VideoContentProperty::FADE_OUT ||
- property == VideoContentProperty::COLOUR_CONVERSION
+ property == VideoContentProperty::FADE_OUT
) {
Changed (frequent);
/* Text subtitles (rendered to an image) */
if (!ps.text.empty ()) {
- list<PositionImage> s = render_subtitles (ps.text, ps.fonts, _video_container_size);
+ list<PositionImage> s = render_subtitles (ps.text, ps.fonts, _video_container_size, time);
copy (s.begin (), s.end (), back_inserter (sub_images));
}
shared_ptr<PlayerVideo> (
new PlayerVideo (
i->image,
- content_video_to_dcp (piece, i->frame),
+ time,
piece->content->video->crop (),
- piece->content->video->fade (i->frame),
+ piece->content->video->fade (i->frame.index()),
image_size,
_video_container_size,
- i->eyes,
+ i->frame.eyes(),
i->part,
piece->content->video->colour_conversion ()
)
{
list<shared_ptr<Piece> > subs = overlaps (time, time + length, has_subtitle);
- PlayerSubtitles ps (time, length);
+ PlayerSubtitles ps (time);
for (list<shared_ptr<Piece> >::const_iterator j = subs.begin(); j != subs.end(); ++j) {
if (!(*j)->content->subtitle->use () || (!_always_burn_subtitles && (burnt != (*j)->content->subtitle->burn ()))) {
}
s.set_in (dcp::Time(content_subtitle_to_dcp (*j, ts.period().from).seconds(), 1000));
s.set_out (dcp::Time(content_subtitle_to_dcp (*j, ts.period().to).seconds(), 1000));
- ps.text.push_back (s);
+ ps.text.push_back (SubtitleString (s, (*j)->content->subtitle->outline_width()));
ps.add_fonts ((*j)->content->subtitle->fonts ());
}
}
scoped_ptr<DCPDecoder> decoder;
try {
- decoder.reset (new DCPDecoder (j, _film->log(), false));
+ decoder.reset (new DCPDecoder (j, _film->log()));
} catch (...) {
return a;
}
int64_t offset = 0;
BOOST_FOREACH (shared_ptr<dcp::Reel> k, decoder->reels()) {
+
+ DCPOMATIC_ASSERT (j->video_frame_rate ());
+ double const cfr = j->video_frame_rate().get();
+ Frame const trim_start = j->trim_start().frames_round (cfr);
+ Frame const trim_end = j->trim_end().frames_round (cfr);
+
DCPTime const from = i->position() + DCPTime::from_frames (offset, _film->video_frame_rate());
if (j->reference_video ()) {
+ DCPOMATIC_ASSERT (k->main_picture ());
+ k->main_picture()->set_entry_point (trim_start);
+ k->main_picture()->set_duration (k->main_picture()->intrinsic_duration() - trim_start - trim_end);
a.push_back (
ReferencedReelAsset (
k->main_picture (),
}
if (j->reference_audio ()) {
+ DCPOMATIC_ASSERT (k->main_sound ());
+ k->main_sound()->set_entry_point (trim_start);
+ k->main_sound()->set_duration (k->main_sound()->intrinsic_duration() - trim_start - trim_end);
a.push_back (
ReferencedReelAsset (
k->main_sound (),
if (j->reference_subtitle ()) {
DCPOMATIC_ASSERT (k->main_subtitle ());
+ k->main_subtitle()->set_entry_point (trim_start);
+ k->main_subtitle()->set_duration (k->main_subtitle()->intrinsic_duration() - trim_start - trim_end);
a.push_back (
ReferencedReelAsset (
k->main_subtitle (),
return overlaps;
}
+
+/** Reset the decoder of every current Piece without rebuilding the
+ *  piece list itself.  Called when a content property change only
+ *  requires the decoders to be reset (see the LINE_SPACING /
+ *  OUTLINE_WIDTH / Y_SCALE / FADE_IN / FADE_OUT branch of the
+ *  property-change handler), which is cheaper than setting
+ *  _have_valid_pieces = false and re-running setup_pieces().
+ */
+void
+Player::reset_pieces ()
+{
+	BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
+		i->decoder->reset ();
+	}
+}