2 Copyright (C) 2013-2017 Carl Hetherington <cth@carlh.net>
4 This file is part of DCP-o-matic.
6 DCP-o-matic is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2 of the License, or
9 (at your option) any later version.
11 DCP-o-matic is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with DCP-o-matic. If not, see <http://www.gnu.org/licenses/>.
23 #include "audio_buffers.h"
24 #include "content_audio.h"
25 #include "dcp_content.h"
28 #include "raw_image_proxy.h"
31 #include "render_subtitles.h"
33 #include "content_video.h"
34 #include "player_video.h"
35 #include "frame_rate_change.h"
36 #include "audio_processor.h"
38 #include "referenced_reel_asset.h"
39 #include "decoder_factory.h"
41 #include "video_decoder.h"
42 #include "audio_decoder.h"
43 #include "subtitle_content.h"
44 #include "subtitle_decoder.h"
45 #include "ffmpeg_content.h"
46 #include "audio_content.h"
47 #include "content_subtitle.h"
48 #include "dcp_decoder.h"
49 #include "image_decoder.h"
50 #include "resampler.h"
51 #include "compose.hpp"
53 #include <dcp/reel_sound_asset.h>
54 #include <dcp/reel_subtitle_asset.h>
55 #include <dcp/reel_picture_asset.h>
56 #include <boost/foreach.hpp>
63 #define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);
75 using boost::shared_ptr;
76 using boost::weak_ptr;
77 using boost::dynamic_pointer_cast;
78 using boost::optional;
79 using boost::scoped_ptr;
/* Construct a Player for a film and playlist.  Connects to the film's and
   playlist's change signals so cached decoding state can be invalidated,
   sizes the output container to the film frame size, picks up the film's
   audio processor, and seeks accurately to time zero.
   NOTE(review): the excerpt appears to omit the `_film (film)` initialiser
   and the opening brace — confirm against the full source. */
Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
	, _playlist (playlist)
	, _have_valid_pieces (false)       // pieces must be (re)built before use
	, _ignore_video (false)            // see set_ignore_video()
	, _ignore_audio (false)            // see set_ignore_audio()
	, _always_burn_subtitles (false)   // see set_always_burn_subtitles()
	, _play_referenced (false)         // see set_play_referenced()
	, _audio_merger (_film->audio_frame_rate())
	/* Invalidate our state whenever the film or playlist changes */
	_film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
	_playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
	_playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::playlist_content_changed, this, _1, _2, _3));
	set_video_container_size (_film->frame_size ());
	/* Pick up the film's audio processor (if any) via the normal change path */
	film_changed (Film::AUDIO_PROCESSOR);
	/* Start decoding from the beginning, accurately */
	seek (DCPTime (), true);
/* Rebuild _pieces from the playlist: create a decoder for each piece of
   content, apply ignore/referenced settings, wire decoder output signals
   to the Player's handlers, index audio streams, and record the periods
   for which referenced DCPs supply video/audio so we do not fill them.
   NOTE(review): this excerpt omits several lines (return type, closing
   braces, early `continue`s, clearing of _pieces) — comments describe
   only what is visible. */
Player::setup_pieces ()
	BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
		/* Skip content whose source files are missing or invalid */
		if (!i->paths_valid ()) {
		shared_ptr<Decoder> decoder = decoder_factory (i, _film->log());
		FrameRateChange frc (i->active_video_frame_rate(), _film->video_frame_rate());
		/* Not something that we can decode; e.g. Atmos content */
		/* Tell decoders to discard output we have been told to ignore */
		if (decoder->video && _ignore_video) {
			decoder->video->set_ignore ();
		if (decoder->audio && _ignore_audio) {
			decoder->audio->set_ignore ();
		/* DCP decoders can be asked to decode assets they merely reference */
		shared_ptr<DCPDecoder> dcp = dynamic_pointer_cast<DCPDecoder> (decoder);
		if (dcp && _play_referenced) {
			dcp->set_decode_referenced ();
		shared_ptr<Piece> piece (new Piece (i, decoder, frc));
		_pieces.push_back (piece);
		/* Route decoder output to our handlers; the piece is passed as a
		   weak_ptr so a dangling connection cannot keep it alive */
		if (decoder->video) {
			decoder->video->Data.connect (bind (&Player::video, this, weak_ptr<Piece> (piece), _1));
		if (decoder->audio) {
			decoder->audio->Data.connect (bind (&Player::audio, this, weak_ptr<Piece> (piece), _1, _2));
		if (decoder->subtitle) {
			decoder->subtitle->ImageData.connect (bind (&Player::image_subtitle, this, weak_ptr<Piece> (piece), _1));
			decoder->subtitle->TextData.connect (bind (&Player::text_subtitle, this, weak_ptr<Piece> (piece), _1));
	/* Record per-stream state so audio can be merged in push order */
	BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
		if (i->content->audio) {
			BOOST_FOREACH (AudioStreamPtr j, i->content->audio->streams()) {
				_stream_states[j] = StreamState (i, i->content->position ());
	/* When not playing referenced material, note the periods covered by
	   referenced DCP video/audio so fill_video/fill_audio skip them */
	if (!_play_referenced) {
		BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
			shared_ptr<DCPContent> dc = dynamic_pointer_cast<DCPContent> (i->content);
			if (dc->reference_video()) {
				_no_video.push_back (DCPTimePeriod (dc->position(), dc->end()));
			if (dc->reference_audio()) {
				_no_audio.push_back (DCPTimePeriod (dc->position(), dc->end()));
	_have_valid_pieces = true;
/* React to a property change on one piece of content.  The first group of
   properties affects how pieces/decoders are built, so it invalidates
   _pieces; the second group presumably only requires notifying listeners —
   the code following that condition is not visible in this excerpt.
   @param w        content that changed (may have been deleted; hence weak_ptr)
   @param property which property changed
   @param frequent true if this change is likely to happen often */
Player::playlist_content_changed (weak_ptr<Content> w, int property, bool frequent)
	shared_ptr<Content> c = w.lock ();
	/* (early return when the content no longer exists is presumably omitted here) */
	/* Properties that require _pieces to be rebuilt */
	property == ContentProperty::POSITION ||
	property == ContentProperty::LENGTH ||
	property == ContentProperty::TRIM_START ||
	property == ContentProperty::TRIM_END ||
	property == ContentProperty::PATH ||
	property == VideoContentProperty::FRAME_TYPE ||
	property == DCPContentProperty::NEEDS_ASSETS ||
	property == DCPContentProperty::NEEDS_KDM ||
	property == SubtitleContentProperty::COLOUR ||
	property == SubtitleContentProperty::OUTLINE ||
	property == SubtitleContentProperty::SHADOW ||
	property == SubtitleContentProperty::EFFECT_COLOUR ||
	property == FFmpegContentProperty::SUBTITLE_STREAM ||
	property == VideoContentProperty::COLOUR_CONVERSION
	_have_valid_pieces = false;
	/* Properties that change output but do not require new pieces */
	property == SubtitleContentProperty::LINE_SPACING ||
	property == SubtitleContentProperty::OUTLINE_WIDTH ||
	property == SubtitleContentProperty::Y_SCALE ||
	property == SubtitleContentProperty::FADE_IN ||
	property == SubtitleContentProperty::FADE_OUT ||
	property == ContentProperty::VIDEO_FRAME_RATE ||
	property == SubtitleContentProperty::USE ||
	property == SubtitleContentProperty::X_OFFSET ||
	property == SubtitleContentProperty::Y_OFFSET ||
	property == SubtitleContentProperty::X_SCALE ||
	property == SubtitleContentProperty::FONTS ||
	property == VideoContentProperty::CROP ||
	property == VideoContentProperty::SCALE ||
	property == VideoContentProperty::FADE_IN ||
	property == VideoContentProperty::FADE_OUT
/* Set the size of the frame that output video will be padded/placed into.
   Re-creates the cached black frame at the new size; a no-op when the
   size is unchanged. */
Player::set_video_container_size (dcp::Size s)
	if (s == _video_container_size) {
	_video_container_size = s;
	/* Cache a black frame at the container size for filling gaps */
	_black_image.reset (new Image (AV_PIX_FMT_RGB24, _video_container_size, true));
	_black_image->make_black ();
/* Called when the playlist structure changes: pieces must be rebuilt. */
Player::playlist_changed ()
	_have_valid_pieces = false;
/* React to a change of a Film property that affects our output.
   @param p the property that changed */
Player::film_changed (Film::Property p)
	/* Here we should notice Film properties that affect our output, and
	alert listeners that our output now would be different to how it was
	last time we were run.
	*/
	if (p == Film::CONTAINER) {
	} else if (p == Film::VIDEO_FRAME_RATE) {
		/* Pieces contain a FrameRateChange which contains the DCP frame rate,
		so we need new pieces here.
		*/
		_have_valid_pieces = false;
	} else if (p == Film::AUDIO_PROCESSOR) {
		if (_film->audio_processor ()) {
			/* Clone the film's processor so we have our own instance,
			   configured for the film's audio frame rate */
			_audio_processor = _film->audio_processor()->clone (_film->audio_frame_rate ());
/* Scale and position image subtitles so they can be composited onto a
   frame of _video_container_size.  Returns the transformed images with
   their positions.
   NOTE(review): this excerpt omits the loop body's image-scaling call and
   surrounding code; the visible fragments are the scaling computation,
   part of its argument list, and the position calculation. */
Player::transform_image_subtitles (list<ImageSubtitle> subs) const
	list<PositionImage> all;
	for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
		/* We will scale the subtitle up to fit _video_container_size */
		dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);
		/* Then we need a corrective translation, consisting of two parts:
		 * 1. that which is the result of the scaling of the subtitle by _video_container_size; this will be
		 * rect.x * _video_container_size.width and rect.y * _video_container_size.height.
		 * 2. that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
		 * (width_before_subtitle_scale * (1 - subtitle_x_scale) / 2) and
		 * (height_before_subtitle_scale * (1 - subtitle_y_scale) / 2).
		 * Combining these two translations gives these expressions.
		 */
		dcp::YUV_TO_RGB_REC601,
		i->image->pixel_format (),
		/* Position: fractional rectangle coordinates scaled to container pixels */
		lrint (_video_container_size.width * i->rectangle.x),
		lrint (_video_container_size.height * i->rectangle.y)
/** @return a completely black frame, sized to the video container, for
 *  filling periods with no video content.  Uses the cached _black_image
 *  and a default colour conversion. */
shared_ptr<PlayerVideo>
Player::black_player_video_frame () const
	return shared_ptr<PlayerVideo> (
		shared_ptr<const ImageProxy> (new RawImageProxy (_black_image)),
		_video_container_size,
		_video_container_size,
		PresetColourConversion::all().front().conversion
/* Convert a DCP time to a video frame index within a piece of content,
   accounting for the content's position, trims and frame-rate change. */
Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
	DCPTime s = t - piece->content->position ();
	s = min (piece->content->length_after_trim(), s);
	s = max (DCPTime(), s + DCPTime (piece->content->trim_start(), piece->frc));
	/* It might seem more logical here to convert s to a ContentTime (using the FrameRateChange)
	then convert that ContentTime to frames at the content's rate. However this fails for
	situations like content at 29.9978733fps, DCP at 30fps. The accuracy of the Time type is not
	enough to distinguish between the two with low values of time (e.g. 3200 in Time units).
	Instead we convert the DCPTime using the DCP video rate then account for any skip/repeat.
	*/
	return s.frames_floor (piece->frc.dcp) / piece->frc.factor ();
/* Inverse of dcp_to_content_video: convert a content video frame index to
   its DCP time, clamped to not precede time zero. */
Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
	/* See comment in dcp_to_content_video */
	DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime (piece->content->trim_start (), piece->frc);
	return max (DCPTime (), d + piece->content->position ());
/* Convert a DCP time to a frame index in the piece's resampled audio
   (i.e. at the film's audio frame rate). */
Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
	DCPTime s = t - piece->content->position ();
	s = min (piece->content->length_after_trim(), s);
	/* See notes in dcp_to_content_video */
	return max (DCPTime (), DCPTime (piece->content->trim_start (), piece->frc) + s).frames_floor (_film->audio_frame_rate ());
/* Inverse of dcp_to_resampled_audio: convert a resampled-audio frame index
   back to a DCP time, clamped to not precede time zero. */
Player::resampled_audio_to_dcp (shared_ptr<const Piece> piece, Frame f) const
	/* See comment in dcp_to_content_video */
	DCPTime const d = DCPTime::from_frames (f, _film->audio_frame_rate()) - DCPTime (piece->content->trim_start(), piece->frc);
	return max (DCPTime (), d + piece->content->position ());
/* Convert a DCP time to a ContentTime within a piece, applying position,
   length clamp and start trim. */
Player::dcp_to_content_time (shared_ptr<const Piece> piece, DCPTime t) const
	DCPTime s = t - piece->content->position ();
	s = min (piece->content->length_after_trim(), s);
	return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
/* Inverse of dcp_to_content_time: convert a ContentTime within a piece to
   a DCP time, clamped to not precede time zero. */
Player::content_time_to_dcp (shared_ptr<const Piece> piece, ContentTime t) const
	return max (DCPTime (), DCPTime (t - piece->content->trim_start(), piece->frc) + piece->content->position());
/** @return fonts used by all subtitle content in the playlist.  Rebuilds
 *  pieces first if they are stale (the rebuild call itself is presumably
 *  in a line omitted from this excerpt). */
list<shared_ptr<Font> >
Player::get_subtitle_fonts ()
	if (!_have_valid_pieces) {
	list<shared_ptr<Font> > fonts;
	BOOST_FOREACH (shared_ptr<Piece>& p, _pieces) {
		if (p->content->subtitle) {
			/* XXX: things may go wrong if there are duplicate font IDs
			with different font files.
			*/
			list<shared_ptr<Font> > f = p->content->subtitle->fonts ();
			copy (f.begin(), f.end(), back_inserter (fonts));
/** Set this player never to produce any video data */
Player::set_ignore_video ()
	/* Applied to decoders when pieces are (re)built; see setup_pieces() */
	_ignore_video = true;
/** Set this player never to produce any audio data */
Player::set_ignore_audio ()
	/* Applied to decoders when pieces are (re)built; see setup_pieces() */
	_ignore_audio = true;
/** Set whether or not this player should always burn text subtitles into the image,
 * regardless of the content settings.
 * @param burn true to always burn subtitles, false to obey content settings.
 */
Player::set_always_burn_subtitles (bool burn)
	_always_burn_subtitles = burn;
438 _have_valid_pieces = false;
/* Ask the player to decode (rather than skip) assets that DCP content
   merely references; pieces must be rebuilt for this to take effect. */
Player::set_play_referenced ()
	_play_referenced = true;
	_have_valid_pieces = false;
/** @return reel assets (picture/sound/subtitle) from DCP content that the
 *  film references rather than re-encodes, each with the DCP period it
 *  covers.  Entry points and durations are adjusted for the content's
 *  trims.
 *  NOTE(review): this excerpt omits several lines (the `continue` for
 *  non-DCP content, the try/catch around decoder creation, the
 *  a.push_back wrappers and the final return) — comments describe only
 *  what is visible. */
list<ReferencedReelAsset>
Player::get_reel_assets ()
	list<ReferencedReelAsset> a;
	BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
		/* Only DCP content can reference reel assets */
		shared_ptr<DCPContent> j = dynamic_pointer_cast<DCPContent> (i);
		scoped_ptr<DCPDecoder> decoder;
		decoder.reset (new DCPDecoder (j, _film->log()));
		BOOST_FOREACH (shared_ptr<dcp::Reel> k, decoder->reels()) {
			DCPOMATIC_ASSERT (j->video_frame_rate ());
			double const cfr = j->video_frame_rate().get();   // content frame rate
			Frame const trim_start = j->trim_start().frames_round (cfr);
			Frame const trim_end = j->trim_end().frames_round (cfr);
			int const ffr = _film->video_frame_rate ();        // film frame rate
			/* Period start of this reel within the DCP */
			DCPTime const from = i->position() + DCPTime::from_frames (offset, _film->video_frame_rate());
			if (j->reference_video ()) {
				shared_ptr<dcp::ReelAsset> ra = k->main_picture ();
				DCPOMATIC_ASSERT (ra);
				/* Shift entry point / shorten duration to honour the trims */
				ra->set_entry_point (ra->entry_point() + trim_start);
				ra->set_duration (ra->duration() - trim_start - trim_end);
				ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
			if (j->reference_audio ()) {
				shared_ptr<dcp::ReelAsset> ra = k->main_sound ();
				DCPOMATIC_ASSERT (ra);
				ra->set_entry_point (ra->entry_point() + trim_start);
				ra->set_duration (ra->duration() - trim_start - trim_end);
				ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
			if (j->reference_subtitle ()) {
				shared_ptr<dcp::ReelAsset> ra = k->main_subtitle ();
				DCPOMATIC_ASSERT (ra);
				ra->set_entry_point (ra->entry_point() + trim_start);
				ra->set_duration (ra->duration() - trim_start - trim_end);
				ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
			/* Assume that main picture duration is the length of the reel */
			offset += k->main_picture()->duration ();
/** @return pieces whose content satisfies `valid` and whose period
 *  intersects [from, to).  Rebuilds pieces first if stale (the rebuild
 *  call is presumably in a line omitted from this excerpt). */
list<shared_ptr<Piece> >
Player::overlaps (DCPTime from, DCPTime to, boost::function<bool (Content *)> valid)
	if (!_have_valid_pieces) {
	list<shared_ptr<Piece> > overlaps;
	BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
		/* Half-open interval intersection test */
		if (valid (i->content.get ()) && i->content->position() < to && i->content->end() > from) {
			overlaps.push_back (i);
	/* NOTE(review): the signature of this function is not in this excerpt;
	   from its body it appears to be Player::pass(), which advances the
	   piece whose decoder is earliest in DCP time by one step, fills any
	   video/audio gap when all content is exhausted, and emits merged
	   audio that is complete up to the earliest un-pushed stream. */
	if (!_have_valid_pieces) {
	/* Find the piece whose decoder position maps to the earliest DCP time */
	shared_ptr<Piece> earliest;
	DCPTime earliest_content;
	BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
		DCPTime const t = i->content->position() + DCPTime (i->decoder->position(), i->frc);
		if (!earliest || t < earliest_content) {
			earliest_content = t;
	/* No more content; fill up with silent black */
	DCPTimePeriod remaining_video (DCPTime(), _playlist->length());
	remaining_video.from = _last_time.get() + one_video_frame();
	fill_video (remaining_video);
	fill_audio (DCPTimePeriod (_last_audio_time, _playlist->length()));
	earliest->done = earliest->decoder->pass ();
	if (earliest->done && earliest->content->audio) {
		/* Flush the Player audio system for this piece */
		BOOST_FOREACH (AudioStreamPtr i, earliest->content->audio->streams()) {
			audio_flush (earliest, i);
	/* Emit any audio that is ready: pull only up to the point every
	   un-finished stream has pushed, so merged audio is complete */
	DCPTime pull_from = _playlist->length ();
	for (map<AudioStreamPtr, StreamState>::const_iterator i = _stream_states.begin(); i != _stream_states.end(); ++i) {
		if (!i->second.piece->done && i->second.last_push_end < pull_from) {
			pull_from = i->second.last_push_end;
	list<pair<shared_ptr<AudioBuffers>, DCPTime> > audio = _audio_merger.pull (pull_from);
	for (list<pair<shared_ptr<AudioBuffers>, DCPTime> >::iterator i = audio.begin(); i != audio.end(); ++i) {
		DCPOMATIC_ASSERT (i->second >= _last_audio_time);
		/* Silence-fill any gap before this block, then emit it */
		fill_audio (DCPTimePeriod (_last_audio_time, i->second));
		Audio (i->first, i->second);
		_last_audio_time = i->second + DCPTime::from_frames(i->first->frames(), _film->audio_frame_rate());
/* Handle a frame of video from a piece's decoder: compute its DCP time,
   drop it if outside the content period or skipped by the frame-rate
   change, overlay any active subtitles, fill any gap since the last frame,
   build a PlayerVideo and emit it; finally discard expired subtitles.
   NOTE(review): several lines are omitted in this excerpt (early returns,
   the PlayerVideo construction wrapper, _last_time update). */
Player::video (weak_ptr<Piece> wp, ContentVideo video)
	shared_ptr<Piece> piece = wp.lock ();
	FrameRateChange frc(piece->content->active_video_frame_rate(), _film->video_frame_rate());
	/* Drop every other frame when content rate is double the DCP rate */
	if (frc.skip && (video.frame % 2) == 1) {
	/* Time and period of the frame we will emit */
	DCPTime const time = content_video_to_dcp (piece, video.frame);
	DCPTimePeriod const period (time, time + one_video_frame());
	/* Discard if it's outside the content's period */
	if (time < piece->content->position() || time >= piece->content->end()) {
	/* Get any subtitles */
	optional<PositionImage> subtitles;
	for (list<pair<PlayerSubtitles, DCPTimePeriod> >::const_iterator i = _subtitles.begin(); i != _subtitles.end(); ++i) {
		if (!i->second.overlap (period)) {
		list<PositionImage> sub_images;
		/* Image subtitles */
		list<PositionImage> c = transform_image_subtitles (i->first.image);
		copy (c.begin(), c.end(), back_inserter (sub_images));
		/* Text subtitles (rendered to an image) */
		if (!i->first.text.empty ()) {
			list<PositionImage> s = render_subtitles (i->first.text, i->first.fonts, _video_container_size, time);
			copy (s.begin (), s.end (), back_inserter (sub_images));
		if (!sub_images.empty ()) {
			subtitles = merge (sub_images);
	/* Fill the gap between the last emitted frame and this one */
	fill_video (DCPTimePeriod (_last_time.get() + one_video_frame(), time));
	piece->content->video->crop (),
	piece->content->video->fade (video.frame),
	piece->content->video->scale().size (
		piece->content->video, _video_container_size, _film->frame_size ()
	_video_container_size,
	piece->content->video->colour_conversion ()
	_last_video->set_subtitle (subtitles.get ());
	Video (_last_video, *_last_time);
	/* Discard any subtitles we no longer need */
	for (list<pair<PlayerSubtitles, DCPTimePeriod> >::iterator i = _subtitles.begin (); i != _subtitles.end(); ) {
		list<pair<PlayerSubtitles, DCPTimePeriod> >::iterator tmp = i;
		if (i->second.to < time) {
			_subtitles.erase (i);
/* Flush any samples still held in the resampler for a stream of a piece
   that has finished decoding, and feed them through the normal audio
   transform path.  No-op (via lines omitted from this excerpt) when the
   stream has no resampler. */
Player::audio_flush (shared_ptr<Piece> piece, AudioStreamPtr stream)
	shared_ptr<AudioContent> content = piece->content->audio;
	DCPOMATIC_ASSERT (content);
	/* Look up (do not create) the resampler for this content/stream */
	shared_ptr<Resampler> r = resampler (content, stream, false);
	pair<shared_ptr<const AudioBuffers>, Frame> ro = r->flush ();
	ContentAudio content_audio;
	content_audio.audio = ro.first;
	content_audio.frame = ro.second;
	/* Compute time in the DCP */
	DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame) + DCPTime::from_seconds (content->delay() / 1000.0);
	audio_transform (content, stream, content_audio, time);
/** Do our common processing on some audio */
Player::audio_transform (shared_ptr<AudioContent> content, AudioStreamPtr stream, ContentAudio content_audio, DCPTime time)
	/* Gain: copy-then-apply so the decoder's buffers are not mutated */
	if (content->gain() != 0) {
		shared_ptr<AudioBuffers> gain (new AudioBuffers (content_audio.audio));
		gain->apply_gain (content->gain ());
		content_audio.audio = gain;
	/* Remap the stream's channels onto the DCP channel layout, starting
	   from silence and accumulating each mapped input channel */
	shared_ptr<AudioBuffers> dcp_mapped (new AudioBuffers (_film->audio_channels(), content_audio.audio->frames()));
	dcp_mapped->make_silent ();
	AudioMapping map = stream->mapping ();
	for (int i = 0; i < map.input_channels(); ++i) {
		for (int j = 0; j < dcp_mapped->channels(); ++j) {
			if (map.get (i, static_cast<dcp::Channel> (j)) > 0) {
				dcp_mapped->accumulate_channel (
					content_audio.audio.get(),
					static_cast<dcp::Channel> (j),
					map.get (i, static_cast<dcp::Channel> (j))
	content_audio.audio = dcp_mapped;
	/* Optional film-wide audio processing (e.g. upmixing) */
	if (_audio_processor) {
		content_audio.audio = _audio_processor->run (content_audio.audio, _film->audio_channels ());
	/* Push into the merger and record how far this stream has pushed */
	_audio_merger.push (content_audio.audio, time);
	DCPOMATIC_ASSERT (_stream_states.find (stream) != _stream_states.end ());
	_stream_states[stream].last_push_end = time + DCPTime::from_frames (content_audio.audio->frames(), _film->audio_frame_rate());
/* Handle a block of audio from a piece's decoder: resample it to the
   film rate if needed, compute its DCP time, trim any part that falls
   before the content's position or after its end, then run the common
   transform.  (Early returns for a dead piece / fully-discarded audio
   appear to be in lines omitted from this excerpt.) */
Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_audio)
	shared_ptr<Piece> piece = wp.lock ();
	shared_ptr<AudioContent> content = piece->content->audio;
	DCPOMATIC_ASSERT (content);
	/* Resample if the stream is not already at the film's rate */
	if (stream->frame_rate() != content->resampled_frame_rate()) {
		shared_ptr<Resampler> r = resampler (content, stream, true);
		pair<shared_ptr<const AudioBuffers>, Frame> ro = r->run (content_audio.audio, content_audio.frame);
		content_audio.audio = ro.first;
		content_audio.frame = ro.second;
	/* Compute time in the DCP */
	DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame) + DCPTime::from_seconds (content->delay() / 1000.0);
	/* And the end of this block in the DCP */
	DCPTime end = time + DCPTime::from_frames(content_audio.audio->frames(), content->resampled_frame_rate());
	/* Remove anything that comes before the start or after the end of the content */
	if (time < piece->content->position()) {
		DCPTime const discard_time = piece->content->position() - time;
		Frame discard_frames = discard_time.frames_round(_film->audio_frame_rate());
		Frame remaining_frames = content_audio.audio->frames() - discard_frames;
		if (remaining_frames <= 0) {
			/* This audio is entirely discarded */
		shared_ptr<AudioBuffers> cut (new AudioBuffers (content_audio.audio->channels(), remaining_frames));
		cut->copy_from (content_audio.audio.get(), remaining_frames, discard_frames, 0);
		content_audio.audio = cut;
		time += discard_time;
	} else if (time > piece->content->end()) {
		/* Entirely after the content ends (discard; body omitted here) */
	} else if (end > piece->content->end()) {
		/* Straddles the end: keep only the part inside the content */
		Frame const remaining_frames = DCPTime(piece->content->end() - time).frames_round(_film->audio_frame_rate());
		shared_ptr<AudioBuffers> cut (new AudioBuffers (content_audio.audio->channels(), remaining_frames));
		cut->copy_from (content_audio.audio.get(), remaining_frames, 0, 0);
		content_audio.audio = cut;
	audio_transform (content, stream, content_audio, time);
/* Handle an image subtitle from a piece's decoder: apply the content's
   offsets and scales, then either queue it for burning into video or emit
   it via the Subtitle signal, depending on the burn settings. */
Player::image_subtitle (weak_ptr<Piece> wp, ContentImageSubtitle subtitle)
	shared_ptr<Piece> piece = wp.lock ();
	/* Apply content's subtitle offsets */
	subtitle.sub.rectangle.x += piece->content->subtitle->x_offset ();
	subtitle.sub.rectangle.y += piece->content->subtitle->y_offset ();
	/* Apply content's subtitle scale */
	subtitle.sub.rectangle.width *= piece->content->subtitle->x_scale ();
	subtitle.sub.rectangle.height *= piece->content->subtitle->y_scale ();
	/* Apply a corrective translation to keep the subtitle centred after that scale */
	subtitle.sub.rectangle.x -= subtitle.sub.rectangle.width * (piece->content->subtitle->x_scale() - 1);
	subtitle.sub.rectangle.y -= subtitle.sub.rectangle.height * (piece->content->subtitle->y_scale() - 1);
	ps.image.push_back (subtitle.sub);
	/* Convert the subtitle's content-time period to DCP time */
	DCPTimePeriod period (content_time_to_dcp (piece, subtitle.period().from), content_time_to_dcp (piece, subtitle.period().to));
	if (piece->content->subtitle->use() && (piece->content->subtitle->burn() || _always_burn_subtitles)) {
		/* Queue for burning into the video in Player::video() */
		_subtitles.push_back (make_pair (ps, period));
	Subtitle (ps, period);
/* Handle a text subtitle from a piece's decoder: apply the content's
   position offsets and scales to each SubtitleString, set its in/out
   times, then queue for burning or emit, as for image subtitles. */
Player::text_subtitle (weak_ptr<Piece> wp, ContentTextSubtitle subtitle)
	shared_ptr<Piece> piece = wp.lock ();
	DCPTimePeriod const period (content_time_to_dcp (piece, subtitle.period().from), content_time_to_dcp (piece, subtitle.period().to));
	BOOST_FOREACH (dcp::SubtitleString s, subtitle.subs) {
		s.set_h_position (s.h_position() + piece->content->subtitle->x_offset ());
		s.set_v_position (s.v_position() + piece->content->subtitle->y_offset ());
		float const xs = piece->content->subtitle->x_scale();
		float const ys = piece->content->subtitle->y_scale();
		float size = s.size();
		/* Adjust size to express the common part of the scaling;
		e.g. if xs = ys = 0.5 we scale size by 2.
		*/
		if (xs > 1e-5 && ys > 1e-5) {
			size *= 1 / min (1 / xs, 1 / ys);
		/* Then express aspect ratio changes */
		if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) {
			s.set_aspect_adjust (xs / ys);
		/* dcp::Time with an editable rate of 1000 (i.e. millisecond precision) */
		s.set_in (dcp::Time(period.from.seconds(), 1000));
		s.set_out (dcp::Time(period.to.seconds(), 1000));
		ps.text.push_back (SubtitleString (s, piece->content->subtitle->outline_width()));
		ps.add_fonts (piece->content->subtitle->fonts ());
	if (piece->content->subtitle->use() && (piece->content->subtitle->burn() || _always_burn_subtitles)) {
		/* Queue for burning into the video in Player::video() */
		_subtitles.push_back (make_pair (ps, period));
	Subtitle (ps, period);
/* Seek the player to a DCP time.  Flushes the audio processor, seeks
   every decoder whose content spans the target time, and resets the
   last-video-time bookkeeping (the `accurate` branch condition appears
   to be in a line omitted from this excerpt).
   @param time     target DCP time
   @param accurate true to decode from the nearest earlier point so the
                   first emitted frame is exactly at `time` */
Player::seek (DCPTime time, bool accurate)
	if (_audio_processor) {
		_audio_processor->flush ();
	BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
		if (i->content->position() <= time && time < i->content->end()) {
			i->decoder->seek (dcp_to_content_time (i, time), accurate);
	/* Pretend the last frame was one before the seek target ... */
	_last_time = time - one_video_frame ();
	/* ... or forget it entirely (inaccurate seek) */
	_last_time = optional<DCPTime> ();
/** Get (and cache) the Resampler for a content/stream pair.
 *  @param create true to create one if none exists; when false and none
 *  exists, an empty pointer is returned. */
shared_ptr<Resampler>
Player::resampler (shared_ptr<const AudioContent> content, AudioStreamPtr stream, bool create)
	ResamplerMap::const_iterator i = _resamplers.find (make_pair (content, stream));
	if (i != _resamplers.end ()) {
	return shared_ptr<Resampler> ();
	/* (LOG_GENERAL call, partially visible below) */
	"Creating new resampler from %1 to %2 with %3 channels",
	stream->frame_rate(),
	content->resampled_frame_rate(),
	shared_ptr<Resampler> r (
		new Resampler (stream->frame_rate(), content->resampled_frame_rate(), stream->channels())
	_resamplers[make_pair(content, stream)] = r;
/* Emit video frames to cover `period`, skipping spans supplied by
   referenced DCPs (_no_video).  Repeats the last frame where there is
   video content at that time, otherwise emits black. */
Player::fill_video (DCPTimePeriod period)
	/* XXX: this may not work for 3D */
	BOOST_FOREACH (DCPTimePeriod i, subtract(period, _no_video)) {
		for (DCPTime j = i.from; j < i.to; j += one_video_frame()) {
			if (_playlist->video_content_at(j) && _last_video) {
				/* Repeat the last frame we emitted */
				Video (shared_ptr<PlayerVideo> (new PlayerVideo (*_last_video)), j);
				Video (black_player_video_frame(), j);
/* Emit silence to cover `period`, skipping spans supplied by referenced
   DCPs (_no_audio).  Silence is emitted in blocks of at most half a
   second (the surrounding loop and the Audio emission appear to be in
   lines omitted from this excerpt). */
Player::fill_audio (DCPTimePeriod period)
	BOOST_FOREACH (DCPTimePeriod i, subtract(period, _no_audio)) {
		/* Cap each block at 0.5s to bound buffer sizes */
		DCPTime block = min (DCPTime::from_seconds (0.5), i.to - t);
		Frame const samples = block.frames_round(_film->audio_frame_rate());
		shared_ptr<AudioBuffers> silence (new AudioBuffers (_film->audio_channels(), samples));
		silence->make_silent ();
/* @return the DCP duration of a single video frame at the film's rate */
Player::one_video_frame () const
	return DCPTime::from_frames (1, _film->video_frame_rate ());