2 Copyright (C) 2013-2017 Carl Hetherington <cth@carlh.net>
4 This file is part of DCP-o-matic.
6 DCP-o-matic is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2 of the License, or
9 (at your option) any later version.
11 DCP-o-matic is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with DCP-o-matic. If not, see <http://www.gnu.org/licenses/>.
23 #include "audio_buffers.h"
24 #include "content_audio.h"
25 #include "dcp_content.h"
28 #include "raw_image_proxy.h"
31 #include "render_subtitles.h"
33 #include "content_video.h"
34 #include "player_video.h"
35 #include "frame_rate_change.h"
36 #include "audio_processor.h"
38 #include "referenced_reel_asset.h"
39 #include "decoder_factory.h"
41 #include "video_decoder.h"
42 #include "audio_decoder.h"
43 #include "subtitle_content.h"
44 #include "subtitle_decoder.h"
45 #include "ffmpeg_content.h"
46 #include "audio_content.h"
47 #include "content_subtitle.h"
48 #include "dcp_decoder.h"
49 #include "image_decoder.h"
50 #include "resampler.h"
51 #include "compose.hpp"
53 #include <dcp/reel_sound_asset.h>
54 #include <dcp/reel_subtitle_asset.h>
55 #include <dcp/reel_picture_asset.h>
56 #include <boost/foreach.hpp>
/* Log a TYPE_GENERAL message via the Film's log.  Note the macro body ends
   with a semicolon, so call sites should not add another. */
63 #define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);
75 using boost::shared_ptr;
76 using boost::weak_ptr;
77 using boost::dynamic_pointer_cast;
78 using boost::optional;
79 using boost::scoped_ptr;
/* Construct a Player for @param film and @param playlist: connect the
   change-notification signals, size the video container to the film's frame
   size, initialise the audio processor (via film_changed) and seek to the
   start.  NOTE(review): some initialiser-list lines (e.g. _film) appear to
   be elided from this extract. */
81 Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
83 , _playlist (playlist)
84 , _have_valid_pieces (false)
85 , _ignore_video (false)
86 , _ignore_audio (false)
87 , _always_burn_subtitles (false)
89 , _play_referenced (false)
90 , _audio_merger (_film->audio_frame_rate())
/* Re-examine our state when the film, the playlist or any content changes */
92 _film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
93 _playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
94 _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::playlist_content_changed, this, _1, _2, _3));
95 set_video_container_size (_film->frame_size ());
/* Pick up the film's audio processor, if any */
97 film_changed (Film::AUDIO_PROCESSOR);
/* Start decoding accurately from time zero */
99 seek (DCPTime (), true);
/* Rebuild _pieces from the playlist: make a decoder for each content item,
   connect its Data signals to our handlers, record per-stream audio state,
   and (when not playing referenced DCPs) note the periods for which video /
   audio will come from a referenced DCP rather than from us. */
103 Player::setup_pieces ()
107 BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
/* Skip content whose files are missing */
109 if (!i->paths_valid ()) {
113 shared_ptr<Decoder> decoder = decoder_factory (i, _film->log());
114 FrameRateChange frc (i->active_video_frame_rate(), _film->video_frame_rate());
117 /* Not something that we can decode; e.g. Atmos content */
/* Honour the ignore flags set by set_ignore_video / set_ignore_audio */
121 if (decoder->video && _ignore_video) {
122 decoder->video->set_ignore ();
125 if (decoder->audio && _ignore_audio) {
126 decoder->audio->set_ignore ();
/* If we are playing referenced DCP content the DCP decoder must decode it */
129 shared_ptr<DCPDecoder> dcp = dynamic_pointer_cast<DCPDecoder> (decoder);
130 if (dcp && _play_referenced) {
131 dcp->set_decode_referenced ();
134 shared_ptr<Piece> piece (new Piece (i, decoder, frc));
135 _pieces.push_back (piece);
/* Route decoded data to our handlers, holding the piece weakly so the
   connections do not keep it alive */
137 if (decoder->video) {
138 decoder->video->Data.connect (bind (&Player::video, this, weak_ptr<Piece> (piece), _1));
141 if (decoder->audio) {
142 decoder->audio->Data.connect (bind (&Player::audio, this, weak_ptr<Piece> (piece), _1, _2));
145 if (decoder->subtitle) {
146 decoder->subtitle->ImageData.connect (bind (&Player::image_subtitle, this, weak_ptr<Piece> (piece), _1));
147 decoder->subtitle->TextData.connect (bind (&Player::text_subtitle, this, weak_ptr<Piece> (piece), _1));
/* Remember where each audio stream's content starts, for the merger */
151 BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
152 if (i->content->audio) {
153 BOOST_FOREACH (AudioStreamPtr j, i->content->audio->streams()) {
154 _stream_states[j] = StreamState (i, i->content->position ());
/* Periods covered by referenced DCP video/audio: we will not emit there */
159 if (!_play_referenced) {
160 BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
161 shared_ptr<DCPContent> dc = dynamic_pointer_cast<DCPContent> (i->content);
163 if (dc->reference_video()) {
164 _no_video.push_back (DCPTimePeriod (dc->position(), dc->end()));
166 if (dc->reference_audio()) {
167 _no_audio.push_back (DCPTimePeriod (dc->position(), dc->end()));
173 _have_valid_pieces = true;
/* Handle a change to @param w's property @param property.  Properties in the
   first group invalidate our pieces (decoders must be rebuilt); those in the
   second group are handled by the (elided) code following the second list —
   presumably a lighter-weight notification.  @param frequent is true for
   rapidly-repeating changes. */
177 Player::playlist_content_changed (weak_ptr<Content> w, int property, bool frequent)
179 shared_ptr<Content> c = w.lock ();
/* Changes which require setup_pieces() to run again */
185 property == ContentProperty::POSITION ||
186 property == ContentProperty::LENGTH ||
187 property == ContentProperty::TRIM_START ||
188 property == ContentProperty::TRIM_END ||
189 property == ContentProperty::PATH ||
190 property == VideoContentProperty::FRAME_TYPE ||
191 property == DCPContentProperty::NEEDS_ASSETS ||
192 property == DCPContentProperty::NEEDS_KDM ||
193 property == SubtitleContentProperty::COLOUR ||
194 property == SubtitleContentProperty::OUTLINE ||
195 property == SubtitleContentProperty::SHADOW ||
196 property == SubtitleContentProperty::EFFECT_COLOUR ||
197 property == FFmpegContentProperty::SUBTITLE_STREAM ||
198 property == VideoContentProperty::COLOUR_CONVERSION
201 _have_valid_pieces = false;
/* Changes which affect output but do not require new pieces */
205 property == SubtitleContentProperty::LINE_SPACING ||
206 property == SubtitleContentProperty::OUTLINE_WIDTH ||
207 property == SubtitleContentProperty::Y_SCALE ||
208 property == SubtitleContentProperty::FADE_IN ||
209 property == SubtitleContentProperty::FADE_OUT ||
210 property == ContentProperty::VIDEO_FRAME_RATE ||
211 property == SubtitleContentProperty::USE ||
212 property == SubtitleContentProperty::X_OFFSET ||
213 property == SubtitleContentProperty::Y_OFFSET ||
214 property == SubtitleContentProperty::X_SCALE ||
215 property == SubtitleContentProperty::FONTS ||
216 property == VideoContentProperty::CROP ||
217 property == VideoContentProperty::SCALE ||
218 property == VideoContentProperty::FADE_IN ||
219 property == VideoContentProperty::FADE_OUT
/* Set the size of the container that our video frames will be scaled into,
   rebuilding the cached black frame to match.  No-op if the size is
   unchanged. */
227 Player::set_video_container_size (dcp::Size s)
229 if (s == _video_container_size) {
233 _video_container_size = s;
/* Re-make the black frame at the new size */
235 _black_image.reset (new Image (AV_PIX_FMT_RGB24, _video_container_size, true));
236 _black_image->make_black ();
/* The playlist's content list changed: our pieces are now stale. */
242 Player::playlist_changed ()
244 _have_valid_pieces = false;
/* React to a change of Film property @param p that affects our output. */
249 Player::film_changed (Film::Property p)
251 /* Here we should notice Film properties that affect our output, and
252 alert listeners that our output now would be different to how it was
253 last time we were run.
256 if (p == Film::CONTAINER) {
258 } else if (p == Film::VIDEO_FRAME_RATE) {
259 /* Pieces contain a FrameRateChange which contains the DCP frame rate,
260 so we need new pieces here.
262 _have_valid_pieces = false;
264 } else if (p == Film::AUDIO_PROCESSOR) {
265 if (_film->audio_processor ()) {
/* Clone the processor so we have our own instance at the film's rate */
266 _audio_processor = _film->audio_processor()->clone (_film->audio_frame_rate ());
/* Scale and position each image subtitle in @param subs so that it fits
   _video_container_size, returning the results as PositionImages.
   NOTE(review): the actual Image::scale call appears to be elided from this
   extract; lines 300-307 look like its arguments. */
272 Player::transform_image_subtitles (list<ImageSubtitle> subs) const
274 list<PositionImage> all;
276 for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
281 /* We will scale the subtitle up to fit _video_container_size */
282 dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);
284 /* Then we need a corrective translation, consisting of two parts:
286 * 1. that which is the result of the scaling of the subtitle by _video_container_size; this will be
287 * rect.x * _video_container_size.width and rect.y * _video_container_size.height.
289 * 2. that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
290 * (width_before_subtitle_scale * (1 - subtitle_x_scale) / 2) and
291 * (height_before_subtitle_scale * (1 - subtitle_y_scale) / 2).
293 * Combining these two translations gives these expressions.
300 dcp::YUV_TO_RGB_REC601,
301 i->image->pixel_format (),
306 lrint (_video_container_size.width * i->rectangle.x),
307 lrint (_video_container_size.height * i->rectangle.y)
/* Build a PlayerVideo wrapping our cached black frame at the current
   container size, using the default preset colour conversion.  Used to fill
   gaps where there is no video content. */
316 shared_ptr<PlayerVideo>
317 Player::black_player_video_frame () const
319 return shared_ptr<PlayerVideo> (
321 shared_ptr<const ImageProxy> (new RawImageProxy (_black_image)),
324 _video_container_size,
325 _video_container_size,
328 PresetColourConversion::all().front().conversion
/* Convert DCP time @param t to a video frame index within @param piece's
   content, accounting for position, trim and the frame-rate change. */
334 Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
336 DCPTime s = t - piece->content->position ();
/* Clamp into [0, length_after_trim] and add back the trimmed start */
337 s = min (piece->content->length_after_trim(), s);
338 s = max (DCPTime(), s + DCPTime (piece->content->trim_start(), piece->frc));
340 /* It might seem more logical here to convert s to a ContentTime (using the FrameRateChange)
341 then convert that ContentTime to frames at the content's rate. However this fails for
342 situations like content at 29.9978733fps, DCP at 30fps. The accuracy of the Time type is not
343 enough to distinguish between the two with low values of time (e.g. 3200 in Time units).
345 Instead we convert the DCPTime using the DCP video rate then account for any skip/repeat.
347 return s.frames_floor (piece->frc.dcp) / piece->frc.factor ();
/* Inverse of dcp_to_content_video: convert content video frame @param f of
   @param piece to a DCP time, clamped to be non-negative. */
351 Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
353 /* See comment in dcp_to_content_video */
354 DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime (piece->content->trim_start (), piece->frc);
355 return max (DCPTime (), d + piece->content->position ());
/* Convert DCP time @param t to a frame count at the film's audio sample
   rate within @param piece's (post-resampling) audio. */
359 Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
361 DCPTime s = t - piece->content->position ();
362 s = min (piece->content->length_after_trim(), s);
363 /* See notes in dcp_to_content_video */
364 return max (DCPTime (), DCPTime (piece->content->trim_start (), piece->frc) + s).frames_floor (_film->audio_frame_rate ());
/* Inverse of dcp_to_resampled_audio: convert resampled audio frame @param f
   of @param piece to a DCP time, clamped to be non-negative. */
368 Player::resampled_audio_to_dcp (shared_ptr<const Piece> piece, Frame f) const
370 /* See comment in dcp_to_content_video */
371 DCPTime const d = DCPTime::from_frames (f, _film->audio_frame_rate()) - DCPTime (piece->content->trim_start(), piece->frc);
372 return max (DCPTime (), d + piece->content->position ());
/* Convert DCP time @param t to a ContentTime within @param piece,
   accounting for position, trim and frame-rate change. */
376 Player::dcp_to_content_time (shared_ptr<const Piece> piece, DCPTime t) const
378 DCPTime s = t - piece->content->position ();
379 s = min (piece->content->length_after_trim(), s);
380 return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
/* Inverse of dcp_to_content_time: convert ContentTime @param t within
   @param piece to a DCP time, clamped to be non-negative. */
384 Player::content_time_to_dcp (shared_ptr<const Piece> piece, ContentTime t) const
386 return max (DCPTime (), DCPTime (t - piece->content->trim_start(), piece->frc) + piece->content->position());
/* Collect the fonts used by all subtitle content in the playlist,
   rebuilding pieces first if they are stale. */
389 list<shared_ptr<Font> >
390 Player::get_subtitle_fonts ()
392 if (!_have_valid_pieces) {
396 list<shared_ptr<Font> > fonts;
397 BOOST_FOREACH (shared_ptr<Piece>& p, _pieces) {
398 if (p->content->subtitle) {
399 /* XXX: things may go wrong if there are duplicate font IDs
400 with different font files.
402 list<shared_ptr<Font> > f = p->content->subtitle->fonts ();
403 copy (f.begin(), f.end(), back_inserter (fonts));
410 /** Set this player never to produce any video data */
412 Player::set_ignore_video ()
414 _ignore_video = true;
417 /** Set this player never to produce any audio data */
419 Player::set_ignore_audio ()
421 _ignore_audio = true;
424 /** Set whether or not this player should always burn text subtitles into the image,
425 * regardless of the content settings.
426 * @param burn true to always burn subtitles, false to obey content settings.
429 Player::set_always_burn_subtitles (bool burn)
431 _always_burn_subtitles = burn;
/* NOTE(review): the line below sits after an elided gap and appears to
   belong to a separate (elided) setter which invalidates the pieces. */
438 _have_valid_pieces = false;
/* Set this player to play content referenced by DCPs itself (rather than
   leaving those periods to the referenced assets); pieces must be rebuilt. */
442 Player::set_play_referenced ()
444 _play_referenced = true;
445 _have_valid_pieces = false;
448 list<ReferencedReelAsset>
/* Gather the reel assets (picture/sound/subtitle) of any DCP content that is
   marked as referenced, adjusting each asset's entry point and duration for
   the content's trims and computing the DCP period it covers. */
449 Player::get_reel_assets ()
451 list<ReferencedReelAsset> a;
453 BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
/* Only DCP content can be referenced */
454 shared_ptr<DCPContent> j = dynamic_pointer_cast<DCPContent> (i);
459 scoped_ptr<DCPDecoder> decoder;
461 decoder.reset (new DCPDecoder (j, _film->log()));
467 BOOST_FOREACH (shared_ptr<dcp::Reel> k, decoder->reels()) {
/* Trims expressed in frames at the content's own rate (cfr) */
469 DCPOMATIC_ASSERT (j->video_frame_rate ());
470 double const cfr = j->video_frame_rate().get();
471 Frame const trim_start = j->trim_start().frames_round (cfr);
472 Frame const trim_end = j->trim_end().frames_round (cfr);
473 int const ffr = _film->video_frame_rate ();
/* offset accumulates reel lengths (elided initialisation, presumably 0) */
475 DCPTime const from = i->position() + DCPTime::from_frames (offset, _film->video_frame_rate());
476 if (j->reference_video ()) {
477 shared_ptr<dcp::ReelAsset> ra = k->main_picture ();
478 DCPOMATIC_ASSERT (ra);
/* Apply the trims to the asset itself */
479 ra->set_entry_point (ra->entry_point() + trim_start);
480 ra->set_duration (ra->duration() - trim_start - trim_end);
482 ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
486 if (j->reference_audio ()) {
487 shared_ptr<dcp::ReelAsset> ra = k->main_sound ();
488 DCPOMATIC_ASSERT (ra);
489 ra->set_entry_point (ra->entry_point() + trim_start);
490 ra->set_duration (ra->duration() - trim_start - trim_end);
492 ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
496 if (j->reference_subtitle ()) {
497 shared_ptr<dcp::ReelAsset> ra = k->main_subtitle ();
498 DCPOMATIC_ASSERT (ra);
499 ra->set_entry_point (ra->entry_point() + trim_start);
500 ra->set_duration (ra->duration() - trim_start - trim_end);
502 ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
506 /* Assume that main picture duration is the length of the reel */
507 offset += k->main_picture()->duration ();
514 list<shared_ptr<Piece> >
/* Return the pieces whose content both satisfies @param valid and overlaps
   the DCP period [from, to), rebuilding pieces first if stale. */
515 Player::overlaps (DCPTime from, DCPTime to, boost::function<bool (Content *)> valid)
517 if (!_have_valid_pieces) {
521 list<shared_ptr<Piece> > overlaps;
522 BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
523 if (valid (i->content.get ()) && i->content->position() < to && i->content->end() > from) {
524 overlaps.push_back (i);
/* NOTE(review): the signature of this function is elided from this extract;
   from its body it appears to be Player::pass(): run one decode pass on the
   piece with the earliest pending content, fill gaps with silent black when
   everything is done, then pull and emit any audio the merger has ready. */
534 if (!_have_valid_pieces) {
/* Find the not-yet-done piece whose decoder is at the earliest DCP time */
538 shared_ptr<Piece> earliest;
539 DCPTime earliest_content;
541 BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
543 DCPTime const t = i->content->position() + DCPTime (i->decoder->position(), i->frc);
544 if (!earliest || t < earliest_content) {
545 earliest_content = t;
552 /* No more content; fill up with silent black */
553 DCPTimePeriod remaining_video (DCPTime(), _playlist->length());
554 if (_last_video_time) {
555 remaining_video.from = _last_video_time.get() + one_video_frame();
557 fill_video (remaining_video);
558 DCPTimePeriod remaining_audio (DCPTime(), _playlist->length());
559 if (_last_audio_time) {
560 remaining_audio.from = _last_audio_time.get();
562 fill_audio (remaining_audio);
566 earliest->done = earliest->decoder->pass ();
567 if (earliest->done && earliest->content->audio) {
568 /* Flush the Player audio system for this piece */
569 BOOST_FOREACH (AudioStreamPtr i, earliest->content->audio->streams()) {
570 audio_flush (earliest, i);
574 /* Emit any audio that is ready */
/* Only pull up to the earliest point any live stream has pushed to, so we
   never emit audio that a later push could still precede */
576 DCPTime pull_from = _playlist->length ();
577 for (map<AudioStreamPtr, StreamState>::const_iterator i = _stream_states.begin(); i != _stream_states.end(); ++i) {
578 if (!i->second.piece->done && i->second.last_push_end < pull_from) {
579 pull_from = i->second.last_push_end;
583 list<pair<shared_ptr<AudioBuffers>, DCPTime> > audio = _audio_merger.pull (pull_from);
584 for (list<pair<shared_ptr<AudioBuffers>, DCPTime> >::iterator i = audio.begin(); i != audio.end(); ++i) {
585 DCPOMATIC_ASSERT (!_last_audio_time || i->second >= _last_audio_time.get());
586 if (_last_audio_time) {
/* Pad any gap since the last emitted audio with silence */
587 fill_audio (DCPTimePeriod (_last_audio_time.get(), i->second));
589 Audio (i->first, i->second);
590 _last_audio_time = i->second + DCPTime::from_frames(i->first->frames(), _film->audio_frame_rate());
/* Handle a decoded video frame from @param wp: compute its DCP time, drop
   skipped/out-of-range frames, overlay any subtitles active in its period,
   fill any video gap since the last emitted frame, then emit it and prune
   expired subtitles. */
597 Player::video (weak_ptr<Piece> wp, ContentVideo video)
599 shared_ptr<Piece> piece = wp.lock ();
/* When skipping (content faster than DCP) drop every other frame */
604 FrameRateChange frc(piece->content->active_video_frame_rate(), _film->video_frame_rate());
605 if (frc.skip && (video.frame % 2) == 1) {
609 /* Time and period of the frame we will emit */
610 DCPTime const time = content_video_to_dcp (piece, video.frame);
611 DCPTimePeriod const period (time, time + one_video_frame());
613 /* Discard if it's outside the content's period */
614 if (time < piece->content->position() || time >= piece->content->end()) {
618 /* Get any subtitles */
620 optional<PositionImage> subtitles;
622 for (list<pair<PlayerSubtitles, DCPTimePeriod> >::const_iterator i = _subtitles.begin(); i != _subtitles.end(); ++i) {
/* Only subtitles whose period overlaps this frame */
624 if (!i->second.overlap (period)) {
628 list<PositionImage> sub_images;
630 /* Image subtitles */
631 list<PositionImage> c = transform_image_subtitles (i->first.image);
632 copy (c.begin(), c.end(), back_inserter (sub_images));
634 /* Text subtitles (rendered to an image) */
635 if (!i->first.text.empty ()) {
636 list<PositionImage> s = render_subtitles (i->first.text, i->first.fonts, _video_container_size, time);
637 copy (s.begin (), s.end (), back_inserter (sub_images));
640 if (!sub_images.empty ()) {
641 subtitles = merge (sub_images);
/* Fill any gap between the last frame we emitted and this one */
647 if (_last_video_time) {
648 fill_video (DCPTimePeriod (_last_video_time.get() + one_video_frame(), time));
/* Build the PlayerVideo (constructor call partially elided in this extract) */
654 piece->content->video->crop (),
655 piece->content->video->fade (video.frame),
656 piece->content->video->scale().size (
657 piece->content->video, _video_container_size, _film->frame_size ()
659 _video_container_size,
662 piece->content->video->colour_conversion ()
667 _last_video->set_subtitle (subtitles.get ());
670 _last_video_time = time;
672 Video (_last_video, *_last_video_time);
674 /* Discard any subtitles we no longer need */
676 for (list<pair<PlayerSubtitles, DCPTimePeriod> >::iterator i = _subtitles.begin (); i != _subtitles.end(); ) {
677 list<pair<PlayerSubtitles, DCPTimePeriod> >::iterator tmp = i;
680 if (i->second.to < time) {
681 _subtitles.erase (i);
/* Flush the resampler (if any) for @param stream of @param piece, pushing
   whatever samples it still holds through the normal audio_transform path. */
689 Player::audio_flush (shared_ptr<Piece> piece, AudioStreamPtr stream)
691 shared_ptr<AudioContent> content = piece->content->audio;
692 DCPOMATIC_ASSERT (content);
/* create=false: nothing to flush if we never made a resampler */
694 shared_ptr<Resampler> r = resampler (content, stream, false);
699 pair<shared_ptr<const AudioBuffers>, Frame> ro = r->flush ();
700 ContentAudio content_audio;
701 content_audio.audio = ro.first;
702 content_audio.frame = ro.second;
704 /* Compute time in the DCP */
705 DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame) + DCPTime::from_seconds (content->delay() / 1000.0);
707 audio_transform (content, stream, content_audio, time);
710 /** Do our common processing on some audio */
/* Apply gain, remap the stream's channels into the DCP channel layout, run
   the audio processor (if any), then push the result into the merger and
   record how far this stream has been pushed. */
712 Player::audio_transform (shared_ptr<AudioContent> content, AudioStreamPtr stream, ContentAudio content_audio, DCPTime time)
716 if (content->gain() != 0) {
/* Copy so we do not modify the decoder's buffers */
717 shared_ptr<AudioBuffers> gain (new AudioBuffers (content_audio.audio));
718 gain->apply_gain (content->gain ());
719 content_audio.audio = gain;
/* Remap into the film's channel count using the stream's AudioMapping */
724 shared_ptr<AudioBuffers> dcp_mapped (new AudioBuffers (_film->audio_channels(), content_audio.audio->frames()));
725 dcp_mapped->make_silent ();
727 AudioMapping map = stream->mapping ();
728 for (int i = 0; i < map.input_channels(); ++i) {
729 for (int j = 0; j < dcp_mapped->channels(); ++j) {
730 if (map.get (i, static_cast<dcp::Channel> (j)) > 0) {
731 dcp_mapped->accumulate_channel (
732 content_audio.audio.get(),
734 static_cast<dcp::Channel> (j),
735 map.get (i, static_cast<dcp::Channel> (j))
741 content_audio.audio = dcp_mapped;
745 if (_audio_processor) {
746 content_audio.audio = _audio_processor->run (content_audio.audio, _film->audio_channels ());
751 _audio_merger.push (content_audio.audio, time);
752 DCPOMATIC_ASSERT (_stream_states.find (stream) != _stream_states.end ());
753 _stream_states[stream].last_push_end = time + DCPTime::from_frames (content_audio.audio->frames(), _film->audio_frame_rate());
/* Handle decoded audio from @param wp's @param stream: resample to the
   content's target rate if needed, compute its DCP time (including the
   content's delay), trim anything outside the content's period, then hand
   off to audio_transform. */
757 Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_audio)
759 shared_ptr<Piece> piece = wp.lock ();
764 shared_ptr<AudioContent> content = piece->content->audio;
765 DCPOMATIC_ASSERT (content);
/* Resample if the stream's rate differs from the target rate */
768 if (stream->frame_rate() != content->resampled_frame_rate()) {
769 shared_ptr<Resampler> r = resampler (content, stream, true);
770 pair<shared_ptr<const AudioBuffers>, Frame> ro = r->run (content_audio.audio, content_audio.frame);
771 content_audio.audio = ro.first;
772 content_audio.frame = ro.second;
775 /* Compute time in the DCP */
776 DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame) + DCPTime::from_seconds (content->delay() / 1000.0);
777 /* And the end of this block in the DCP */
778 DCPTime end = time + DCPTime::from_frames(content_audio.audio->frames(), content->resampled_frame_rate());
780 /* Remove anything that comes before the start or after the end of the content */
781 if (time < piece->content->position()) {
782 DCPTime const discard_time = piece->content->position() - time;
783 Frame discard_frames = discard_time.frames_round(_film->audio_frame_rate());
784 Frame remaining_frames = content_audio.audio->frames() - discard_frames;
785 if (remaining_frames <= 0) {
786 /* This audio is entirely discarded */
/* Keep only the part after the content's start */
789 shared_ptr<AudioBuffers> cut (new AudioBuffers (content_audio.audio->channels(), remaining_frames));
790 cut->copy_from (content_audio.audio.get(), remaining_frames, discard_frames, 0);
791 content_audio.audio = cut;
792 time += discard_time;
793 } else if (time > piece->content->end()) {
/* Starts after the content ends: drop it entirely (body elided here) */
796 } else if (end > piece->content->end()) {
/* Overhangs the content's end: truncate */
797 Frame const remaining_frames = DCPTime(piece->content->end() - time).frames_round(_film->audio_frame_rate());
798 shared_ptr<AudioBuffers> cut (new AudioBuffers (content_audio.audio->channels(), remaining_frames));
799 cut->copy_from (content_audio.audio.get(), remaining_frames, 0, 0);
800 content_audio.audio = cut;
803 audio_transform (content, stream, content_audio, time);
/* Handle a decoded image subtitle: apply the content's offsets and scales,
   compute its DCP period, then either queue it for burning into video or
   emit it via the Subtitle signal. */
807 Player::image_subtitle (weak_ptr<Piece> wp, ContentImageSubtitle subtitle)
809 shared_ptr<Piece> piece = wp.lock ();
814 /* Apply content's subtitle offsets */
815 subtitle.sub.rectangle.x += piece->content->subtitle->x_offset ();
816 subtitle.sub.rectangle.y += piece->content->subtitle->y_offset ();
818 /* Apply content's subtitle scale */
819 subtitle.sub.rectangle.width *= piece->content->subtitle->x_scale ();
820 subtitle.sub.rectangle.height *= piece->content->subtitle->y_scale ();
822 /* Apply a corrective translation to keep the subtitle centred after that scale */
823 subtitle.sub.rectangle.x -= subtitle.sub.rectangle.width * (piece->content->subtitle->x_scale() - 1);
824 subtitle.sub.rectangle.y -= subtitle.sub.rectangle.height * (piece->content->subtitle->y_scale() - 1);
/* ps is the PlayerSubtitles being built (declaration elided in this extract) */
827 ps.image.push_back (subtitle.sub);
828 DCPTimePeriod period (content_time_to_dcp (piece, subtitle.period().from), content_time_to_dcp (piece, subtitle.period().to));
/* Burn into the image if the content says so, or if we always burn */
830 if (piece->content->subtitle->use() && (piece->content->subtitle->burn() || _always_burn_subtitles)) {
831 _subtitles.push_back (make_pair (ps, period));
833 Subtitle (ps, period);
/* Handle a decoded text subtitle: apply the content's offsets, size/aspect
   scaling and timing, collect the fonts, then either queue for burning or
   emit via the Subtitle signal. */
838 Player::text_subtitle (weak_ptr<Piece> wp, ContentTextSubtitle subtitle)
840 shared_ptr<Piece> piece = wp.lock ();
846 DCPTimePeriod const period (content_time_to_dcp (piece, subtitle.period().from), content_time_to_dcp (piece, subtitle.period().to));
848 BOOST_FOREACH (dcp::SubtitleString s, subtitle.subs) {
849 s.set_h_position (s.h_position() + piece->content->subtitle->x_offset ());
850 s.set_v_position (s.v_position() + piece->content->subtitle->y_offset ());
851 float const xs = piece->content->subtitle->x_scale();
852 float const ys = piece->content->subtitle->y_scale();
853 float size = s.size();
855 /* Adjust size to express the common part of the scaling;
856 e.g. if xs = ys = 0.5 we scale size by 2.
858 if (xs > 1e-5 && ys > 1e-5) {
859 size *= 1 / min (1 / xs, 1 / ys);
863 /* Then express aspect ratio changes */
864 if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) {
865 s.set_aspect_adjust (xs / ys);
/* Subtitle times in ms-precision dcp::Time */
868 s.set_in (dcp::Time(period.from.seconds(), 1000));
869 s.set_out (dcp::Time(period.to.seconds(), 1000));
870 ps.text.push_back (SubtitleString (s, piece->content->subtitle->outline_width()));
871 ps.add_fonts (piece->content->subtitle->fonts ());
874 if (piece->content->subtitle->use() && (piece->content->subtitle->burn() || _always_burn_subtitles)) {
875 _subtitles.push_back (make_pair (ps, period));
877 Subtitle (ps, period);
/* Seek to DCP time @param time.  @param accurate selects frame-accurate
   seeking in the decoders.  Flushes the audio processor and seeks every
   piece whose content covers @param time; the last video/audio times are
   reset accordingly (the accurate/inaccurate branch is partly elided). */
882 Player::seek (DCPTime time, bool accurate)
884 if (_audio_processor) {
885 _audio_processor->flush ();
888 BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
889 if (i->content->position() <= time && time < i->content->end()) {
890 i->decoder->seek (dcp_to_content_time (i, time), accurate);
/* For an accurate seek, pretend we just emitted the frame before time */
896 _last_video_time = time - one_video_frame ();
897 _last_audio_time = time;
899 _last_video_time = optional<DCPTime> ();
900 _last_audio_time = optional<DCPTime> ();
904 shared_ptr<Resampler>
/* Return the resampler for (content, stream), creating and caching one
   (stream rate -> content's resampled rate) if @param create is true;
   otherwise return an empty pointer when none exists yet. */
905 Player::resampler (shared_ptr<const AudioContent> content, AudioStreamPtr stream, bool create)
907 ResamplerMap::const_iterator i = _resamplers.find (make_pair (content, stream));
908 if (i != _resamplers.end ()) {
913 return shared_ptr<Resampler> ();
917 "Creating new resampler from %1 to %2 with %3 channels",
918 stream->frame_rate(),
919 content->resampled_frame_rate(),
923 shared_ptr<Resampler> r (
924 new Resampler (stream->frame_rate(), content->resampled_frame_rate(), stream->channels())
927 _resamplers[make_pair(content, stream)] = r;
/* Emit video for @param period, excluding any sub-periods covered by
   referenced DCP video (_no_video): repeat the last frame where video
   content exists, otherwise emit black. */
932 Player::fill_video (DCPTimePeriod period)
934 /* XXX: this may not work for 3D */
935 BOOST_FOREACH (DCPTimePeriod i, subtract(period, _no_video)) {
936 for (DCPTime j = i.from; j < i.to; j += one_video_frame()) {
937 if (_playlist->video_content_at(j) && _last_video) {
938 Video (shared_ptr<PlayerVideo> (new PlayerVideo (*_last_video)), j);
940 Video (black_player_video_frame(), j);
/* Emit silence for @param period, excluding sub-periods covered by
   referenced DCP audio (_no_audio), in blocks of at most half a second. */
947 Player::fill_audio (DCPTimePeriod period)
949 BOOST_FOREACH (DCPTimePeriod i, subtract(period, _no_audio)) {
952 DCPTime block = min (DCPTime::from_seconds (0.5), i.to - t);
953 Frame const samples = block.frames_round(_film->audio_frame_rate());
955 shared_ptr<AudioBuffers> silence (new AudioBuffers (_film->audio_channels(), samples));
956 silence->make_silent ();
/* The DCP duration of a single video frame at the film's frame rate. */
965 Player::one_video_frame () const
967 return DCPTime::from_frames (1, _film->video_frame_rate ());