/*
    Copyright (C) 2013-2016 Carl Hetherington <cth@carlh.net>

    This file is part of DCP-o-matic.

    DCP-o-matic is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    DCP-o-matic is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.

*/
23 #include "audio_buffers.h"
24 #include "content_audio.h"
25 #include "dcp_content.h"
28 #include "raw_image_proxy.h"
31 #include "render_subtitles.h"
33 #include "content_video.h"
34 #include "player_video.h"
35 #include "frame_rate_change.h"
36 #include "audio_processor.h"
38 #include "referenced_reel_asset.h"
39 #include "decoder_factory.h"
41 #include "video_decoder.h"
42 #include "audio_decoder.h"
43 #include "subtitle_content.h"
44 #include "subtitle_decoder.h"
45 #include "ffmpeg_content.h"
46 #include "audio_content.h"
47 #include "content_subtitle.h"
48 #include "dcp_decoder.h"
49 #include "image_decoder.h"
51 #include <dcp/reel_sound_asset.h>
52 #include <dcp/reel_subtitle_asset.h>
53 #include <dcp/reel_picture_asset.h>
54 #include <boost/foreach.hpp>
61 #define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);
73 using boost::shared_ptr;
74 using boost::weak_ptr;
75 using boost::dynamic_pointer_cast;
76 using boost::optional;
77 using boost::scoped_ptr;
80 has_video (Content* c)
82 return static_cast<bool>(c->video);
86 has_audio (Content* c)
88 return static_cast<bool>(c->audio);
92 has_subtitle (Content* c)
94 return static_cast<bool>(c->subtitle);
97 Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
99 , _playlist (playlist)
100 , _have_valid_pieces (false)
101 , _ignore_video (false)
102 , _ignore_audio (false)
103 , _always_burn_subtitles (false)
105 , _play_referenced (false)
107 _film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
108 _playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
109 _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::playlist_content_changed, this, _1, _2, _3));
110 set_video_container_size (_film->frame_size ());
112 film_changed (Film::AUDIO_PROCESSOR);
116 Player::setup_pieces ()
120 BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
122 if (!i->paths_valid ()) {
126 shared_ptr<Decoder> decoder = decoder_factory (i, _film->log(), _fast);
127 FrameRateChange frc (i->active_video_frame_rate(), _film->video_frame_rate());
130 /* Not something that we can decode; e.g. Atmos content */
134 if (decoder->video && _ignore_video) {
135 decoder->video->set_ignore ();
138 if (decoder->audio && _ignore_audio) {
139 decoder->audio->set_ignore ();
142 _pieces.push_back (shared_ptr<Piece> (new Piece (i, decoder, frc)));
145 _have_valid_pieces = true;
149 Player::playlist_content_changed (weak_ptr<Content> w, int property, bool frequent)
151 shared_ptr<Content> c = w.lock ();
157 property == ContentProperty::POSITION ||
158 property == ContentProperty::LENGTH ||
159 property == ContentProperty::TRIM_START ||
160 property == ContentProperty::TRIM_END ||
161 property == ContentProperty::PATH ||
162 property == VideoContentProperty::FRAME_TYPE ||
163 property == DCPContentProperty::CAN_BE_PLAYED ||
164 property == SubtitleContentProperty::COLOUR ||
165 property == SubtitleContentProperty::OUTLINE ||
166 property == SubtitleContentProperty::OUTLINE_COLOUR ||
167 property == FFmpegContentProperty::SUBTITLE_STREAM
170 _have_valid_pieces = false;
174 property == ContentProperty::VIDEO_FRAME_RATE ||
175 property == SubtitleContentProperty::USE ||
176 property == SubtitleContentProperty::X_OFFSET ||
177 property == SubtitleContentProperty::Y_OFFSET ||
178 property == SubtitleContentProperty::X_SCALE ||
179 property == SubtitleContentProperty::Y_SCALE ||
180 property == SubtitleContentProperty::FONTS ||
181 property == VideoContentProperty::CROP ||
182 property == VideoContentProperty::SCALE ||
183 property == VideoContentProperty::FADE_IN ||
184 property == VideoContentProperty::FADE_OUT ||
185 property == VideoContentProperty::COLOUR_CONVERSION
193 Player::set_video_container_size (dcp::Size s)
195 _video_container_size = s;
197 _black_image.reset (new Image (AV_PIX_FMT_RGB24, _video_container_size, true));
198 _black_image->make_black ();
202 Player::playlist_changed ()
204 _have_valid_pieces = false;
209 Player::film_changed (Film::Property p)
211 /* Here we should notice Film properties that affect our output, and
212 alert listeners that our output now would be different to how it was
213 last time we were run.
216 if (p == Film::CONTAINER) {
218 } else if (p == Film::VIDEO_FRAME_RATE) {
219 /* Pieces contain a FrameRateChange which contains the DCP frame rate,
220 so we need new pieces here.
222 _have_valid_pieces = false;
224 } else if (p == Film::AUDIO_PROCESSOR) {
225 if (_film->audio_processor ()) {
226 _audio_processor = _film->audio_processor()->clone (_film->audio_frame_rate ());
232 Player::transform_image_subtitles (list<ImageSubtitle> subs) const
234 list<PositionImage> all;
236 for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
241 /* We will scale the subtitle up to fit _video_container_size */
242 dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);
244 /* Then we need a corrective translation, consisting of two parts:
246 * 1. that which is the result of the scaling of the subtitle by _video_container_size; this will be
247 * rect.x * _video_container_size.width and rect.y * _video_container_size.height.
249 * 2. that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
250 * (width_before_subtitle_scale * (1 - subtitle_x_scale) / 2) and
251 * (height_before_subtitle_scale * (1 - subtitle_y_scale) / 2).
253 * Combining these two translations gives these expressions.
260 dcp::YUV_TO_RGB_REC601,
261 i->image->pixel_format (),
266 lrint (_video_container_size.width * i->rectangle.x),
267 lrint (_video_container_size.height * i->rectangle.y)
276 shared_ptr<PlayerVideo>
277 Player::black_player_video_frame (DCPTime time) const
279 return shared_ptr<PlayerVideo> (
281 shared_ptr<const ImageProxy> (new RawImageProxy (_black_image)),
285 _video_container_size,
286 _video_container_size,
289 PresetColourConversion::all().front().conversion
294 /** @return All PlayerVideos at the given time. There may be none if the content
295 * at `time' is a DCP which we are passing through (i.e. referring to by reference)
296 * or 2 if we have 3D.
298 list<shared_ptr<PlayerVideo> >
299 Player::get_video (DCPTime time, bool accurate)
301 if (!_have_valid_pieces) {
305 /* Find subtitles for possible burn-in */
307 PlayerSubtitles ps = get_subtitles (time, DCPTime::from_frames (1, _film->video_frame_rate ()), false, true, accurate);
309 list<PositionImage> sub_images;
311 /* Image subtitles */
312 list<PositionImage> c = transform_image_subtitles (ps.image);
313 copy (c.begin(), c.end(), back_inserter (sub_images));
315 /* Text subtitles (rendered to an image) */
316 if (!ps.text.empty ()) {
317 list<PositionImage> s = render_subtitles (ps.text, ps.fonts, _video_container_size);
318 copy (s.begin (), s.end (), back_inserter (sub_images));
321 optional<PositionImage> subtitles;
322 if (!sub_images.empty ()) {
323 subtitles = merge (sub_images);
326 /* Find pieces containing video which is happening now */
328 list<shared_ptr<Piece> > ov = overlaps (
330 time + DCPTime::from_frames (1, _film->video_frame_rate ()),
334 list<shared_ptr<PlayerVideo> > pvf;
337 /* No video content at this time */
338 pvf.push_back (black_player_video_frame (time));
340 /* Some video content at this time */
341 shared_ptr<Piece> last = *(ov.rbegin ());
342 VideoFrameType const last_type = last->content->video->frame_type ();
344 /* Get video from appropriate piece(s) */
345 BOOST_FOREACH (shared_ptr<Piece> piece, ov) {
347 shared_ptr<VideoDecoder> decoder = piece->decoder->video;
348 DCPOMATIC_ASSERT (decoder);
350 shared_ptr<DCPContent> dcp_content = dynamic_pointer_cast<DCPContent> (piece->content);
351 if (dcp_content && dcp_content->reference_video () && !_play_referenced) {
356 /* always use the last video */
358 /* with a corresponding L/R eye if appropriate */
359 (last_type == VIDEO_FRAME_TYPE_3D_LEFT && piece->content->video->frame_type() == VIDEO_FRAME_TYPE_3D_RIGHT) ||
360 (last_type == VIDEO_FRAME_TYPE_3D_RIGHT && piece->content->video->frame_type() == VIDEO_FRAME_TYPE_3D_LEFT);
363 /* We want to use this piece */
364 list<ContentVideo> content_video = decoder->get (dcp_to_content_video (piece, time), accurate);
365 if (content_video.empty ()) {
366 pvf.push_back (black_player_video_frame (time));
368 dcp::Size image_size = piece->content->video->scale().size (
369 piece->content->video, _video_container_size, _film->frame_size ()
372 for (list<ContentVideo>::const_iterator i = content_video.begin(); i != content_video.end(); ++i) {
374 shared_ptr<PlayerVideo> (
377 content_video_to_dcp (piece, i->frame),
378 piece->content->video->crop (),
379 piece->content->video->fade (i->frame),
381 _video_container_size,
384 piece->content->video->colour_conversion ()
391 /* Discard unused video */
392 decoder->get (dcp_to_content_video (piece, time), accurate);
398 BOOST_FOREACH (shared_ptr<PlayerVideo> p, pvf) {
399 p->set_subtitle (subtitles.get ());
406 /** @return Audio data or 0 if the only audio data here is referenced DCP data */
407 shared_ptr<AudioBuffers>
408 Player::get_audio (DCPTime time, DCPTime length, bool accurate)
410 if (!_have_valid_pieces) {
414 Frame const length_frames = length.frames_round (_film->audio_frame_rate ());
416 shared_ptr<AudioBuffers> audio (new AudioBuffers (_film->audio_channels(), length_frames));
417 audio->make_silent ();
419 list<shared_ptr<Piece> > ov = overlaps (time, time + length, has_audio);
424 bool all_referenced = true;
425 BOOST_FOREACH (shared_ptr<Piece> i, ov) {
426 shared_ptr<DCPContent> dcp_content = dynamic_pointer_cast<DCPContent> (i->content);
427 if (i->content->audio && (!dcp_content || !dcp_content->reference_audio ())) {
428 /* There is audio content which is not from a DCP or not set to be referenced */
429 all_referenced = false;
433 if (all_referenced && !_play_referenced) {
434 return shared_ptr<AudioBuffers> ();
437 BOOST_FOREACH (shared_ptr<Piece> i, ov) {
439 DCPOMATIC_ASSERT (i->content->audio);
440 shared_ptr<AudioDecoder> decoder = i->decoder->audio;
441 DCPOMATIC_ASSERT (decoder);
443 /* The time that we should request from the content */
444 DCPTime request = time - DCPTime::from_seconds (i->content->audio->delay() / 1000.0);
445 Frame request_frames = length_frames;
447 if (request < DCPTime ()) {
448 /* We went off the start of the content, so we will need to offset
449 the stuff we get back.
452 request_frames += request.frames_round (_film->audio_frame_rate ());
453 if (request_frames < 0) {
456 request = DCPTime ();
459 Frame const content_frame = dcp_to_resampled_audio (i, request);
461 BOOST_FOREACH (AudioStreamPtr j, i->content->audio->streams ()) {
463 if (j->channels() == 0) {
464 /* Some content (e.g. DCPs) can have streams with no channels */
468 /* Audio from this piece's decoder stream (which might be more or less than what we asked for) */
469 ContentAudio all = decoder->get (j, content_frame, request_frames, accurate);
472 if (i->content->audio->gain() != 0) {
473 shared_ptr<AudioBuffers> gain (new AudioBuffers (all.audio));
474 gain->apply_gain (i->content->audio->gain ());
479 shared_ptr<AudioBuffers> dcp_mapped (new AudioBuffers (_film->audio_channels(), all.audio->frames()));
480 dcp_mapped->make_silent ();
481 AudioMapping map = j->mapping ();
482 for (int i = 0; i < map.input_channels(); ++i) {
483 for (int j = 0; j < _film->audio_channels(); ++j) {
484 if (map.get (i, j) > 0) {
485 dcp_mapped->accumulate_channel (
495 if (_audio_processor) {
496 dcp_mapped = _audio_processor->run (dcp_mapped, _film->audio_channels ());
499 all.audio = dcp_mapped;
501 audio->accumulate_frames (
503 content_frame - all.frame,
504 offset.frames_round (_film->audio_frame_rate()),
505 min (Frame (all.audio->frames()), request_frames)
514 Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
516 DCPTime s = t - piece->content->position ();
517 s = min (piece->content->length_after_trim(), s);
518 s = max (DCPTime(), s + DCPTime (piece->content->trim_start(), piece->frc));
520 /* It might seem more logical here to convert s to a ContentTime (using the FrameRateChange)
521 then convert that ContentTime to frames at the content's rate. However this fails for
522 situations like content at 29.9978733fps, DCP at 30fps. The accuracy of the Time type is not
523 enough to distinguish between the two with low values of time (e.g. 3200 in Time units).
525 Instead we convert the DCPTime using the DCP video rate then account for any skip/repeat.
527 return s.frames_floor (piece->frc.dcp) / piece->frc.factor ();
531 Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
533 /* See comment in dcp_to_content_video */
534 DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime (piece->content->trim_start (), piece->frc);
535 return max (DCPTime (), d + piece->content->position ());
539 Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
541 DCPTime s = t - piece->content->position ();
542 s = min (piece->content->length_after_trim(), s);
543 /* See notes in dcp_to_content_video */
544 return max (DCPTime (), DCPTime (piece->content->trim_start (), piece->frc) + s).frames_floor (_film->audio_frame_rate ());
548 Player::dcp_to_content_subtitle (shared_ptr<const Piece> piece, DCPTime t) const
550 DCPTime s = t - piece->content->position ();
551 s = min (piece->content->length_after_trim(), s);
552 return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
556 Player::content_subtitle_to_dcp (shared_ptr<const Piece> piece, ContentTime t) const
558 return max (DCPTime (), DCPTime (t - piece->content->trim_start(), piece->frc) + piece->content->position());
561 /** @param burnt true to return only subtitles to be burnt, false to return only
562 * subtitles that should not be burnt. This parameter will be ignored if
563 * _always_burn_subtitles is true; in this case, all subtitles will be returned.
566 Player::get_subtitles (DCPTime time, DCPTime length, bool starting, bool burnt, bool accurate)
568 list<shared_ptr<Piece> > subs = overlaps (time, time + length, has_subtitle);
570 PlayerSubtitles ps (time, length);
572 for (list<shared_ptr<Piece> >::const_iterator j = subs.begin(); j != subs.end(); ++j) {
573 if (!(*j)->content->subtitle->use () || (!_always_burn_subtitles && (burnt != (*j)->content->subtitle->burn ()))) {
577 shared_ptr<DCPContent> dcp_content = dynamic_pointer_cast<DCPContent> ((*j)->content);
578 if (dcp_content && dcp_content->reference_subtitle () && !_play_referenced) {
582 shared_ptr<SubtitleDecoder> subtitle_decoder = (*j)->decoder->subtitle;
583 ContentTime const from = dcp_to_content_subtitle (*j, time);
584 /* XXX: this video_frame_rate() should be the rate that the subtitle content has been prepared for */
585 ContentTime const to = from + ContentTime::from_frames (1, _film->video_frame_rate ());
587 list<ContentImageSubtitle> image = subtitle_decoder->get_image (ContentTimePeriod (from, to), starting, accurate);
588 for (list<ContentImageSubtitle>::iterator i = image.begin(); i != image.end(); ++i) {
590 /* Apply content's subtitle offsets */
591 i->sub.rectangle.x += (*j)->content->subtitle->x_offset ();
592 i->sub.rectangle.y += (*j)->content->subtitle->y_offset ();
594 /* Apply content's subtitle scale */
595 i->sub.rectangle.width *= (*j)->content->subtitle->x_scale ();
596 i->sub.rectangle.height *= (*j)->content->subtitle->y_scale ();
598 /* Apply a corrective translation to keep the subtitle centred after that scale */
599 i->sub.rectangle.x -= i->sub.rectangle.width * ((*j)->content->subtitle->x_scale() - 1);
600 i->sub.rectangle.y -= i->sub.rectangle.height * ((*j)->content->subtitle->y_scale() - 1);
602 ps.image.push_back (i->sub);
605 list<ContentTextSubtitle> text = subtitle_decoder->get_text (ContentTimePeriod (from, to), starting, accurate);
606 BOOST_FOREACH (ContentTextSubtitle& ts, text) {
607 BOOST_FOREACH (dcp::SubtitleString s, ts.subs) {
608 s.set_h_position (s.h_position() + (*j)->content->subtitle->x_offset ());
609 s.set_v_position (s.v_position() + (*j)->content->subtitle->y_offset ());
610 float const xs = (*j)->content->subtitle->x_scale();
611 float const ys = (*j)->content->subtitle->y_scale();
612 float size = s.size();
614 /* Adjust size to express the common part of the scaling;
615 e.g. if xs = ys = 0.5 we scale size by 2.
617 if (xs > 1e-5 && ys > 1e-5) {
618 size *= 1 / min (1 / xs, 1 / ys);
622 /* Then express aspect ratio changes */
623 if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) {
624 s.set_aspect_adjust (xs / ys);
626 s.set_in (dcp::Time(content_subtitle_to_dcp (*j, ts.period().from).seconds(), 1000));
627 s.set_out (dcp::Time(content_subtitle_to_dcp (*j, ts.period().to).seconds(), 1000));
628 ps.text.push_back (s);
629 ps.add_fonts ((*j)->content->subtitle->fonts ());
637 list<shared_ptr<Font> >
638 Player::get_subtitle_fonts ()
640 if (!_have_valid_pieces) {
644 list<shared_ptr<Font> > fonts;
645 BOOST_FOREACH (shared_ptr<Piece>& p, _pieces) {
646 if (p->content->subtitle) {
647 /* XXX: things may go wrong if there are duplicate font IDs
648 with different font files.
650 list<shared_ptr<Font> > f = p->content->subtitle->fonts ();
651 copy (f.begin(), f.end(), back_inserter (fonts));
658 /** Set this player never to produce any video data */
660 Player::set_ignore_video ()
662 _ignore_video = true;
665 /** Set this player never to produce any audio data */
667 Player::set_ignore_audio ()
669 _ignore_audio = true;
672 /** Set whether or not this player should always burn text subtitles into the image,
673 * regardless of the content settings.
674 * @param burn true to always burn subtitles, false to obey content settings.
677 Player::set_always_burn_subtitles (bool burn)
679 _always_burn_subtitles = burn;
686 _have_valid_pieces = false;
690 Player::set_play_referenced ()
692 _play_referenced = true;
693 _have_valid_pieces = false;
696 list<ReferencedReelAsset>
697 Player::get_reel_assets ()
699 list<ReferencedReelAsset> a;
701 BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
702 shared_ptr<DCPContent> j = dynamic_pointer_cast<DCPContent> (i);
707 scoped_ptr<DCPDecoder> decoder;
709 decoder.reset (new DCPDecoder (j, _film->log(), false));
715 BOOST_FOREACH (shared_ptr<dcp::Reel> k, decoder->reels()) {
716 DCPTime const from = i->position() + DCPTime::from_frames (offset, _film->video_frame_rate());
717 if (j->reference_video ()) {
719 ReferencedReelAsset (
721 DCPTimePeriod (from, from + DCPTime::from_frames (k->main_picture()->duration(), _film->video_frame_rate()))
726 if (j->reference_audio ()) {
728 ReferencedReelAsset (
730 DCPTimePeriod (from, from + DCPTime::from_frames (k->main_sound()->duration(), _film->video_frame_rate()))
735 if (j->reference_subtitle ()) {
736 DCPOMATIC_ASSERT (k->main_subtitle ());
738 ReferencedReelAsset (
740 DCPTimePeriod (from, from + DCPTime::from_frames (k->main_subtitle()->duration(), _film->video_frame_rate()))
745 /* Assume that main picture duration is the length of the reel */
746 offset += k->main_picture()->duration ();
753 list<shared_ptr<Piece> >
754 Player::overlaps (DCPTime from, DCPTime to, boost::function<bool (Content *)> valid)
756 if (!_have_valid_pieces) {
760 list<shared_ptr<Piece> > overlaps;
761 BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
762 if (valid (i->content.get ()) && i->content->position() < to && i->content->end() > from) {
763 overlaps.push_back (i);