/*
    Copyright (C) 2013-2016 Carl Hetherington <cth@carlh.net>

    This file is part of DCP-o-matic.

    DCP-o-matic is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    DCP-o-matic is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.

*/
#include "audio_buffers.h"
#include "audio_content.h"
#include "audio_decoder.h"
#include "audio_processor.h"
#include "content_audio.h"
#include "content_subtitle.h"
#include "content_video.h"
#include "dcp_content.h"
#include "dcp_decoder.h"
#include "decoder_factory.h"
#include "ffmpeg_content.h"
#include "frame_rate_change.h"
#include "image_decoder.h"
#include "player_video.h"
#include "raw_image_proxy.h"
#include "referenced_reel_asset.h"
#include "render_subtitles.h"
#include "subtitle_content.h"
#include "subtitle_decoder.h"
#include "video_decoder.h"
#include <dcp/reel_picture_asset.h>
#include <dcp/reel_sound_asset.h>
#include <dcp/reel_subtitle_asset.h>
#include <boost/foreach.hpp>
#include <boost/make_shared.hpp>
62 #define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);
74 using boost::shared_ptr;
75 using boost::make_shared;
76 using boost::weak_ptr;
77 using boost::dynamic_pointer_cast;
78 using boost::optional;
79 using boost::scoped_ptr;
82 has_video (Content* c)
84 return static_cast<bool>(c->video);
88 has_audio (Content* c)
90 return static_cast<bool>(c->audio);
94 has_subtitle (Content* c)
96 return static_cast<bool>(c->subtitle);
99 Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
101 , _playlist (playlist)
102 , _have_valid_pieces (false)
103 , _ignore_video (false)
104 , _ignore_audio (false)
105 , _always_burn_subtitles (false)
107 , _play_referenced (false)
109 _film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
110 _playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
111 _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::playlist_content_changed, this, _1, _2, _3));
112 set_video_container_size (_film->frame_size ());
114 film_changed (Film::AUDIO_PROCESSOR);
118 Player::setup_pieces ()
122 BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
124 if (!i->paths_valid ()) {
128 shared_ptr<Decoder> decoder = decoder_factory (i, _film->log(), _fast);
129 FrameRateChange frc (i->active_video_frame_rate(), _film->video_frame_rate());
132 /* Not something that we can decode; e.g. Atmos content */
136 if (decoder->video && _ignore_video) {
137 decoder->video->set_ignore ();
140 if (decoder->audio && _ignore_audio) {
141 decoder->audio->set_ignore ();
144 _pieces.push_back (make_shared<Piece> (i, decoder, frc));
147 _have_valid_pieces = true;
151 Player::playlist_content_changed (weak_ptr<Content> w, int property, bool frequent)
153 shared_ptr<Content> c = w.lock ();
159 property == ContentProperty::POSITION ||
160 property == ContentProperty::LENGTH ||
161 property == ContentProperty::TRIM_START ||
162 property == ContentProperty::TRIM_END ||
163 property == ContentProperty::PATH ||
164 property == VideoContentProperty::FRAME_TYPE ||
165 property == DCPContentProperty::CAN_BE_PLAYED ||
166 property == SubtitleContentProperty::COLOUR ||
167 property == SubtitleContentProperty::OUTLINE ||
168 property == SubtitleContentProperty::OUTLINE_COLOUR ||
169 property == FFmpegContentProperty::SUBTITLE_STREAM
172 _have_valid_pieces = false;
176 property == ContentProperty::VIDEO_FRAME_RATE ||
177 property == SubtitleContentProperty::USE ||
178 property == SubtitleContentProperty::X_OFFSET ||
179 property == SubtitleContentProperty::Y_OFFSET ||
180 property == SubtitleContentProperty::X_SCALE ||
181 property == SubtitleContentProperty::Y_SCALE ||
182 property == SubtitleContentProperty::FONTS ||
183 property == VideoContentProperty::CROP ||
184 property == VideoContentProperty::SCALE ||
185 property == VideoContentProperty::FADE_IN ||
186 property == VideoContentProperty::FADE_OUT ||
187 property == VideoContentProperty::COLOUR_CONVERSION
195 Player::set_video_container_size (dcp::Size s)
197 _video_container_size = s;
199 _black_image.reset (new Image (AV_PIX_FMT_RGB24, _video_container_size, true));
200 _black_image->make_black ();
204 Player::playlist_changed ()
206 _have_valid_pieces = false;
211 Player::film_changed (Film::Property p)
213 /* Here we should notice Film properties that affect our output, and
214 alert listeners that our output now would be different to how it was
215 last time we were run.
218 if (p == Film::CONTAINER) {
220 } else if (p == Film::VIDEO_FRAME_RATE) {
221 /* Pieces contain a FrameRateChange which contains the DCP frame rate,
222 so we need new pieces here.
224 _have_valid_pieces = false;
226 } else if (p == Film::AUDIO_PROCESSOR) {
227 if (_film->audio_processor ()) {
228 _audio_processor = _film->audio_processor()->clone (_film->audio_frame_rate ());
234 Player::transform_image_subtitles (list<ImageSubtitle> subs) const
236 list<PositionImage> all;
238 for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
243 /* We will scale the subtitle up to fit _video_container_size */
244 dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);
246 /* Then we need a corrective translation, consisting of two parts:
248 * 1. that which is the result of the scaling of the subtitle by _video_container_size; this will be
249 * rect.x * _video_container_size.width and rect.y * _video_container_size.height.
251 * 2. that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
252 * (width_before_subtitle_scale * (1 - subtitle_x_scale) / 2) and
253 * (height_before_subtitle_scale * (1 - subtitle_y_scale) / 2).
255 * Combining these two translations gives these expressions.
262 dcp::YUV_TO_RGB_REC601,
263 i->image->pixel_format (),
268 lrint (_video_container_size.width * i->rectangle.x),
269 lrint (_video_container_size.height * i->rectangle.y)
278 shared_ptr<PlayerVideo>
279 Player::black_player_video_frame (DCPTime time) const
281 return shared_ptr<PlayerVideo> (
283 make_shared<RawImageProxy> (_black_image),
287 _video_container_size,
288 _video_container_size,
291 PresetColourConversion::all().front().conversion
296 /** @return All PlayerVideos at the given time. There may be none if the content
297 * at `time' is a DCP which we are passing through (i.e. referring to by reference)
298 * or 2 if we have 3D.
300 list<shared_ptr<PlayerVideo> >
301 Player::get_video (DCPTime time, bool accurate)
303 if (!_have_valid_pieces) {
307 /* Find subtitles for possible burn-in */
309 PlayerSubtitles ps = get_subtitles (time, DCPTime::from_frames (1, _film->video_frame_rate ()), false, true, accurate);
311 list<PositionImage> sub_images;
313 /* Image subtitles */
314 list<PositionImage> c = transform_image_subtitles (ps.image);
315 copy (c.begin(), c.end(), back_inserter (sub_images));
317 /* Text subtitles (rendered to an image) */
318 if (!ps.text.empty ()) {
319 list<PositionImage> s = render_subtitles (ps.text, ps.fonts, _video_container_size);
320 copy (s.begin (), s.end (), back_inserter (sub_images));
323 optional<PositionImage> subtitles;
324 if (!sub_images.empty ()) {
325 subtitles = merge (sub_images);
328 /* Find pieces containing video which is happening now */
330 list<shared_ptr<Piece> > ov = overlaps (
332 time + DCPTime::from_frames (1, _film->video_frame_rate ()),
336 list<shared_ptr<PlayerVideo> > pvf;
339 /* No video content at this time */
340 pvf.push_back (black_player_video_frame (time));
342 /* Some video content at this time */
343 shared_ptr<Piece> last = *(ov.rbegin ());
344 VideoFrameType const last_type = last->content->video->frame_type ();
346 /* Get video from appropriate piece(s) */
347 BOOST_FOREACH (shared_ptr<Piece> piece, ov) {
349 shared_ptr<VideoDecoder> decoder = piece->decoder->video;
350 DCPOMATIC_ASSERT (decoder);
352 shared_ptr<DCPContent> dcp_content = dynamic_pointer_cast<DCPContent> (piece->content);
353 if (dcp_content && dcp_content->reference_video () && !_play_referenced) {
358 /* always use the last video */
360 /* with a corresponding L/R eye if appropriate */
361 (last_type == VIDEO_FRAME_TYPE_3D_LEFT && piece->content->video->frame_type() == VIDEO_FRAME_TYPE_3D_RIGHT) ||
362 (last_type == VIDEO_FRAME_TYPE_3D_RIGHT && piece->content->video->frame_type() == VIDEO_FRAME_TYPE_3D_LEFT);
365 /* We want to use this piece */
366 list<ContentVideo> content_video = decoder->get (dcp_to_content_video (piece, time), accurate);
367 if (content_video.empty ()) {
368 pvf.push_back (black_player_video_frame (time));
370 dcp::Size image_size = piece->content->video->scale().size (
371 piece->content->video, _video_container_size, _film->frame_size ()
374 for (list<ContentVideo>::const_iterator i = content_video.begin(); i != content_video.end(); ++i) {
376 shared_ptr<PlayerVideo> (
380 piece->content->video->crop (),
381 piece->content->video->fade (i->frame.index()),
383 _video_container_size,
386 piece->content->video->colour_conversion ()
393 /* Discard unused video */
394 decoder->get (dcp_to_content_video (piece, time), accurate);
400 BOOST_FOREACH (shared_ptr<PlayerVideo> p, pvf) {
401 p->set_subtitle (subtitles.get ());
408 /** @return Audio data or 0 if the only audio data here is referenced DCP data */
409 shared_ptr<AudioBuffers>
410 Player::get_audio (DCPTime time, DCPTime length, bool accurate)
412 if (!_have_valid_pieces) {
416 Frame const length_frames = length.frames_round (_film->audio_frame_rate ());
418 shared_ptr<AudioBuffers> audio = make_shared<AudioBuffers> (_film->audio_channels(), length_frames);
419 audio->make_silent ();
421 list<shared_ptr<Piece> > ov = overlaps (time, time + length, has_audio);
426 bool all_referenced = true;
427 BOOST_FOREACH (shared_ptr<Piece> i, ov) {
428 shared_ptr<DCPContent> dcp_content = dynamic_pointer_cast<DCPContent> (i->content);
429 if (i->content->audio && (!dcp_content || !dcp_content->reference_audio ())) {
430 /* There is audio content which is not from a DCP or not set to be referenced */
431 all_referenced = false;
435 if (all_referenced && !_play_referenced) {
436 return shared_ptr<AudioBuffers> ();
439 BOOST_FOREACH (shared_ptr<Piece> i, ov) {
441 DCPOMATIC_ASSERT (i->content->audio);
442 shared_ptr<AudioDecoder> decoder = i->decoder->audio;
443 DCPOMATIC_ASSERT (decoder);
445 /* The time that we should request from the content */
446 DCPTime request = time - DCPTime::from_seconds (i->content->audio->delay() / 1000.0);
447 Frame request_frames = length_frames;
449 if (request < DCPTime ()) {
450 /* We went off the start of the content, so we will need to offset
451 the stuff we get back.
454 request_frames += request.frames_round (_film->audio_frame_rate ());
455 if (request_frames < 0) {
458 request = DCPTime ();
461 Frame const content_frame = dcp_to_resampled_audio (i, request);
463 BOOST_FOREACH (AudioStreamPtr j, i->content->audio->streams ()) {
465 if (j->channels() == 0) {
466 /* Some content (e.g. DCPs) can have streams with no channels */
470 /* Audio from this piece's decoder stream (which might be more or less than what we asked for) */
471 ContentAudio all = decoder->get (j, content_frame, request_frames, accurate);
474 if (i->content->audio->gain() != 0) {
475 shared_ptr<AudioBuffers> gain = make_shared<AudioBuffers> (all.audio);
476 gain->apply_gain (i->content->audio->gain ());
481 shared_ptr<AudioBuffers> dcp_mapped = make_shared<AudioBuffers> (_film->audio_channels(), all.audio->frames());
482 dcp_mapped->make_silent ();
483 AudioMapping map = j->mapping ();
484 for (int i = 0; i < map.input_channels(); ++i) {
485 for (int j = 0; j < _film->audio_channels(); ++j) {
486 if (map.get (i, j) > 0) {
487 dcp_mapped->accumulate_channel (
497 if (_audio_processor) {
498 dcp_mapped = _audio_processor->run (dcp_mapped, _film->audio_channels ());
501 all.audio = dcp_mapped;
503 audio->accumulate_frames (
505 content_frame - all.frame,
506 offset.frames_round (_film->audio_frame_rate()),
507 min (Frame (all.audio->frames()), request_frames)
516 Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
518 DCPTime s = t - piece->content->position ();
519 s = min (piece->content->length_after_trim(), s);
520 s = max (DCPTime(), s + DCPTime (piece->content->trim_start(), piece->frc));
522 /* It might seem more logical here to convert s to a ContentTime (using the FrameRateChange)
523 then convert that ContentTime to frames at the content's rate. However this fails for
524 situations like content at 29.9978733fps, DCP at 30fps. The accuracy of the Time type is not
525 enough to distinguish between the two with low values of time (e.g. 3200 in Time units).
527 Instead we convert the DCPTime using the DCP video rate then account for any skip/repeat.
529 return s.frames_floor (piece->frc.dcp) / piece->frc.factor ();
533 Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
535 /* See comment in dcp_to_content_video */
536 DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime (piece->content->trim_start (), piece->frc);
537 return max (DCPTime (), d + piece->content->position ());
541 Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
543 DCPTime s = t - piece->content->position ();
544 s = min (piece->content->length_after_trim(), s);
545 /* See notes in dcp_to_content_video */
546 return max (DCPTime (), DCPTime (piece->content->trim_start (), piece->frc) + s).frames_floor (_film->audio_frame_rate ());
550 Player::dcp_to_content_subtitle (shared_ptr<const Piece> piece, DCPTime t) const
552 DCPTime s = t - piece->content->position ();
553 s = min (piece->content->length_after_trim(), s);
554 return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
558 Player::content_subtitle_to_dcp (shared_ptr<const Piece> piece, ContentTime t) const
560 return max (DCPTime (), DCPTime (t - piece->content->trim_start(), piece->frc) + piece->content->position());
563 /** @param burnt true to return only subtitles to be burnt, false to return only
564 * subtitles that should not be burnt. This parameter will be ignored if
565 * _always_burn_subtitles is true; in this case, all subtitles will be returned.
568 Player::get_subtitles (DCPTime time, DCPTime length, bool starting, bool burnt, bool accurate)
570 list<shared_ptr<Piece> > subs = overlaps (time, time + length, has_subtitle);
572 PlayerSubtitles ps (time, length);
574 for (list<shared_ptr<Piece> >::const_iterator j = subs.begin(); j != subs.end(); ++j) {
575 if (!(*j)->content->subtitle->use () || (!_always_burn_subtitles && (burnt != (*j)->content->subtitle->burn ()))) {
579 shared_ptr<DCPContent> dcp_content = dynamic_pointer_cast<DCPContent> ((*j)->content);
580 if (dcp_content && dcp_content->reference_subtitle () && !_play_referenced) {
584 shared_ptr<SubtitleDecoder> subtitle_decoder = (*j)->decoder->subtitle;
585 ContentTime const from = dcp_to_content_subtitle (*j, time);
586 /* XXX: this video_frame_rate() should be the rate that the subtitle content has been prepared for */
587 ContentTime const to = from + ContentTime::from_frames (1, _film->video_frame_rate ());
589 list<ContentImageSubtitle> image = subtitle_decoder->get_image (ContentTimePeriod (from, to), starting, accurate);
590 for (list<ContentImageSubtitle>::iterator i = image.begin(); i != image.end(); ++i) {
592 /* Apply content's subtitle offsets */
593 i->sub.rectangle.x += (*j)->content->subtitle->x_offset ();
594 i->sub.rectangle.y += (*j)->content->subtitle->y_offset ();
596 /* Apply content's subtitle scale */
597 i->sub.rectangle.width *= (*j)->content->subtitle->x_scale ();
598 i->sub.rectangle.height *= (*j)->content->subtitle->y_scale ();
600 /* Apply a corrective translation to keep the subtitle centred after that scale */
601 i->sub.rectangle.x -= i->sub.rectangle.width * ((*j)->content->subtitle->x_scale() - 1);
602 i->sub.rectangle.y -= i->sub.rectangle.height * ((*j)->content->subtitle->y_scale() - 1);
604 ps.image.push_back (i->sub);
607 list<ContentTextSubtitle> text = subtitle_decoder->get_text (ContentTimePeriod (from, to), starting, accurate);
608 BOOST_FOREACH (ContentTextSubtitle& ts, text) {
609 BOOST_FOREACH (dcp::SubtitleString s, ts.subs) {
610 s.set_h_position (s.h_position() + (*j)->content->subtitle->x_offset ());
611 s.set_v_position (s.v_position() + (*j)->content->subtitle->y_offset ());
612 float const xs = (*j)->content->subtitle->x_scale();
613 float const ys = (*j)->content->subtitle->y_scale();
614 float size = s.size();
616 /* Adjust size to express the common part of the scaling;
617 e.g. if xs = ys = 0.5 we scale size by 2.
619 if (xs > 1e-5 && ys > 1e-5) {
620 size *= 1 / min (1 / xs, 1 / ys);
624 /* Then express aspect ratio changes */
625 if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) {
626 s.set_aspect_adjust (xs / ys);
628 s.set_in (dcp::Time(content_subtitle_to_dcp (*j, ts.period().from).seconds(), 1000));
629 s.set_out (dcp::Time(content_subtitle_to_dcp (*j, ts.period().to).seconds(), 1000));
630 ps.text.push_back (s);
631 ps.add_fonts ((*j)->content->subtitle->fonts ());
639 list<shared_ptr<Font> >
640 Player::get_subtitle_fonts ()
642 if (!_have_valid_pieces) {
646 list<shared_ptr<Font> > fonts;
647 BOOST_FOREACH (shared_ptr<Piece>& p, _pieces) {
648 if (p->content->subtitle) {
649 /* XXX: things may go wrong if there are duplicate font IDs
650 with different font files.
652 list<shared_ptr<Font> > f = p->content->subtitle->fonts ();
653 copy (f.begin(), f.end(), back_inserter (fonts));
660 /** Set this player never to produce any video data */
662 Player::set_ignore_video ()
664 _ignore_video = true;
667 /** Set this player never to produce any audio data */
669 Player::set_ignore_audio ()
671 _ignore_audio = true;
674 /** Set whether or not this player should always burn text subtitles into the image,
675 * regardless of the content settings.
676 * @param burn true to always burn subtitles, false to obey content settings.
679 Player::set_always_burn_subtitles (bool burn)
681 _always_burn_subtitles = burn;
688 _have_valid_pieces = false;
692 Player::set_play_referenced ()
694 _play_referenced = true;
695 _have_valid_pieces = false;
698 list<ReferencedReelAsset>
699 Player::get_reel_assets ()
701 list<ReferencedReelAsset> a;
703 BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
704 shared_ptr<DCPContent> j = dynamic_pointer_cast<DCPContent> (i);
709 scoped_ptr<DCPDecoder> decoder;
711 decoder.reset (new DCPDecoder (j, _film->log(), false));
717 BOOST_FOREACH (shared_ptr<dcp::Reel> k, decoder->reels()) {
718 DCPTime const from = i->position() + DCPTime::from_frames (offset, _film->video_frame_rate());
719 if (j->reference_video ()) {
721 ReferencedReelAsset (
723 DCPTimePeriod (from, from + DCPTime::from_frames (k->main_picture()->duration(), _film->video_frame_rate()))
728 if (j->reference_audio ()) {
730 ReferencedReelAsset (
732 DCPTimePeriod (from, from + DCPTime::from_frames (k->main_sound()->duration(), _film->video_frame_rate()))
737 if (j->reference_subtitle ()) {
738 DCPOMATIC_ASSERT (k->main_subtitle ());
740 ReferencedReelAsset (
742 DCPTimePeriod (from, from + DCPTime::from_frames (k->main_subtitle()->duration(), _film->video_frame_rate()))
747 /* Assume that main picture duration is the length of the reel */
748 offset += k->main_picture()->duration ();
755 list<shared_ptr<Piece> >
756 Player::overlaps (DCPTime from, DCPTime to, boost::function<bool (Content *)> valid)
758 if (!_have_valid_pieces) {
762 list<shared_ptr<Piece> > overlaps;
763 BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
764 if (valid (i->content.get ()) && i->content->position() < to && i->content->end() > from) {
765 overlaps.push_back (i);