Use make_shared<>.
[dcpomatic.git] / src / lib / player.cc
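The change replaces direct shared_ptr<T> (new T (...)) construction with boost::make_shared<T> (...) where a plain constructor call is being wrapped; make_shared allocates the object and its reference-count block in a single allocation. A minimal sketch of the pattern, using one of the call sites from this file:

    // before
    shared_ptr<AudioBuffers> audio (new AudioBuffers (_film->audio_channels(), length_frames));
    // after
    shared_ptr<AudioBuffers> audio = make_shared<AudioBuffers> (_film->audio_channels(), length_frames);

Some constructions (e.g. the PlayerVideo instances below) are left using new.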
/*
    Copyright (C) 2013-2016 Carl Hetherington <cth@carlh.net>

    This file is part of DCP-o-matic.

    DCP-o-matic is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    DCP-o-matic is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.

*/

#include "player.h"
#include "film.h"
#include "audio_buffers.h"
#include "content_audio.h"
#include "dcp_content.h"
#include "job.h"
#include "image.h"
#include "raw_image_proxy.h"
#include "ratio.h"
#include "log.h"
#include "render_subtitles.h"
#include "config.h"
#include "content_video.h"
#include "player_video.h"
#include "frame_rate_change.h"
#include "audio_processor.h"
#include "playlist.h"
#include "referenced_reel_asset.h"
#include "decoder_factory.h"
#include "decoder.h"
#include "video_decoder.h"
#include "audio_decoder.h"
#include "subtitle_content.h"
#include "subtitle_decoder.h"
#include "ffmpeg_content.h"
#include "audio_content.h"
#include "content_subtitle.h"
#include "dcp_decoder.h"
#include "image_decoder.h"
#include <dcp/reel.h>
#include <dcp/reel_sound_asset.h>
#include <dcp/reel_subtitle_asset.h>
#include <dcp/reel_picture_asset.h>
#include <boost/foreach.hpp>
#include <boost/make_shared.hpp>
#include <stdint.h>
#include <algorithm>
#include <iostream>

#include "i18n.h"

#define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);

using std::list;
using std::cout;
using std::min;
using std::max;
using std::vector;
using std::pair;
using std::map;
using std::make_pair;
using std::copy;
using boost::shared_ptr;
using boost::make_shared;
using boost::weak_ptr;
using boost::dynamic_pointer_cast;
using boost::optional;
using boost::scoped_ptr;

static bool
has_video (Content* c)
{
        return static_cast<bool>(c->video);
}

static bool
has_audio (Content* c)
{
        return static_cast<bool>(c->audio);
}

static bool
has_subtitle (Content* c)
{
        return static_cast<bool>(c->subtitle);
}

Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
        : _film (film)
        , _playlist (playlist)
        , _have_valid_pieces (false)
        , _ignore_video (false)
        , _ignore_audio (false)
        , _always_burn_subtitles (false)
        , _fast (false)
        , _play_referenced (false)
{
        _film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
        _playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
        _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::playlist_content_changed, this, _1, _2, _3));
        set_video_container_size (_film->frame_size ());

        film_changed (Film::AUDIO_PROCESSOR);
}

void
Player::setup_pieces ()
{
        _pieces.clear ();

        BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {

                if (!i->paths_valid ()) {
                        continue;
                }

                shared_ptr<Decoder> decoder = decoder_factory (i, _film->log(), _fast);
                FrameRateChange frc (i->active_video_frame_rate(), _film->video_frame_rate());

                if (!decoder) {
                        /* Not something that we can decode; e.g. Atmos content */
                        continue;
                }

                if (decoder->video && _ignore_video) {
                        decoder->video->set_ignore ();
                }

                if (decoder->audio && _ignore_audio) {
                        decoder->audio->set_ignore ();
                }

                _pieces.push_back (make_shared<Piece> (i, decoder, frc));
        }

        _have_valid_pieces = true;
}

void
Player::playlist_content_changed (weak_ptr<Content> w, int property, bool frequent)
{
        shared_ptr<Content> c = w.lock ();
        if (!c) {
                return;
        }

        if (
                property == ContentProperty::POSITION ||
                property == ContentProperty::LENGTH ||
                property == ContentProperty::TRIM_START ||
                property == ContentProperty::TRIM_END ||
                property == ContentProperty::PATH ||
                property == VideoContentProperty::FRAME_TYPE ||
                property == DCPContentProperty::CAN_BE_PLAYED ||
                property == SubtitleContentProperty::COLOUR ||
                property == SubtitleContentProperty::OUTLINE ||
                property == SubtitleContentProperty::OUTLINE_COLOUR ||
                property == FFmpegContentProperty::SUBTITLE_STREAM
                ) {

                _have_valid_pieces = false;
                Changed (frequent);

        } else if (
                property == ContentProperty::VIDEO_FRAME_RATE ||
                property == SubtitleContentProperty::USE ||
                property == SubtitleContentProperty::X_OFFSET ||
                property == SubtitleContentProperty::Y_OFFSET ||
                property == SubtitleContentProperty::X_SCALE ||
                property == SubtitleContentProperty::Y_SCALE ||
                property == SubtitleContentProperty::FONTS ||
                property == VideoContentProperty::CROP ||
                property == VideoContentProperty::SCALE ||
                property == VideoContentProperty::FADE_IN ||
                property == VideoContentProperty::FADE_OUT ||
                property == VideoContentProperty::COLOUR_CONVERSION
                ) {

                Changed (frequent);
        }
}

void
Player::set_video_container_size (dcp::Size s)
{
        _video_container_size = s;

        _black_image.reset (new Image (AV_PIX_FMT_RGB24, _video_container_size, true));
        _black_image->make_black ();
}

void
Player::playlist_changed ()
{
        _have_valid_pieces = false;
        Changed (false);
}

void
Player::film_changed (Film::Property p)
{
        /* Here we should notice Film properties that affect our output, and
           alert listeners that our output now would be different to how it was
           last time we were run.
        */

        if (p == Film::CONTAINER) {
                Changed (false);
        } else if (p == Film::VIDEO_FRAME_RATE) {
                /* Pieces contain a FrameRateChange which contains the DCP frame rate,
                   so we need new pieces here.
                */
                _have_valid_pieces = false;
                Changed (false);
        } else if (p == Film::AUDIO_PROCESSOR) {
                if (_film->audio_processor ()) {
                        _audio_processor = _film->audio_processor()->clone (_film->audio_frame_rate ());
                }
        }
}

list<PositionImage>
Player::transform_image_subtitles (list<ImageSubtitle> subs) const
{
        list<PositionImage> all;

        for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
                if (!i->image) {
                        continue;
                }

                /* We will scale the subtitle up to fit _video_container_size */
                dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);

                /* Then we need a corrective translation, consisting of two parts:
                 *
                 * 1.  that which is the result of the scaling of the subtitle by _video_container_size; this will be
                 *     rect.x * _video_container_size.width and rect.y * _video_container_size.height.
                 *
                 * 2.  that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
                 *     (width_before_subtitle_scale * (1 - subtitle_x_scale) / 2) and
                 *     (height_before_subtitle_scale * (1 - subtitle_y_scale) / 2).
                 *
                 * Combining these two translations gives these expressions.
                 */
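
                /* A worked example (illustrative numbers, not from any particular film): with
                 * _video_container_size of 1998x1080 and a subtitle rectangle of
                 * (x 0.1, y 0.2, width 0.5, height 0.3), the subtitle image is scaled to
                 * 999x324 and positioned at (200, 216) in the container.
                 */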

                all.push_back (
                        PositionImage (
                                i->image->scale (
                                        scaled_size,
                                        dcp::YUV_TO_RGB_REC601,
                                        i->image->pixel_format (),
                                        true,
                                        _fast
                                        ),
                                Position<int> (
                                        lrint (_video_container_size.width * i->rectangle.x),
                                        lrint (_video_container_size.height * i->rectangle.y)
                                        )
                                )
                        );
        }

        return all;
}

shared_ptr<PlayerVideo>
Player::black_player_video_frame (DCPTime time) const
{
        return shared_ptr<PlayerVideo> (
                new PlayerVideo (
                        make_shared<RawImageProxy> (_black_image),
                        time,
                        Crop (),
                        optional<double> (),
                        _video_container_size,
                        _video_container_size,
                        EYES_BOTH,
                        PART_WHOLE,
                        PresetColourConversion::all().front().conversion
                )
        );
}

/** @return All PlayerVideos at the given time.  There may be none if the content
 *  at `time' is a DCP which we are passing through (i.e. referring to by reference)
 *  or 2 if we have 3D.
 */
list<shared_ptr<PlayerVideo> >
Player::get_video (DCPTime time, bool accurate)
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        /* Find subtitles for possible burn-in */

        PlayerSubtitles ps = get_subtitles (time, DCPTime::from_frames (1, _film->video_frame_rate ()), false, true, accurate);

        list<PositionImage> sub_images;

        /* Image subtitles */
        list<PositionImage> c = transform_image_subtitles (ps.image);
        copy (c.begin(), c.end(), back_inserter (sub_images));

        /* Text subtitles (rendered to an image) */
        if (!ps.text.empty ()) {
                list<PositionImage> s = render_subtitles (ps.text, ps.fonts, _video_container_size);
                copy (s.begin (), s.end (), back_inserter (sub_images));
        }

        optional<PositionImage> subtitles;
        if (!sub_images.empty ()) {
                subtitles = merge (sub_images);
        }

        /* Find pieces containing video which is happening now */

        list<shared_ptr<Piece> > ov = overlaps (
                time,
                time + DCPTime::from_frames (1, _film->video_frame_rate ()),
                &has_video
                );

        list<shared_ptr<PlayerVideo> > pvf;

        if (ov.empty ()) {
                /* No video content at this time */
                pvf.push_back (black_player_video_frame (time));
        } else {
                /* Some video content at this time */
                shared_ptr<Piece> last = *(ov.rbegin ());
                VideoFrameType const last_type = last->content->video->frame_type ();

                /* Get video from appropriate piece(s) */
                BOOST_FOREACH (shared_ptr<Piece> piece, ov) {

                        shared_ptr<VideoDecoder> decoder = piece->decoder->video;
                        DCPOMATIC_ASSERT (decoder);

                        shared_ptr<DCPContent> dcp_content = dynamic_pointer_cast<DCPContent> (piece->content);
                        if (dcp_content && dcp_content->reference_video () && !_play_referenced) {
                                continue;
                        }

                        bool const use =
                                /* always use the last video */
                                piece == last ||
                                /* with a corresponding L/R eye if appropriate */
                                (last_type == VIDEO_FRAME_TYPE_3D_LEFT && piece->content->video->frame_type() == VIDEO_FRAME_TYPE_3D_RIGHT) ||
                                (last_type == VIDEO_FRAME_TYPE_3D_RIGHT && piece->content->video->frame_type() == VIDEO_FRAME_TYPE_3D_LEFT);

                        if (use) {
                                /* We want to use this piece */
                                list<ContentVideo> content_video = decoder->get (dcp_to_content_video (piece, time), accurate);
                                if (content_video.empty ()) {
                                        pvf.push_back (black_player_video_frame (time));
                                } else {
                                        dcp::Size image_size = piece->content->video->scale().size (
                                                piece->content->video, _video_container_size, _film->frame_size ()
                                                );

                                        for (list<ContentVideo>::const_iterator i = content_video.begin(); i != content_video.end(); ++i) {
                                                pvf.push_back (
                                                        shared_ptr<PlayerVideo> (
                                                                new PlayerVideo (
                                                                        i->image,
                                                                        time,
                                                                        piece->content->video->crop (),
                                                                        piece->content->video->fade (i->frame.index()),
                                                                        image_size,
                                                                        _video_container_size,
                                                                        i->frame.eyes(),
                                                                        i->part,
                                                                        piece->content->video->colour_conversion ()
                                                                        )
                                                                )
                                                        );
                                        }
                                }
                        } else {
                                /* Discard unused video */
                                decoder->get (dcp_to_content_video (piece, time), accurate);
                        }
                }
        }

        if (subtitles) {
                BOOST_FOREACH (shared_ptr<PlayerVideo> p, pvf) {
                        p->set_subtitle (subtitles.get ());
                }
        }

        return pvf;
}

/** @return Audio data or 0 if the only audio data here is referenced DCP data */
shared_ptr<AudioBuffers>
Player::get_audio (DCPTime time, DCPTime length, bool accurate)
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        Frame const length_frames = length.frames_round (_film->audio_frame_rate ());

        shared_ptr<AudioBuffers> audio = make_shared<AudioBuffers> (_film->audio_channels(), length_frames);
        audio->make_silent ();

        list<shared_ptr<Piece> > ov = overlaps (time, time + length, has_audio);
        if (ov.empty ()) {
                return audio;
        }

        bool all_referenced = true;
        BOOST_FOREACH (shared_ptr<Piece> i, ov) {
                shared_ptr<DCPContent> dcp_content = dynamic_pointer_cast<DCPContent> (i->content);
                if (i->content->audio && (!dcp_content || !dcp_content->reference_audio ())) {
                        /* There is audio content which is not from a DCP or not set to be referenced */
                        all_referenced = false;
                }
        }

        if (all_referenced && !_play_referenced) {
                return shared_ptr<AudioBuffers> ();
        }

        BOOST_FOREACH (shared_ptr<Piece> i, ov) {

                DCPOMATIC_ASSERT (i->content->audio);
                shared_ptr<AudioDecoder> decoder = i->decoder->audio;
                DCPOMATIC_ASSERT (decoder);

                /* The time that we should request from the content */
                DCPTime request = time - DCPTime::from_seconds (i->content->audio->delay() / 1000.0);
                Frame request_frames = length_frames;
                DCPTime offset;
                if (request < DCPTime ()) {
                        /* We went off the start of the content, so we will need to offset
                           the stuff we get back.
                        */
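                        /* For example (hypothetical values): with a content audio delay of 500ms
                           and time = 0, request works out as -0.5s, so offset becomes 0.5s,
                           request_frames is reduced by 0.5s worth of frames and we request from
                           the start of the content instead.
                        */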
                        offset = -request;
                        request_frames += request.frames_round (_film->audio_frame_rate ());
                        if (request_frames < 0) {
                                request_frames = 0;
                        }
                        request = DCPTime ();
                }

                Frame const content_frame = dcp_to_resampled_audio (i, request);

                BOOST_FOREACH (AudioStreamPtr j, i->content->audio->streams ()) {

                        if (j->channels() == 0) {
                                /* Some content (e.g. DCPs) can have streams with no channels */
                                continue;
                        }

                        /* Audio from this piece's decoder stream (which might be more or less than what we asked for) */
                        ContentAudio all = decoder->get (j, content_frame, request_frames, accurate);

                        /* Gain */
                        if (i->content->audio->gain() != 0) {
                                shared_ptr<AudioBuffers> gain = make_shared<AudioBuffers> (all.audio);
                                gain->apply_gain (i->content->audio->gain ());
                                all.audio = gain;
                        }

                        /* Remap channels */
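                        /* For example (hypothetical mapping): a stereo stream whose mapping sends
                           input 0 to DCP channel 0 (L) and input 1 to DCP channel 1 (R), both with
                           gain 1, is accumulated into those two channels of dcp_mapped; the film's
                           remaining channels stay silent.
                        */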
                        shared_ptr<AudioBuffers> dcp_mapped = make_shared<AudioBuffers> (_film->audio_channels(), all.audio->frames());
                        dcp_mapped->make_silent ();
                        AudioMapping map = j->mapping ();
                        for (int i = 0; i < map.input_channels(); ++i) {
                                for (int j = 0; j < _film->audio_channels(); ++j) {
                                        if (map.get (i, j) > 0) {
                                                dcp_mapped->accumulate_channel (
                                                        all.audio.get(),
                                                        i,
                                                        j,
                                                        map.get (i, j)
                                                        );
                                        }
                                }
                        }

                        if (_audio_processor) {
                                dcp_mapped = _audio_processor->run (dcp_mapped, _film->audio_channels ());
                        }

                        all.audio = dcp_mapped;

                        audio->accumulate_frames (
                                all.audio.get(),
                                content_frame - all.frame,
                                offset.frames_round (_film->audio_frame_rate()),
                                min (Frame (all.audio->frames()), request_frames)
                                );
                }
        }

        return audio;
}

Frame
Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
{
        DCPTime s = t - piece->content->position ();
        s = min (piece->content->length_after_trim(), s);
        s = max (DCPTime(), s + DCPTime (piece->content->trim_start(), piece->frc));

        /* It might seem more logical here to convert s to a ContentTime (using the FrameRateChange)
           then convert that ContentTime to frames at the content's rate.  However this fails for
           situations like content at 29.9978733fps, DCP at 30fps.  The accuracy of the Time type is not
           enough to distinguish between the two with low values of time (e.g. 3200 in Time units).

           Instead we convert the DCPTime using the DCP video rate then account for any skip/repeat.
        */
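
        /* For example (illustrative rates): 25fps content in a 50fps DCP has frc.dcp = 50 and
           frc.factor() = 2 (each content frame is repeated), so a DCPTime of one second gives
           50 / 2 = 25, i.e. content frame 25.
        */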
        return s.frames_floor (piece->frc.dcp) / piece->frc.factor ();
}

DCPTime
Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
{
        /* See comment in dcp_to_content_video */
        DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime (piece->content->trim_start (), piece->frc);
        return max (DCPTime (), d + piece->content->position ());
}

Frame
Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
{
        DCPTime s = t - piece->content->position ();
        s = min (piece->content->length_after_trim(), s);
        /* See notes in dcp_to_content_video */
        return max (DCPTime (), DCPTime (piece->content->trim_start (), piece->frc) + s).frames_floor (_film->audio_frame_rate ());
}

ContentTime
Player::dcp_to_content_subtitle (shared_ptr<const Piece> piece, DCPTime t) const
{
        DCPTime s = t - piece->content->position ();
        s = min (piece->content->length_after_trim(), s);
        return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
}

DCPTime
Player::content_subtitle_to_dcp (shared_ptr<const Piece> piece, ContentTime t) const
{
        return max (DCPTime (), DCPTime (t - piece->content->trim_start(), piece->frc) + piece->content->position());
}

/** @param burnt true to return only subtitles to be burnt, false to return only
 *  subtitles that should not be burnt.  This parameter will be ignored if
 *  _always_burn_subtitles is true; in this case, all subtitles will be returned.
 */
PlayerSubtitles
Player::get_subtitles (DCPTime time, DCPTime length, bool starting, bool burnt, bool accurate)
{
        list<shared_ptr<Piece> > subs = overlaps (time, time + length, has_subtitle);

        PlayerSubtitles ps (time, length);

        for (list<shared_ptr<Piece> >::const_iterator j = subs.begin(); j != subs.end(); ++j) {
                if (!(*j)->content->subtitle->use () || (!_always_burn_subtitles && (burnt != (*j)->content->subtitle->burn ()))) {
                        continue;
                }

                shared_ptr<DCPContent> dcp_content = dynamic_pointer_cast<DCPContent> ((*j)->content);
                if (dcp_content && dcp_content->reference_subtitle () && !_play_referenced) {
                        continue;
                }

                shared_ptr<SubtitleDecoder> subtitle_decoder = (*j)->decoder->subtitle;
                ContentTime const from = dcp_to_content_subtitle (*j, time);
                /* XXX: this video_frame_rate() should be the rate that the subtitle content has been prepared for */
                ContentTime const to = from + ContentTime::from_frames (1, _film->video_frame_rate ());

                list<ContentImageSubtitle> image = subtitle_decoder->get_image (ContentTimePeriod (from, to), starting, accurate);
                for (list<ContentImageSubtitle>::iterator i = image.begin(); i != image.end(); ++i) {

                        /* Apply content's subtitle offsets */
                        i->sub.rectangle.x += (*j)->content->subtitle->x_offset ();
                        i->sub.rectangle.y += (*j)->content->subtitle->y_offset ();

                        /* Apply content's subtitle scale */
                        i->sub.rectangle.width *= (*j)->content->subtitle->x_scale ();
                        i->sub.rectangle.height *= (*j)->content->subtitle->y_scale ();

                        /* Apply a corrective translation to keep the subtitle centred after that scale */
                        i->sub.rectangle.x -= i->sub.rectangle.width * ((*j)->content->subtitle->x_scale() - 1);
                        i->sub.rectangle.y -= i->sub.rectangle.height * ((*j)->content->subtitle->y_scale() - 1);

                        ps.image.push_back (i->sub);
                }

                list<ContentTextSubtitle> text = subtitle_decoder->get_text (ContentTimePeriod (from, to), starting, accurate);
                BOOST_FOREACH (ContentTextSubtitle& ts, text) {
                        BOOST_FOREACH (dcp::SubtitleString s, ts.subs) {
                                s.set_h_position (s.h_position() + (*j)->content->subtitle->x_offset ());
                                s.set_v_position (s.v_position() + (*j)->content->subtitle->y_offset ());
                                float const xs = (*j)->content->subtitle->x_scale();
                                float const ys = (*j)->content->subtitle->y_scale();
                                float size = s.size();

                                /* Adjust size to express the common part of the scaling;
                                   this scales size by max (xs, ys), e.g. if xs = ys = 0.5
                                   we scale size by 0.5.
                                */
                                if (xs > 1e-5 && ys > 1e-5) {
                                        size *= 1 / min (1 / xs, 1 / ys);
                                }
                                s.set_size (size);

                                /* Then express aspect ratio changes */
                                if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) {
                                        s.set_aspect_adjust (xs / ys);
                                }
                                s.set_in (dcp::Time(content_subtitle_to_dcp (*j, ts.period().from).seconds(), 1000));
                                s.set_out (dcp::Time(content_subtitle_to_dcp (*j, ts.period().to).seconds(), 1000));
                                ps.text.push_back (s);
                                ps.add_fonts ((*j)->content->subtitle->fonts ());
                        }
                }
        }

        return ps;
}

list<shared_ptr<Font> >
Player::get_subtitle_fonts ()
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        list<shared_ptr<Font> > fonts;
        BOOST_FOREACH (shared_ptr<Piece>& p, _pieces) {
                if (p->content->subtitle) {
                        /* XXX: things may go wrong if there are duplicate font IDs
                           with different font files.
                        */
                        list<shared_ptr<Font> > f = p->content->subtitle->fonts ();
                        copy (f.begin(), f.end(), back_inserter (fonts));
                }
        }

        return fonts;
}

/** Set this player never to produce any video data */
void
Player::set_ignore_video ()
{
        _ignore_video = true;
}

/** Set this player never to produce any audio data */
void
Player::set_ignore_audio ()
{
        _ignore_audio = true;
}

/** Set whether or not this player should always burn text subtitles into the image,
 *  regardless of the content settings.
 *  @param burn true to always burn subtitles, false to obey content settings.
 */
void
Player::set_always_burn_subtitles (bool burn)
{
        _always_burn_subtitles = burn;
}

void
Player::set_fast ()
{
        _fast = true;
        _have_valid_pieces = false;
}

void
Player::set_play_referenced ()
{
        _play_referenced = true;
        _have_valid_pieces = false;
}

list<ReferencedReelAsset>
Player::get_reel_assets ()
{
        list<ReferencedReelAsset> a;

        BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
                shared_ptr<DCPContent> j = dynamic_pointer_cast<DCPContent> (i);
                if (!j) {
                        continue;
                }

                scoped_ptr<DCPDecoder> decoder;
                try {
                        decoder.reset (new DCPDecoder (j, _film->log(), false));
                } catch (...) {
                        return a;
                }

                int64_t offset = 0;
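                /* offset accumulates the length (in frames) of the reels seen so far, so each
                   reel's assets are placed after the previous ones; e.g. (hypothetical reels)
                   with a first reel of 1440 frames at 24fps, the second reel's `from' is the
                   content position plus one minute.
                */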
                BOOST_FOREACH (shared_ptr<dcp::Reel> k, decoder->reels()) {
                        DCPTime const from = i->position() + DCPTime::from_frames (offset, _film->video_frame_rate());
                        if (j->reference_video ()) {
                                a.push_back (
                                        ReferencedReelAsset (
                                                k->main_picture (),
                                                DCPTimePeriod (from, from + DCPTime::from_frames (k->main_picture()->duration(), _film->video_frame_rate()))
                                                )
                                        );
                        }

                        if (j->reference_audio ()) {
                                a.push_back (
                                        ReferencedReelAsset (
                                                k->main_sound (),
                                                DCPTimePeriod (from, from + DCPTime::from_frames (k->main_sound()->duration(), _film->video_frame_rate()))
                                                )
                                        );
                        }

                        if (j->reference_subtitle ()) {
                                DCPOMATIC_ASSERT (k->main_subtitle ());
                                a.push_back (
                                        ReferencedReelAsset (
                                                k->main_subtitle (),
                                                DCPTimePeriod (from, from + DCPTime::from_frames (k->main_subtitle()->duration(), _film->video_frame_rate()))
                                                )
                                        );
                        }

                        /* Assume that main picture duration is the length of the reel */
                        offset += k->main_picture()->duration ();
                }
        }

        return a;
}

list<shared_ptr<Piece> >
Player::overlaps (DCPTime from, DCPTime to, boost::function<bool (Content *)> valid)
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        list<shared_ptr<Piece> > overlaps;
        BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
                if (valid (i->content.get ()) && i->content->position() < to && i->content->end() > from) {
                        overlaps.push_back (i);
                }
        }

        return overlaps;
}