src/lib/player.cc (from dcpomatic.git)
/*
    Copyright (C) 2013-2016 Carl Hetherington <cth@carlh.net>

    This file is part of DCP-o-matic.

    DCP-o-matic is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    DCP-o-matic is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.

*/

#include "player.h"
#include "film.h"
#include "audio_buffers.h"
#include "content_audio.h"
#include "dcp_content.h"
#include "job.h"
#include "image.h"
#include "raw_image_proxy.h"
#include "ratio.h"
#include "log.h"
#include "render_subtitles.h"
#include "config.h"
#include "content_video.h"
#include "player_video.h"
#include "frame_rate_change.h"
#include "audio_processor.h"
#include "playlist.h"
#include "referenced_reel_asset.h"
#include "decoder_factory.h"
#include "decoder.h"
#include "video_decoder.h"
#include "audio_decoder.h"
#include "subtitle_content.h"
#include "subtitle_decoder.h"
#include "ffmpeg_content.h"
#include "audio_content.h"
#include "content_subtitle.h"
#include "dcp_decoder.h"
#include "image_decoder.h"
#include <dcp/reel.h>
#include <dcp/reel_sound_asset.h>
#include <dcp/reel_subtitle_asset.h>
#include <dcp/reel_picture_asset.h>
#include <boost/foreach.hpp>
#include <stdint.h>
#include <algorithm>
#include <iostream>

#include "i18n.h"

#define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);

using std::list;
using std::cout;
using std::min;
using std::max;
using std::vector;
using std::pair;
using std::map;
using std::make_pair;
using std::copy;
using boost::shared_ptr;
using boost::weak_ptr;
using boost::dynamic_pointer_cast;
using boost::optional;
using boost::scoped_ptr;

static bool
has_video (Content* c)
{
        return static_cast<bool>(c->video);
}

static bool
has_audio (Content* c)
{
        return static_cast<bool>(c->audio);
}

static bool
has_subtitle (Content* c)
{
        return static_cast<bool>(c->subtitle);
}

Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
        : _film (film)
        , _playlist (playlist)
        , _have_valid_pieces (false)
        , _ignore_video (false)
        , _ignore_audio (false)
        , _always_burn_subtitles (false)
        , _fast (false)
        , _play_referenced (false)
{
        _film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
        _playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
        _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::playlist_content_changed, this, _1, _2, _3));
        set_video_container_size (_film->frame_size ());

        film_changed (Film::AUDIO_PROCESSOR);
}

void
Player::setup_pieces ()
{
        _pieces.clear ();

        BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {

                if (!i->paths_valid ()) {
                        continue;
                }

                shared_ptr<Decoder> decoder = decoder_factory (i, _film->log(), _fast);
                FrameRateChange frc (i->active_video_frame_rate(), _film->video_frame_rate());

                if (!decoder) {
                        /* Not something that we can decode; e.g. Atmos content */
                        continue;
                }

                if (decoder->video && _ignore_video) {
                        decoder->video->set_ignore ();
                }

                if (decoder->audio && _ignore_audio) {
                        decoder->audio->set_ignore ();
                }

                _pieces.push_back (shared_ptr<Piece> (new Piece (i, decoder, frc)));
        }

        _have_valid_pieces = true;
}

void
Player::playlist_content_changed (weak_ptr<Content> w, int property, bool frequent)
{
        shared_ptr<Content> c = w.lock ();
        if (!c) {
                return;
        }

        if (
                property == ContentProperty::POSITION ||
                property == ContentProperty::LENGTH ||
                property == ContentProperty::TRIM_START ||
                property == ContentProperty::TRIM_END ||
                property == ContentProperty::PATH ||
                property == VideoContentProperty::FRAME_TYPE ||
                property == DCPContentProperty::CAN_BE_PLAYED ||
                property == SubtitleContentProperty::COLOUR ||
                property == SubtitleContentProperty::OUTLINE ||
                property == SubtitleContentProperty::OUTLINE_COLOUR ||
                property == FFmpegContentProperty::SUBTITLE_STREAM
                ) {

                _have_valid_pieces = false;
                Changed (frequent);

        } else if (
                property == ContentProperty::VIDEO_FRAME_RATE ||
                property == SubtitleContentProperty::USE ||
                property == SubtitleContentProperty::X_OFFSET ||
                property == SubtitleContentProperty::Y_OFFSET ||
                property == SubtitleContentProperty::X_SCALE ||
                property == SubtitleContentProperty::Y_SCALE ||
                property == SubtitleContentProperty::FONTS ||
                property == VideoContentProperty::CROP ||
                property == VideoContentProperty::SCALE ||
                property == VideoContentProperty::FADE_IN ||
                property == VideoContentProperty::FADE_OUT ||
                property == VideoContentProperty::COLOUR_CONVERSION
                ) {

                Changed (frequent);
        }
}

void
Player::set_video_container_size (dcp::Size s)
{
        _video_container_size = s;

        _black_image.reset (new Image (AV_PIX_FMT_RGB24, _video_container_size, true));
        _black_image->make_black ();
}

void
Player::playlist_changed ()
{
        _have_valid_pieces = false;
        Changed (false);
}

void
Player::film_changed (Film::Property p)
{
        /* Here we should notice Film properties that affect our output, and
           alert listeners that our output now would be different to how it was
           last time we were run.
        */

        if (p == Film::CONTAINER) {
                Changed (false);
        } else if (p == Film::VIDEO_FRAME_RATE) {
                /* Pieces contain a FrameRateChange which contains the DCP frame rate,
                   so we need new pieces here.
                */
                _have_valid_pieces = false;
                Changed (false);
        } else if (p == Film::AUDIO_PROCESSOR) {
                if (_film->audio_processor ()) {
                        _audio_processor = _film->audio_processor()->clone (_film->audio_frame_rate ());
                }
        }
}

list<PositionImage>
Player::transform_image_subtitles (list<ImageSubtitle> subs) const
{
        list<PositionImage> all;

        for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
                if (!i->image) {
                        continue;
                }

                /* We will scale the subtitle up to fit _video_container_size */
                dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);

                /* Then we need a corrective translation, consisting of two parts:
                 *
                 * 1.  that which is the result of the scaling of the subtitle by _video_container_size; this will be
                 *     rect.x * _video_container_size.width and rect.y * _video_container_size.height.
                 *
                 * 2.  that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
                 *     (width_before_subtitle_scale * (1 - subtitle_x_scale) / 2) and
                 *     (height_before_subtitle_scale * (1 - subtitle_y_scale) / 2).
                 *
                 * Combining these two translations gives these expressions.
                 */
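                /* For example (illustrative figures): with a 1998-pixel-wide container and
                   rectangle.x = 0.1, the scaled image is placed lrint (1998 * 0.1) = 200
                   pixels from the left-hand edge of the container.
                */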

                all.push_back (
                        PositionImage (
                                i->image->scale (
                                        scaled_size,
                                        dcp::YUV_TO_RGB_REC601,
                                        i->image->pixel_format (),
                                        true,
                                        _fast
                                        ),
                                Position<int> (
                                        lrint (_video_container_size.width * i->rectangle.x),
                                        lrint (_video_container_size.height * i->rectangle.y)
                                        )
                                )
                        );
        }

        return all;
}

shared_ptr<PlayerVideo>
Player::black_player_video_frame (DCPTime time) const
{
        return shared_ptr<PlayerVideo> (
                new PlayerVideo (
                        shared_ptr<const ImageProxy> (new RawImageProxy (_black_image)),
                        time,
                        Crop (),
                        optional<double> (),
                        _video_container_size,
                        _video_container_size,
                        EYES_BOTH,
                        PART_WHOLE,
                        PresetColourConversion::all().front().conversion
                )
        );
}

/** @return All PlayerVideos at the given time.  There may be none (if the content
 *  at `time' is a DCP which we are passing through, i.e. referring to by reference)
 *  or two (if we have 3D).
 */
list<shared_ptr<PlayerVideo> >
Player::get_video (DCPTime time, bool accurate)
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        /* Find subtitles for possible burn-in */

        PlayerSubtitles ps = get_subtitles (time, DCPTime::from_frames (1, _film->video_frame_rate ()), false, true, accurate);

        list<PositionImage> sub_images;

        /* Image subtitles */
        list<PositionImage> c = transform_image_subtitles (ps.image);
        copy (c.begin(), c.end(), back_inserter (sub_images));

        /* Text subtitles (rendered to an image) */
        if (!ps.text.empty ()) {
                list<PositionImage> s = render_subtitles (ps.text, ps.fonts, _video_container_size);
                copy (s.begin (), s.end (), back_inserter (sub_images));
        }

        optional<PositionImage> subtitles;
        if (!sub_images.empty ()) {
                subtitles = merge (sub_images);
        }

        /* Find pieces containing video which is happening now */

        list<shared_ptr<Piece> > ov = overlaps (
                time,
                time + DCPTime::from_frames (1, _film->video_frame_rate ()),
                &has_video
                );

        list<shared_ptr<PlayerVideo> > pvf;

        if (ov.empty ()) {
                /* No video content at this time */
                pvf.push_back (black_player_video_frame (time));
        } else {
                /* Some video content at this time */
                shared_ptr<Piece> last = *(ov.rbegin ());
                VideoFrameType const last_type = last->content->video->frame_type ();

                /* Get video from appropriate piece(s) */
                BOOST_FOREACH (shared_ptr<Piece> piece, ov) {

                        shared_ptr<VideoDecoder> decoder = piece->decoder->video;
                        DCPOMATIC_ASSERT (decoder);

                        shared_ptr<DCPContent> dcp_content = dynamic_pointer_cast<DCPContent> (piece->content);
                        if (dcp_content && dcp_content->reference_video () && !_play_referenced) {
                                continue;
                        }

                        bool const use =
                                /* always use the last video */
                                piece == last ||
                                /* with a corresponding L/R eye if appropriate */
                                (last_type == VIDEO_FRAME_TYPE_3D_LEFT && piece->content->video->frame_type() == VIDEO_FRAME_TYPE_3D_RIGHT) ||
                                (last_type == VIDEO_FRAME_TYPE_3D_RIGHT && piece->content->video->frame_type() == VIDEO_FRAME_TYPE_3D_LEFT);

                        if (use) {
                                /* We want to use this piece */
                                list<ContentVideo> content_video = decoder->get (dcp_to_content_video (piece, time), accurate);
                                if (content_video.empty ()) {
                                        pvf.push_back (black_player_video_frame (time));
                                } else {
                                        dcp::Size image_size = piece->content->video->scale().size (
                                                piece->content->video, _video_container_size, _film->frame_size ()
                                                );

                                        for (list<ContentVideo>::const_iterator i = content_video.begin(); i != content_video.end(); ++i) {
                                                pvf.push_back (
                                                        shared_ptr<PlayerVideo> (
                                                                new PlayerVideo (
                                                                        i->image,
                                                                        time,
                                                                        piece->content->video->crop (),
                                                                        piece->content->video->fade (i->frame.index()),
                                                                        image_size,
                                                                        _video_container_size,
                                                                        i->frame.eyes(),
                                                                        i->part,
                                                                        piece->content->video->colour_conversion ()
                                                                        )
                                                                )
                                                        );
                                        }
                                }
                        } else {
                                /* Discard unused video */
                                decoder->get (dcp_to_content_video (piece, time), accurate);
                        }
                }
        }

        if (subtitles) {
                BOOST_FOREACH (shared_ptr<PlayerVideo> p, pvf) {
                        p->set_subtitle (subtitles.get ());
                }
        }

        return pvf;
}

/** @return Audio data or 0 if the only audio data here is referenced DCP data */
shared_ptr<AudioBuffers>
Player::get_audio (DCPTime time, DCPTime length, bool accurate)
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        Frame const length_frames = length.frames_round (_film->audio_frame_rate ());

        shared_ptr<AudioBuffers> audio (new AudioBuffers (_film->audio_channels(), length_frames));
        audio->make_silent ();

        list<shared_ptr<Piece> > ov = overlaps (time, time + length, has_audio);
        if (ov.empty ()) {
                return audio;
        }

        bool all_referenced = true;
        BOOST_FOREACH (shared_ptr<Piece> i, ov) {
                shared_ptr<DCPContent> dcp_content = dynamic_pointer_cast<DCPContent> (i->content);
                if (i->content->audio && (!dcp_content || !dcp_content->reference_audio ())) {
                        /* There is audio content which is not from a DCP or not set to be referenced */
                        all_referenced = false;
                }
        }

        if (all_referenced && !_play_referenced) {
                return shared_ptr<AudioBuffers> ();
        }

        BOOST_FOREACH (shared_ptr<Piece> i, ov) {

                DCPOMATIC_ASSERT (i->content->audio);
                shared_ptr<AudioDecoder> decoder = i->decoder->audio;
                DCPOMATIC_ASSERT (decoder);

                /* The time that we should request from the content */
                DCPTime request = time - DCPTime::from_seconds (i->content->audio->delay() / 1000.0);
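                /* For example (illustrative figures): a delay of +500 ms means the audio heard
                   at DCP time `time' is fetched from content 0.5 s earlier, so the content's
                   sound emerges half a second later than it otherwise would.
                */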
                Frame request_frames = length_frames;
                DCPTime offset;
                if (request < DCPTime ()) {
                        /* We went off the start of the content, so we will need to offset
                           the stuff we get back.
                        */
                        offset = -request;
                        request_frames += request.frames_round (_film->audio_frame_rate ());
                        if (request_frames < 0) {
                                request_frames = 0;
                        }
                        request = DCPTime ();
                }

                Frame const content_frame = dcp_to_resampled_audio (i, request);

                BOOST_FOREACH (AudioStreamPtr j, i->content->audio->streams ()) {

                        if (j->channels() == 0) {
                                /* Some content (e.g. DCPs) can have streams with no channels */
                                continue;
                        }

                        /* Audio from this piece's decoder stream (which might be more or less than what we asked for) */
                        ContentAudio all = decoder->get (j, content_frame, request_frames, accurate);

                        /* Gain */
                        if (i->content->audio->gain() != 0) {
                                shared_ptr<AudioBuffers> gain (new AudioBuffers (all.audio));
                                gain->apply_gain (i->content->audio->gain ());
                                all.audio = gain;
                        }

                        /* Remap channels */
                        shared_ptr<AudioBuffers> dcp_mapped (new AudioBuffers (_film->audio_channels(), all.audio->frames()));
                        dcp_mapped->make_silent ();
                        AudioMapping map = j->mapping ();
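                        /* For example (illustrative figures): map.get (0, 2) == 0.5 routes input
                           channel 0 to DCP channel 2 at half amplitude; entries of 0 are skipped
                           and contribute nothing to the output.
                        */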
                        for (int in = 0; in < map.input_channels(); ++in) {
                                for (int out = 0; out < _film->audio_channels(); ++out) {
                                        if (map.get (in, out) > 0) {
                                                dcp_mapped->accumulate_channel (
                                                        all.audio.get(),
                                                        in,
                                                        out,
                                                        map.get (in, out)
                                                        );
                                        }
                                }
                        }

                        if (_audio_processor) {
                                dcp_mapped = _audio_processor->run (dcp_mapped, _film->audio_channels ());
                        }

                        all.audio = dcp_mapped;

                        audio->accumulate_frames (
                                all.audio.get(),
                                content_frame - all.frame,
                                offset.frames_round (_film->audio_frame_rate()),
                                min (Frame (all.audio->frames()), request_frames)
                                );
                }
        }

        return audio;
}

Frame
Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
{
        DCPTime s = t - piece->content->position ();
        s = min (piece->content->length_after_trim(), s);
        s = max (DCPTime(), s + DCPTime (piece->content->trim_start(), piece->frc));

        /* It might seem more logical here to convert s to a ContentTime (using the FrameRateChange)
           then convert that ContentTime to frames at the content's rate.  However this fails for
           situations like content at 29.9978733fps, DCP at 30fps.  The accuracy of the Time type is not
           enough to distinguish between the two with low values of time (e.g. 3200 in Time units).

           Instead we convert the DCPTime using the DCP video rate then account for any skip/repeat.
        */
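        /* Illustrative example: for 25fps content in a 50fps DCP each source frame is repeated,
           so (assuming FrameRateChange reports frc.dcp == 50 and frc.factor() == 2 here) one
           second of DCP time maps to 50 / 2 = 25 content frames.
        */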
        return s.frames_floor (piece->frc.dcp) / piece->frc.factor ();
}

DCPTime
Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
{
        /* See comment in dcp_to_content_video */
        DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime (piece->content->trim_start (), piece->frc);
        return max (DCPTime (), d + piece->content->position ());
}

Frame
Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
{
        DCPTime s = t - piece->content->position ();
        s = min (piece->content->length_after_trim(), s);
        /* See notes in dcp_to_content_video */
        return max (DCPTime (), DCPTime (piece->content->trim_start (), piece->frc) + s).frames_floor (_film->audio_frame_rate ());
}

ContentTime
Player::dcp_to_content_subtitle (shared_ptr<const Piece> piece, DCPTime t) const
{
        DCPTime s = t - piece->content->position ();
        s = min (piece->content->length_after_trim(), s);
        return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
}

DCPTime
Player::content_subtitle_to_dcp (shared_ptr<const Piece> piece, ContentTime t) const
{
        return max (DCPTime (), DCPTime (t - piece->content->trim_start(), piece->frc) + piece->content->position());
}

/** @param burnt true to return only subtitles to be burnt, false to return only
 *  subtitles that should not be burnt.  This parameter will be ignored if
 *  _always_burn_subtitles is true; in this case, all subtitles will be returned.
 */
PlayerSubtitles
Player::get_subtitles (DCPTime time, DCPTime length, bool starting, bool burnt, bool accurate)
{
        list<shared_ptr<Piece> > subs = overlaps (time, time + length, has_subtitle);

        PlayerSubtitles ps (time, length);

        for (list<shared_ptr<Piece> >::const_iterator j = subs.begin(); j != subs.end(); ++j) {
                if (!(*j)->content->subtitle->use () || (!_always_burn_subtitles && (burnt != (*j)->content->subtitle->burn ()))) {
                        continue;
                }

                shared_ptr<DCPContent> dcp_content = dynamic_pointer_cast<DCPContent> ((*j)->content);
                if (dcp_content && dcp_content->reference_subtitle () && !_play_referenced) {
                        continue;
                }

                shared_ptr<SubtitleDecoder> subtitle_decoder = (*j)->decoder->subtitle;
                ContentTime const from = dcp_to_content_subtitle (*j, time);
                /* XXX: this video_frame_rate() should be the rate that the subtitle content has been prepared for */
                ContentTime const to = from + ContentTime::from_frames (1, _film->video_frame_rate ());

                list<ContentImageSubtitle> image = subtitle_decoder->get_image (ContentTimePeriod (from, to), starting, accurate);
                for (list<ContentImageSubtitle>::iterator i = image.begin(); i != image.end(); ++i) {

                        /* Apply content's subtitle offsets */
                        i->sub.rectangle.x += (*j)->content->subtitle->x_offset ();
                        i->sub.rectangle.y += (*j)->content->subtitle->y_offset ();

                        /* Apply content's subtitle scale */
                        i->sub.rectangle.width *= (*j)->content->subtitle->x_scale ();
                        i->sub.rectangle.height *= (*j)->content->subtitle->y_scale ();

                        /* Apply a corrective translation to keep the subtitle centred after that scale */
                        i->sub.rectangle.x -= i->sub.rectangle.width * ((*j)->content->subtitle->x_scale() - 1);
                        i->sub.rectangle.y -= i->sub.rectangle.height * ((*j)->content->subtitle->y_scale() - 1);

                        ps.image.push_back (i->sub);
                }

                list<ContentTextSubtitle> text = subtitle_decoder->get_text (ContentTimePeriod (from, to), starting, accurate);
                BOOST_FOREACH (ContentTextSubtitle& ts, text) {
                        BOOST_FOREACH (dcp::SubtitleString s, ts.subs) {
                                s.set_h_position (s.h_position() + (*j)->content->subtitle->x_offset ());
                                s.set_v_position (s.v_position() + (*j)->content->subtitle->y_offset ());
                                float const xs = (*j)->content->subtitle->x_scale();
                                float const ys = (*j)->content->subtitle->y_scale();
                                float size = s.size();

                                /* Adjust size to express the common part of the scaling;
                                   1 / min (1 / xs, 1 / ys) is max (xs, ys), so e.g. if
                                   xs = ys = 0.5 the size is halved.
                                */
                                if (xs > 1e-5 && ys > 1e-5) {
                                        size *= 1 / min (1 / xs, 1 / ys);
                                }
                                s.set_size (size);

                                /* Then express aspect ratio changes */
                                if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) {
                                        s.set_aspect_adjust (xs / ys);
                                }
                                s.set_in (dcp::Time(content_subtitle_to_dcp (*j, ts.period().from).seconds(), 1000));
                                s.set_out (dcp::Time(content_subtitle_to_dcp (*j, ts.period().to).seconds(), 1000));
                                ps.text.push_back (s);
                                ps.add_fonts ((*j)->content->subtitle->fonts ());
                        }
                }
        }

        return ps;
}

list<shared_ptr<Font> >
Player::get_subtitle_fonts ()
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        list<shared_ptr<Font> > fonts;
        BOOST_FOREACH (shared_ptr<Piece>& p, _pieces) {
                if (p->content->subtitle) {
                        /* XXX: things may go wrong if there are duplicate font IDs
                           with different font files.
                        */
                        list<shared_ptr<Font> > f = p->content->subtitle->fonts ();
                        copy (f.begin(), f.end(), back_inserter (fonts));
                }
        }

        return fonts;
}

/** Set this player never to produce any video data */
void
Player::set_ignore_video ()
{
        _ignore_video = true;
}

/** Set this player never to produce any audio data */
void
Player::set_ignore_audio ()
{
        _ignore_audio = true;
}

/** Set whether or not this player should always burn text subtitles into the image,
 *  regardless of the content settings.
 *  @param burn true to always burn subtitles, false to obey content settings.
 */
void
Player::set_always_burn_subtitles (bool burn)
{
        _always_burn_subtitles = burn;
}

void
Player::set_fast ()
{
        _fast = true;
        _have_valid_pieces = false;
}

void
Player::set_play_referenced ()
{
        _play_referenced = true;
        _have_valid_pieces = false;
}

list<ReferencedReelAsset>
Player::get_reel_assets ()
{
        list<ReferencedReelAsset> a;

        BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
                shared_ptr<DCPContent> j = dynamic_pointer_cast<DCPContent> (i);
                if (!j) {
                        continue;
                }

                scoped_ptr<DCPDecoder> decoder;
                try {
                        decoder.reset (new DCPDecoder (j, _film->log(), false));
                } catch (...) {
                        return a;
                }

                int64_t offset = 0;
                BOOST_FOREACH (shared_ptr<dcp::Reel> k, decoder->reels()) {
                        DCPTime const from = i->position() + DCPTime::from_frames (offset, _film->video_frame_rate());
                        if (j->reference_video ()) {
                                a.push_back (
                                        ReferencedReelAsset (
                                                k->main_picture (),
                                                DCPTimePeriod (from, from + DCPTime::from_frames (k->main_picture()->duration(), _film->video_frame_rate()))
                                                )
                                        );
                        }

                        if (j->reference_audio ()) {
                                a.push_back (
                                        ReferencedReelAsset (
                                                k->main_sound (),
                                                DCPTimePeriod (from, from + DCPTime::from_frames (k->main_sound()->duration(), _film->video_frame_rate()))
                                                )
                                        );
                        }

                        if (j->reference_subtitle ()) {
                                DCPOMATIC_ASSERT (k->main_subtitle ());
                                a.push_back (
                                        ReferencedReelAsset (
                                                k->main_subtitle (),
                                                DCPTimePeriod (from, from + DCPTime::from_frames (k->main_subtitle()->duration(), _film->video_frame_rate()))
                                                )
                                        );
                        }

                        /* Assume that main picture duration is the length of the reel */
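                        /* For example (illustrative figures): if the first reel's picture asset is
                           1440 frames long, assets from the second reel are placed 1440 frames (at
                           the film's video frame rate) after this content's position.
                        */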
                        offset += k->main_picture()->duration ();
                }
        }

        return a;
}

list<shared_ptr<Piece> >
Player::overlaps (DCPTime from, DCPTime to, boost::function<bool (Content *)> valid)
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        list<shared_ptr<Piece> > overlaps;
        BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
                if (valid (i->content.get ()) && i->content->position() < to && i->content->end() > from) {
                        overlaps.push_back (i);
                }
        }

        return overlaps;
}