Factor out decoder creation to a factory method.
[dcpomatic.git] / src / lib / player.cc
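The change here is that Player::setup_pieces() no longer constructs decoders itself; it now calls the free function decoder_factory(), declared in the newly-included decoder_factory.h.  Judging only from its call site below (decoder_factory (i, old_image_decoders, _film->log(), _fast)), the declaration is presumably along these lines; parameter names and passing conventions are guesses, and decoder_factory.h has the real thing:

    /* Hypothetical sketch of the factored-out factory's declaration, inferred from
       its call in Player::setup_pieces().  It returns an empty pointer for content
       that cannot be decoded (e.g. Atmos), which setup_pieces() checks for; the old
       ImageDecoders are passed in, presumably so that they can be re-used.
    */
    boost::shared_ptr<Decoder> decoder_factory (
            boost::shared_ptr<Content> content,
            std::list<boost::shared_ptr<ImageDecoder> > old_image_decoders,
            boost::shared_ptr<Log> log,
            bool fast
            );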
1 /*
2     Copyright (C) 2013-2016 Carl Hetherington <cth@carlh.net>
3
4     This file is part of DCP-o-matic.
5
6     DCP-o-matic is free software; you can redistribute it and/or modify
7     it under the terms of the GNU General Public License as published by
8     the Free Software Foundation; either version 2 of the License, or
9     (at your option) any later version.
10
11     DCP-o-matic is distributed in the hope that it will be useful,
12     but WITHOUT ANY WARRANTY; without even the implied warranty of
13     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14     GNU General Public License for more details.
15
16     You should have received a copy of the GNU General Public License
17     along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.
18
19 */
20
21 #include "player.h"
22 #include "film.h"
23 #include "audio_buffers.h"
24 #include "content_audio.h"
25 #include "dcp_content.h"
26 #include "job.h"
27 #include "image.h"
28 #include "raw_image_proxy.h"
29 #include "ratio.h"
30 #include "log.h"
31 #include "render_subtitles.h"
32 #include "config.h"
33 #include "content_video.h"
34 #include "player_video.h"
35 #include "frame_rate_change.h"
36 #include "audio_processor.h"
37 #include "playlist.h"
38 #include "referenced_reel_asset.h"
39 #include "decoder_factory.h"
40 #include "decoder.h"
41 #include "video_decoder.h"
42 #include "audio_decoder.h"
43 #include "subtitle_content.h"
44 #include "subtitle_decoder.h"
45 #include "ffmpeg_content.h"
46 #include "audio_content.h"
47 #include "content_subtitle.h"
48 #include "dcp_decoder.h"
49 #include "image_decoder.h"
50 #include <dcp/reel.h>
51 #include <dcp/reel_sound_asset.h>
52 #include <dcp/reel_subtitle_asset.h>
53 #include <dcp/reel_picture_asset.h>
54 #include <boost/foreach.hpp>
55 #include <stdint.h>
56 #include <algorithm>
57 #include <iostream>
58
59 #include "i18n.h"
60
61 #define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);
62
63 using std::list;
64 using std::cout;
65 using std::min;
66 using std::max;
67 using std::min;
68 using std::vector;
69 using std::pair;
70 using std::map;
71 using std::make_pair;
72 using std::copy;
73 using boost::shared_ptr;
74 using boost::weak_ptr;
75 using boost::dynamic_pointer_cast;
76 using boost::optional;
77 using boost::scoped_ptr;
78
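/* Predicates for Player::overlaps(): true if the content has video, audio or subtitles respectively */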
79 static bool
80 has_video (Content* c)
81 {
82         return static_cast<bool>(c->video);
83 }
84
85 static bool
86 has_audio (Content* c)
87 {
88         return static_cast<bool>(c->audio);
89 }
90
91 static bool
92 has_subtitle (Content* c)
93 {
94         return static_cast<bool>(c->subtitle);
95 }
96
97 Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
98         : _film (film)
99         , _playlist (playlist)
100         , _have_valid_pieces (false)
101         , _ignore_video (false)
102         , _ignore_audio (false)
103         , _always_burn_subtitles (false)
104         , _fast (false)
105         , _play_referenced (false)
106 {
107         _film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
108         _playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
109         _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::playlist_content_changed, this, _1, _2, _3));
110         set_video_container_size (_film->frame_size ());
111
112         film_changed (Film::AUDIO_PROCESSOR);
113 }
114
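/** Rebuild _pieces from the playlist, asking decoder_factory() for a decoder for each
 *  bit of content whose paths are valid.  Existing ImageDecoders are passed to the
 *  factory, presumably so that they can be re-used.
 */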
115 void
116 Player::setup_pieces ()
117 {
118         list<shared_ptr<ImageDecoder> > old_image_decoders;
119         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
120                 shared_ptr<ImageDecoder> imd = dynamic_pointer_cast<ImageDecoder> (i->decoder);
121                 if (imd) {
122                         old_image_decoders.push_back (imd);
123                 }
124         }
125
126         _pieces.clear ();
127
128         BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
129
130                 if (!i->paths_valid ()) {
131                         continue;
132                 }
133
134                 shared_ptr<Decoder> decoder = decoder_factory (i, old_image_decoders, _film->log(), _fast);
135                 FrameRateChange frc (i->active_video_frame_rate(), _film->video_frame_rate());
136
137                 if (!decoder) {
138                         /* Not something that we can decode; e.g. Atmos content */
139                         continue;
140                 }
141
142                 if (decoder->video && _ignore_video) {
143                         decoder->video->set_ignore ();
144                 }
145
146                 if (decoder->audio && _ignore_audio) {
147                         decoder->audio->set_ignore ();
148                 }
149
150                 _pieces.push_back (shared_ptr<Piece> (new Piece (i, decoder, frc)));
151         }
152
153         _have_valid_pieces = true;
154 }
155
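/** Respond to a change in a property of some content in the playlist, invalidating the
 *  pieces and/or emitting Changed depending on which property it was.
 */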
156 void
157 Player::playlist_content_changed (weak_ptr<Content> w, int property, bool frequent)
158 {
159         shared_ptr<Content> c = w.lock ();
160         if (!c) {
161                 return;
162         }
163
164         if (
165                 property == ContentProperty::POSITION ||
166                 property == ContentProperty::LENGTH ||
167                 property == ContentProperty::TRIM_START ||
168                 property == ContentProperty::TRIM_END ||
169                 property == ContentProperty::PATH ||
170                 property == VideoContentProperty::FRAME_TYPE ||
171                 property == DCPContentProperty::CAN_BE_PLAYED ||
172                 property == SubtitleContentProperty::COLOUR ||
173                 property == SubtitleContentProperty::OUTLINE ||
174                 property == SubtitleContentProperty::OUTLINE_COLOUR ||
175                 property == FFmpegContentProperty::SUBTITLE_STREAM
176                 ) {
177
178                 _have_valid_pieces = false;
179                 Changed (frequent);
180
181         } else if (
182                 property == ContentProperty::VIDEO_FRAME_RATE ||
183                 property == SubtitleContentProperty::USE ||
184                 property == SubtitleContentProperty::X_OFFSET ||
185                 property == SubtitleContentProperty::Y_OFFSET ||
186                 property == SubtitleContentProperty::X_SCALE ||
187                 property == SubtitleContentProperty::Y_SCALE ||
188                 property == SubtitleContentProperty::FONTS ||
189                 property == VideoContentProperty::CROP ||
190                 property == VideoContentProperty::SCALE ||
191                 property == VideoContentProperty::FADE_IN ||
192                 property == VideoContentProperty::FADE_OUT ||
193                 property == VideoContentProperty::COLOUR_CONVERSION
194                 ) {
195
196                 Changed (frequent);
197         }
198 }
199
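/** Set the size of the container into which video will be scaled, and remake the black frame to match */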
200 void
201 Player::set_video_container_size (dcp::Size s)
202 {
203         _video_container_size = s;
204
205         _black_image.reset (new Image (AV_PIX_FMT_RGB24, _video_container_size, true));
206         _black_image->make_black ();
207 }
208
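/** Respond to a change in the playlist by invalidating the pieces and emitting Changed */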
209 void
210 Player::playlist_changed ()
211 {
212         _have_valid_pieces = false;
213         Changed (false);
214 }
215
216 void
217 Player::film_changed (Film::Property p)
218 {
219         /* Here we should notice Film properties that affect our output, and
220            alert listeners that our output now would be different to how it was
221            last time we were run.
222         */
223
224         if (p == Film::CONTAINER) {
225                 Changed (false);
226         } else if (p == Film::VIDEO_FRAME_RATE) {
227                 /* Pieces contain a FrameRateChange which contains the DCP frame rate,
228                    so we need new pieces here.
229                 */
230                 _have_valid_pieces = false;
231                 Changed (false);
232         } else if (p == Film::AUDIO_PROCESSOR) {
233                 if (_film->audio_processor ()) {
234                         _audio_processor = _film->audio_processor()->clone (_film->audio_frame_rate ());
235                 }
236         }
237 }
238
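/** Scale image subtitles up to fit _video_container_size and work out their positions within it */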
239 list<PositionImage>
240 Player::transform_image_subtitles (list<ImageSubtitle> subs) const
241 {
242         list<PositionImage> all;
243
244         for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
245                 if (!i->image) {
246                         continue;
247                 }
248
249                 /* We will scale the subtitle up to fit _video_container_size */
250                 dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);
251
252                 /* Then we need a corrective translation, consisting of two parts:
253                  *
254                  * 1.  that which is the result of the scaling of the subtitle by _video_container_size; this will be
255                  *     rect.x * _video_container_size.width and rect.y * _video_container_size.height.
256                  *
257                  * 2.  that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
258                  *     (width_before_subtitle_scale * (1 - subtitle_x_scale) / 2) and
259                  *     (height_before_subtitle_scale * (1 - subtitle_y_scale) / 2).
260                  *
261                  * Combining these two translations gives these expressions.
262                  */
263
264                 all.push_back (
265                         PositionImage (
266                                 i->image->scale (
267                                         scaled_size,
268                                         dcp::YUV_TO_RGB_REC601,
269                                         i->image->pixel_format (),
270                                         true,
271                                         _fast
272                                         ),
273                                 Position<int> (
274                                         lrint (_video_container_size.width * i->rectangle.x),
275                                         lrint (_video_container_size.height * i->rectangle.y)
276                                         )
277                                 )
278                         );
279         }
280
281         return all;
282 }
283
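/** @return a black frame, the size of the video container, for the given time */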
284 shared_ptr<PlayerVideo>
285 Player::black_player_video_frame (DCPTime time) const
286 {
287         return shared_ptr<PlayerVideo> (
288                 new PlayerVideo (
289                         shared_ptr<const ImageProxy> (new RawImageProxy (_black_image)),
290                         time,
291                         Crop (),
292                         optional<double> (),
293                         _video_container_size,
294                         _video_container_size,
295                         EYES_BOTH,
296                         PART_WHOLE,
297                         PresetColourConversion::all().front().conversion
298                 )
299         );
300 }
301
302 /** @return All PlayerVideos at the given time.  There may be none (if the content
303  *  at `time' is a DCP which we are passing through, i.e. referring to by reference),
304  *  one, or two if we have 3D.
305  */
306 list<shared_ptr<PlayerVideo> >
307 Player::get_video (DCPTime time, bool accurate)
308 {
309         if (!_have_valid_pieces) {
310                 setup_pieces ();
311         }
312
313         /* Find subtitles for possible burn-in */
314
315         PlayerSubtitles ps = get_subtitles (time, DCPTime::from_frames (1, _film->video_frame_rate ()), false, true, accurate);
316
317         list<PositionImage> sub_images;
318
319         /* Image subtitles */
320         list<PositionImage> c = transform_image_subtitles (ps.image);
321         copy (c.begin(), c.end(), back_inserter (sub_images));
322
323         /* Text subtitles (rendered to an image) */
324         if (!ps.text.empty ()) {
325                 list<PositionImage> s = render_subtitles (ps.text, ps.fonts, _video_container_size);
326                 copy (s.begin (), s.end (), back_inserter (sub_images));
327         }
328
329         optional<PositionImage> subtitles;
330         if (!sub_images.empty ()) {
331                 subtitles = merge (sub_images);
332         }
333
334         /* Find pieces containing video which is happening now */
335
336         list<shared_ptr<Piece> > ov = overlaps (
337                 time,
338                 time + DCPTime::from_frames (1, _film->video_frame_rate ()),
339                 &has_video
340                 );
341
342         list<shared_ptr<PlayerVideo> > pvf;
343
344         if (ov.empty ()) {
345                 /* No video content at this time */
346                 pvf.push_back (black_player_video_frame (time));
347         } else {
348                 /* Some video content at this time */
349                 shared_ptr<Piece> last = *(ov.rbegin ());
350                 VideoFrameType const last_type = last->content->video->frame_type ();
351
352                 /* Get video from appropriate piece(s) */
353                 BOOST_FOREACH (shared_ptr<Piece> piece, ov) {
354
355                         shared_ptr<VideoDecoder> decoder = piece->decoder->video;
356                         DCPOMATIC_ASSERT (decoder);
357
358                         shared_ptr<DCPContent> dcp_content = dynamic_pointer_cast<DCPContent> (piece->content);
359                         if (dcp_content && dcp_content->reference_video () && !_play_referenced) {
360                                 continue;
361                         }
362
363                         bool const use =
364                                 /* always use the last video */
365                                 piece == last ||
366                                 /* with a corresponding L/R eye if appropriate */
367                                 (last_type == VIDEO_FRAME_TYPE_3D_LEFT && piece->content->video->frame_type() == VIDEO_FRAME_TYPE_3D_RIGHT) ||
368                                 (last_type == VIDEO_FRAME_TYPE_3D_RIGHT && piece->content->video->frame_type() == VIDEO_FRAME_TYPE_3D_LEFT);
369
370                         if (use) {
371                                 /* We want to use this piece */
372                                 list<ContentVideo> content_video = decoder->get (dcp_to_content_video (piece, time), accurate);
373                                 if (content_video.empty ()) {
374                                         pvf.push_back (black_player_video_frame (time));
375                                 } else {
376                                         dcp::Size image_size = piece->content->video->scale().size (
377                                                 piece->content->video, _video_container_size, _film->frame_size ()
378                                                 );
379
380                                         for (list<ContentVideo>::const_iterator i = content_video.begin(); i != content_video.end(); ++i) {
381                                                 pvf.push_back (
382                                                         shared_ptr<PlayerVideo> (
383                                                                 new PlayerVideo (
384                                                                         i->image,
385                                                                         content_video_to_dcp (piece, i->frame),
386                                                                         piece->content->video->crop (),
387                                                                         piece->content->video->fade (i->frame),
388                                                                         image_size,
389                                                                         _video_container_size,
390                                                                         i->eyes,
391                                                                         i->part,
392                                                                         piece->content->video->colour_conversion ()
393                                                                         )
394                                                                 )
395                                                         );
396                                         }
397                                 }
398                         } else {
399                                 /* Discard unused video */
400                                 decoder->get (dcp_to_content_video (piece, time), accurate);
401                         }
402                 }
403         }
404
405         if (subtitles) {
406                 BOOST_FOREACH (shared_ptr<PlayerVideo> p, pvf) {
407                         p->set_subtitle (subtitles.get ());
408                 }
409         }
410
411         return pvf;
412 }
413
414 /** @return Audio data, or an empty pointer if the only audio here is referenced DCP data */
415 shared_ptr<AudioBuffers>
416 Player::get_audio (DCPTime time, DCPTime length, bool accurate)
417 {
418         if (!_have_valid_pieces) {
419                 setup_pieces ();
420         }
421
422         Frame const length_frames = length.frames_round (_film->audio_frame_rate ());
423
424         shared_ptr<AudioBuffers> audio (new AudioBuffers (_film->audio_channels(), length_frames));
425         audio->make_silent ();
426
427         list<shared_ptr<Piece> > ov = overlaps (time, time + length, has_audio);
428         if (ov.empty ()) {
429                 return audio;
430         }
431
432         bool all_referenced = true;
433         BOOST_FOREACH (shared_ptr<Piece> i, ov) {
434                 shared_ptr<DCPContent> dcp_content = dynamic_pointer_cast<DCPContent> (i->content);
435                 if (i->content->audio && (!dcp_content || !dcp_content->reference_audio ())) {
436                         /* There is audio content which is not from a DCP or not set to be referenced */
437                         all_referenced = false;
438                 }
439         }
440
441         if (all_referenced && !_play_referenced) {
442                 return shared_ptr<AudioBuffers> ();
443         }
444
445         BOOST_FOREACH (shared_ptr<Piece> i, ov) {
446
447                 DCPOMATIC_ASSERT (i->content->audio);
448                 shared_ptr<AudioDecoder> decoder = i->decoder->audio;
449                 DCPOMATIC_ASSERT (decoder);
450
451                 /* The time that we should request from the content */
452                 DCPTime request = time - DCPTime::from_seconds (i->content->audio->delay() / 1000.0);
453                 Frame request_frames = length_frames;
454                 DCPTime offset;
455                 if (request < DCPTime ()) {
456                         /* We went off the start of the content, so we will need to offset
457                            the stuff we get back.
458                         */
459                         offset = -request;
460                         request_frames += request.frames_round (_film->audio_frame_rate ());
461                         if (request_frames < 0) {
462                                 request_frames = 0;
463                         }
464                         request = DCPTime ();
465                 }
466
467                 Frame const content_frame = dcp_to_resampled_audio (i, request);
468
469                 BOOST_FOREACH (AudioStreamPtr j, i->content->audio->streams ()) {
470
471                         if (j->channels() == 0) {
472                                 /* Some content (e.g. DCPs) can have streams with no channels */
473                                 continue;
474                         }
475
476                         /* Audio from this piece's decoder stream (which might be more or less than what we asked for) */
477                         ContentAudio all = decoder->get (j, content_frame, request_frames, accurate);
478
479                         /* Gain */
480                         if (i->content->audio->gain() != 0) {
481                                 shared_ptr<AudioBuffers> gain (new AudioBuffers (all.audio));
482                                 gain->apply_gain (i->content->audio->gain ());
483                                 all.audio = gain;
484                         }
485
486                         /* Remap channels */
487                         shared_ptr<AudioBuffers> dcp_mapped (new AudioBuffers (_film->audio_channels(), all.audio->frames()));
488                         dcp_mapped->make_silent ();
489                         AudioMapping map = j->mapping ();
490                         for (int in = 0; in < map.input_channels(); ++in) {
491                                 for (int out = 0; out < _film->audio_channels(); ++out) {
492                                         if (map.get (in, out) > 0) {
493                                                 dcp_mapped->accumulate_channel (
494                                                         all.audio.get(),
495                                                         in,
496                                                         out,
497                                                         map.get (in, out)
498                                                         );
499                                         }
500                                 }
501                         }
502
503                         if (_audio_processor) {
504                                 dcp_mapped = _audio_processor->run (dcp_mapped, _film->audio_channels ());
505                         }
506
507                         all.audio = dcp_mapped;
508
509                         audio->accumulate_frames (
510                                 all.audio.get(),
511                                 content_frame - all.frame,
512                                 offset.frames_round (_film->audio_frame_rate()),
513                                 min (Frame (all.audio->frames()), request_frames)
514                                 );
515                 }
516         }
517
518         return audio;
519 }
520
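/** @return the frame within the given piece's video content which corresponds to the DCPTime t */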
521 Frame
522 Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
523 {
524         DCPTime s = t - piece->content->position ();
525         s = min (piece->content->length_after_trim(), s);
526         s = max (DCPTime(), s + DCPTime (piece->content->trim_start(), piece->frc));
527
528         /* It might seem more logical here to convert s to a ContentTime (using the FrameRateChange)
529            then convert that ContentTime to frames at the content's rate.  However this fails for
530            situations like content at 29.9978733fps, DCP at 30fps.  The accuracy of the Time type is not
531            enough to distinguish between the two with low values of time (e.g. 3200 in Time units).
532
533            Instead we convert the DCPTime using the DCP video rate then account for any skip/repeat.
534         */
535         return s.frames_floor (piece->frc.dcp) / piece->frc.factor ();
536 }
537
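/** @return the DCPTime corresponding to frame f of the given piece's video content */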
538 DCPTime
539 Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
540 {
541         /* See comment in dcp_to_content_video */
542         DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime (piece->content->trim_start (), piece->frc);
543         return max (DCPTime (), d + piece->content->position ());
544 }
545
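/** @return the frame within the given piece's resampled audio which corresponds to the DCPTime t */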
546 Frame
547 Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
548 {
549         DCPTime s = t - piece->content->position ();
550         s = min (piece->content->length_after_trim(), s);
551         /* See notes in dcp_to_content_video */
552         return max (DCPTime (), DCPTime (piece->content->trim_start (), piece->frc) + s).frames_floor (_film->audio_frame_rate ());
553 }
554
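/** @return the ContentTime within the given piece's subtitle content which corresponds to the DCPTime t */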
555 ContentTime
556 Player::dcp_to_content_subtitle (shared_ptr<const Piece> piece, DCPTime t) const
557 {
558         DCPTime s = t - piece->content->position ();
559         s = min (piece->content->length_after_trim(), s);
560         return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
561 }
562
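/** @return the DCPTime corresponding to the ContentTime t within the given piece's subtitle content */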
563 DCPTime
564 Player::content_subtitle_to_dcp (shared_ptr<const Piece> piece, ContentTime t) const
565 {
566         return max (DCPTime (), DCPTime (t - piece->content->trim_start(), piece->frc) + piece->content->position());
567 }
568
569 /** @param burnt true to return only subtitles to be burnt, false to return only
570  *  subtitles that should not be burnt.  This parameter will be ignored if
571  *  _always_burn_subtitles is true; in this case, all subtitles will be returned.
572  */
573 PlayerSubtitles
574 Player::get_subtitles (DCPTime time, DCPTime length, bool starting, bool burnt, bool accurate)
575 {
576         list<shared_ptr<Piece> > subs = overlaps (time, time + length, has_subtitle);
577
578         PlayerSubtitles ps (time, length);
579
580         for (list<shared_ptr<Piece> >::const_iterator j = subs.begin(); j != subs.end(); ++j) {
581                 if (!(*j)->content->subtitle->use () || (!_always_burn_subtitles && (burnt != (*j)->content->subtitle->burn ()))) {
582                         continue;
583                 }
584
585                 shared_ptr<DCPContent> dcp_content = dynamic_pointer_cast<DCPContent> ((*j)->content);
586                 if (dcp_content && dcp_content->reference_subtitle () && !_play_referenced) {
587                         continue;
588                 }
589
590                 shared_ptr<SubtitleDecoder> subtitle_decoder = (*j)->decoder->subtitle;
591                 ContentTime const from = dcp_to_content_subtitle (*j, time);
592                 /* XXX: this video_frame_rate() should be the rate that the subtitle content has been prepared for */
593                 ContentTime const to = from + ContentTime::from_frames (1, _film->video_frame_rate ());
594
595                 list<ContentImageSubtitle> image = subtitle_decoder->get_image (ContentTimePeriod (from, to), starting, accurate);
596                 for (list<ContentImageSubtitle>::iterator i = image.begin(); i != image.end(); ++i) {
597
598                         /* Apply content's subtitle offsets */
599                         i->sub.rectangle.x += (*j)->content->subtitle->x_offset ();
600                         i->sub.rectangle.y += (*j)->content->subtitle->y_offset ();
601
602                         /* Apply content's subtitle scale */
603                         i->sub.rectangle.width *= (*j)->content->subtitle->x_scale ();
604                         i->sub.rectangle.height *= (*j)->content->subtitle->y_scale ();
605
606                         /* Apply a corrective translation to keep the subtitle centred after that scale */
607                         i->sub.rectangle.x -= i->sub.rectangle.width * ((*j)->content->subtitle->x_scale() - 1);
608                         i->sub.rectangle.y -= i->sub.rectangle.height * ((*j)->content->subtitle->y_scale() - 1);
609
610                         ps.image.push_back (i->sub);
611                 }
612
613                 list<ContentTextSubtitle> text = subtitle_decoder->get_text (ContentTimePeriod (from, to), starting, accurate);
614                 BOOST_FOREACH (ContentTextSubtitle& ts, text) {
615                         BOOST_FOREACH (dcp::SubtitleString s, ts.subs) {
616                                 s.set_h_position (s.h_position() + (*j)->content->subtitle->x_offset ());
617                                 s.set_v_position (s.v_position() + (*j)->content->subtitle->y_offset ());
618                                 float const xs = (*j)->content->subtitle->x_scale();
619                                 float const ys = (*j)->content->subtitle->y_scale();
620                                 float size = s.size();
621
622                                 /* Adjust size to express the common part of the scaling;
623                                    this multiplies size by max (xs, ys), so if xs = ys = 0.5 the size is halved.
624                                 */
625                                 if (xs > 1e-5 && ys > 1e-5) {
626                                         size *= 1 / min (1 / xs, 1 / ys);
627                                 }
628                                 s.set_size (size);
629
630                                 /* Then express aspect ratio changes */
631                                 if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) {
632                                         s.set_aspect_adjust (xs / ys);
633                                 }
634                                 s.set_in (dcp::Time(content_subtitle_to_dcp (*j, ts.period().from).seconds(), 1000));
635                                 s.set_out (dcp::Time(content_subtitle_to_dcp (*j, ts.period().to).seconds(), 1000));
636                                 ps.text.push_back (s);
637                                 ps.add_fonts ((*j)->content->subtitle->fonts ());
638                         }
639                 }
640         }
641
642         return ps;
643 }
644
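/** @return the fonts used by the subtitle content of all our pieces */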
645 list<shared_ptr<Font> >
646 Player::get_subtitle_fonts ()
647 {
648         if (!_have_valid_pieces) {
649                 setup_pieces ();
650         }
651
652         list<shared_ptr<Font> > fonts;
653         BOOST_FOREACH (shared_ptr<Piece>& p, _pieces) {
654                 if (p->content->subtitle) {
655                         /* XXX: things may go wrong if there are duplicate font IDs
656                            with different font files.
657                         */
658                         list<shared_ptr<Font> > f = p->content->subtitle->fonts ();
659                         copy (f.begin(), f.end(), back_inserter (fonts));
660                 }
661         }
662
663         return fonts;
664 }
665
666 /** Set this player never to produce any video data */
667 void
668 Player::set_ignore_video ()
669 {
670         _ignore_video = true;
671 }
672
673 /** Set this player never to produce any audio data */
674 void
675 Player::set_ignore_audio ()
676 {
677         _ignore_audio = true;
678 }
679
680 /** Set whether or not this player should always burn text subtitles into the image,
681  *  regardless of the content settings.
682  *  @param burn true to always burn subtitles, false to obey content settings.
683  */
684 void
685 Player::set_always_burn_subtitles (bool burn)
686 {
687         _always_burn_subtitles = burn;
688 }
689
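/** Set this player to favour speed over quality when decoding and scaling */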
690 void
691 Player::set_fast ()
692 {
693         _fast = true;
694         _have_valid_pieces = false;
695 }
696
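/** Set this player to play content even if it is marked to be referenced from an existing DCP, rather than skipping it */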
697 void
698 Player::set_play_referenced ()
699 {
700         _play_referenced = true;
701         _have_valid_pieces = false;
702 }
703
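/** @return the assets (picture, sound, subtitle) of any DCP content which is to be referenced by the
 *  output rather than re-encoded, along with the periods that they cover.
 */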
704 list<ReferencedReelAsset>
705 Player::get_reel_assets ()
706 {
707         list<ReferencedReelAsset> a;
708
709         BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
710                 shared_ptr<DCPContent> j = dynamic_pointer_cast<DCPContent> (i);
711                 if (!j) {
712                         continue;
713                 }
714
715                 scoped_ptr<DCPDecoder> decoder;
716                 try {
717                         decoder.reset (new DCPDecoder (j, _film->log(), false));
718                 } catch (...) {
719                         return a; /* the DCP cannot be read, so just return what we have so far */
720                 }
721
722                 int64_t offset = 0;
723                 BOOST_FOREACH (shared_ptr<dcp::Reel> k, decoder->reels()) {
724                         DCPTime const from = i->position() + DCPTime::from_frames (offset, _film->video_frame_rate());
725                         if (j->reference_video ()) {
726                                 a.push_back (
727                                         ReferencedReelAsset (
728                                                 k->main_picture (),
729                                                 DCPTimePeriod (from, from + DCPTime::from_frames (k->main_picture()->duration(), _film->video_frame_rate()))
730                                                 )
731                                         );
732                         }
733
734                         if (j->reference_audio ()) {
735                                 a.push_back (
736                                         ReferencedReelAsset (
737                                                 k->main_sound (),
738                                                 DCPTimePeriod (from, from + DCPTime::from_frames (k->main_sound()->duration(), _film->video_frame_rate()))
739                                                 )
740                                         );
741                         }
742
743                         if (j->reference_subtitle ()) {
744                                 DCPOMATIC_ASSERT (k->main_subtitle ());
745                                 a.push_back (
746                                         ReferencedReelAsset (
747                                                 k->main_subtitle (),
748                                                 DCPTimePeriod (from, from + DCPTime::from_frames (k->main_subtitle()->duration(), _film->video_frame_rate()))
749                                                 )
750                                         );
751                         }
752
753                         /* Assume that main picture duration is the length of the reel */
754                         offset += k->main_picture()->duration ();
755                 }
756         }
757
758         return a;
759 }
760
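/** @return pieces which satisfy `valid' and whose content overlaps the period from `from' to `to' */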
761 list<shared_ptr<Piece> >
762 Player::overlaps (DCPTime from, DCPTime to, boost::function<bool (Content *)> valid)
763 {
764         if (!_have_valid_pieces) {
765                 setup_pieces ();
766         }
767
768         list<shared_ptr<Piece> > overlaps;
769         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
770                 if (valid (i->content.get ()) && i->content->position() < to && i->content->end() > from) {
771                         overlaps.push_back (i);
772                 }
773         }
774
775         return overlaps;
776 }
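
For orientation, a consumer of this class (the encoder or a preview viewer, say) presumably drives it roughly as in the sketch below.  The loop, and the calls to Film::playlist() and Film::length(), are illustrative assumptions; only the Player calls themselves appear in this file:

    /* Given a shared_ptr<const Film> film (the name is illustrative): */
    shared_ptr<Player> player (new Player (film, film->playlist ()));
    DCPTime const one_frame = DCPTime::from_frames (1, film->video_frame_rate ());

    for (DCPTime t; t < film->length (); t = t + one_frame) {
            /* One video frame's worth of picture (with any burnt-in subtitles) and its sound */
            list<shared_ptr<PlayerVideo> > video = player->get_video (t, true);
            shared_ptr<AudioBuffers> audio = player->get_audio (t, one_frame, true);
            /* ... encode or display video and audio ... */
    }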