Back-end for very basic and hacky VF support for a DCP imported as content.
[dcpomatic.git] / src / lib / player.cc
/*
    Copyright (C) 2013-2015 Carl Hetherington <cth@carlh.net>

    This program is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program; if not, write to the Free Software
    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.

*/
#include "player.h"
#include "film.h"
#include "ffmpeg_decoder.h"
#include "audio_buffers.h"
#include "ffmpeg_content.h"
#include "image_decoder.h"
#include "image_content.h"
#include "sndfile_decoder.h"
#include "sndfile_content.h"
#include "subtitle_content.h"
#include "subrip_decoder.h"
#include "subrip_content.h"
#include "dcp_content.h"
#include "job.h"
#include "image.h"
#include "raw_image_proxy.h"
#include "ratio.h"
#include "log.h"
#include "render_subtitles.h"
#include "config.h"
#include "content_video.h"
#include "player_video.h"
#include "frame_rate_change.h"
#include "dcp_decoder.h"
#include "dcp_subtitle_content.h"
#include "dcp_subtitle_decoder.h"
#include "audio_processor.h"
#include "playlist.h"
#include <dcp/reel.h>
#include <dcp/reel_sound_asset.h>
#include <dcp/reel_subtitle_asset.h>
#include <dcp/reel_picture_asset.h>
#include <boost/foreach.hpp>
#include <stdint.h>
#include <algorithm>
#include <iostream>

#include "i18n.h"

#define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), Log::TYPE_GENERAL);

using std::list;
using std::cout;
using std::min;
using std::max;
using std::vector;
using std::pair;
using std::map;
using std::make_pair;
using std::copy;
using boost::shared_ptr;
using boost::weak_ptr;
using boost::dynamic_pointer_cast;
using boost::optional;

Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
        : _film (film)
        , _playlist (playlist)
        , _have_valid_pieces (false)
        , _ignore_video (false)
        , _ignore_audio (false)
        , _always_burn_subtitles (false)
        , _fast (false)
        , _play_referenced (false)
{
        _film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
        _playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
        _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::playlist_content_changed, this, _1, _2, _3));
        set_video_container_size (_film->frame_size ());

        film_changed (Film::AUDIO_PROCESSOR);
}

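/** Rebuild our list of Pieces from the playlist, creating a decoder and a
 *  FrameRateChange for each piece of content.
 */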
void
Player::setup_pieces ()
{
        list<shared_ptr<Piece> > old_pieces = _pieces;
        _pieces.clear ();

        BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {

                if (!i->paths_valid ()) {
                        continue;
                }

                shared_ptr<Decoder> decoder;
                optional<FrameRateChange> frc;

                /* Work out a FrameRateChange for the best overlap video for this content, in case we need it below */
                DCPTime best_overlap_t;
                shared_ptr<VideoContent> best_overlap;
                BOOST_FOREACH (shared_ptr<Content> j, _playlist->content ()) {
                        shared_ptr<VideoContent> vc = dynamic_pointer_cast<VideoContent> (j);
                        if (!vc) {
                                continue;
                        }

                        DCPTime const overlap = min (vc->end(), i->end()) - max (vc->position(), i->position());
                        if (overlap > best_overlap_t) {
                                best_overlap = vc;
                                best_overlap_t = overlap;
                        }
                }

                optional<FrameRateChange> best_overlap_frc;
                if (best_overlap) {
                        best_overlap_frc = FrameRateChange (best_overlap->video_frame_rate(), _film->video_frame_rate ());
                } else {
                        /* No video overlap; e.g. if the DCP is just audio */
                        best_overlap_frc = FrameRateChange (_film->video_frame_rate(), _film->video_frame_rate ());
                }

                /* FFmpeg */
                shared_ptr<const FFmpegContent> fc = dynamic_pointer_cast<const FFmpegContent> (i);
                if (fc) {
                        decoder.reset (new FFmpegDecoder (fc, _film->log(), _fast));
                        frc = FrameRateChange (fc->video_frame_rate(), _film->video_frame_rate());
                }

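                /* DCP */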
                shared_ptr<const DCPContent> dc = dynamic_pointer_cast<const DCPContent> (i);
                if (dc) {
                        decoder.reset (new DCPDecoder (dc, _fast));
                        frc = FrameRateChange (dc->video_frame_rate(), _film->video_frame_rate());
                }

                /* ImageContent */
                shared_ptr<const ImageContent> ic = dynamic_pointer_cast<const ImageContent> (i);
                if (ic) {
                        /* See if we can re-use an old ImageDecoder */
                        for (list<shared_ptr<Piece> >::const_iterator j = old_pieces.begin(); j != old_pieces.end(); ++j) {
                                shared_ptr<ImageDecoder> imd = dynamic_pointer_cast<ImageDecoder> ((*j)->decoder);
                                if (imd && imd->content() == ic) {
                                        decoder = imd;
                                }
                        }

                        if (!decoder) {
                                decoder.reset (new ImageDecoder (ic));
                        }

                        frc = FrameRateChange (ic->video_frame_rate(), _film->video_frame_rate());
                }

                /* SndfileContent */
                shared_ptr<const SndfileContent> sc = dynamic_pointer_cast<const SndfileContent> (i);
                if (sc) {
                        decoder.reset (new SndfileDecoder (sc, _fast));
                        frc = best_overlap_frc;
                }

                /* SubRipContent */
                shared_ptr<const SubRipContent> rc = dynamic_pointer_cast<const SubRipContent> (i);
                if (rc) {
                        decoder.reset (new SubRipDecoder (rc));
                        frc = best_overlap_frc;
                }

                /* DCPSubtitleContent */
                shared_ptr<const DCPSubtitleContent> dsc = dynamic_pointer_cast<const DCPSubtitleContent> (i);
                if (dsc) {
                        decoder.reset (new DCPSubtitleDecoder (dsc));
                        frc = best_overlap_frc;
                }

                shared_ptr<VideoDecoder> vd = dynamic_pointer_cast<VideoDecoder> (decoder);
                if (vd && _ignore_video) {
                        vd->set_ignore_video ();
                }

                shared_ptr<AudioDecoder> ad = dynamic_pointer_cast<AudioDecoder> (decoder);
                if (ad && _ignore_audio) {
                        ad->set_ignore_audio ();
                }

                _pieces.push_back (shared_ptr<Piece> (new Piece (i, decoder, frc.get ())));
        }

        _have_valid_pieces = true;
}

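/** React to a change in one of the properties of a piece of content in the playlist,
 *  invalidating our pieces and/or emitting Changed as appropriate.
 */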
void
Player::playlist_content_changed (weak_ptr<Content> w, int property, bool frequent)
{
        shared_ptr<Content> c = w.lock ();
        if (!c) {
                return;
        }

        if (
                property == ContentProperty::POSITION ||
                property == ContentProperty::LENGTH ||
                property == ContentProperty::TRIM_START ||
                property == ContentProperty::TRIM_END ||
                property == ContentProperty::PATH ||
                property == VideoContentProperty::VIDEO_FRAME_TYPE ||
                property == DCPContentProperty::CAN_BE_PLAYED
                ) {

                _have_valid_pieces = false;
                Changed (frequent);

        } else if (
                property == SubtitleContentProperty::USE_SUBTITLES ||
                property == SubtitleContentProperty::SUBTITLE_X_OFFSET ||
                property == SubtitleContentProperty::SUBTITLE_Y_OFFSET ||
                property == SubtitleContentProperty::SUBTITLE_X_SCALE ||
                property == SubtitleContentProperty::SUBTITLE_Y_SCALE ||
                property == SubtitleContentProperty::FONTS ||
                property == VideoContentProperty::VIDEO_CROP ||
                property == VideoContentProperty::VIDEO_SCALE ||
                property == VideoContentProperty::VIDEO_FRAME_RATE ||
                property == VideoContentProperty::VIDEO_FADE_IN ||
                property == VideoContentProperty::VIDEO_FADE_OUT ||
                property == VideoContentProperty::COLOUR_CONVERSION
                ) {

                Changed (frequent);
        }
}

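/** Set the size of the container into which video will be scaled, and re-make
 *  the black frame that is used when there is no video.
 */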
void
Player::set_video_container_size (dcp::Size s)
{
        _video_container_size = s;

        _black_image.reset (new Image (PIX_FMT_RGB24, _video_container_size, true));
        _black_image->make_black ();
}

void
Player::playlist_changed ()
{
        _have_valid_pieces = false;
        Changed (false);
}

void
Player::film_changed (Film::Property p)
{
        /* Here we should notice Film properties that affect our output, and
           alert listeners that our output now would be different to how it was
           last time we were run.
        */

        if (p == Film::CONTAINER || p == Film::VIDEO_FRAME_RATE) {
                Changed (false);
        } else if (p == Film::AUDIO_PROCESSOR) {
                if (_film->audio_processor ()) {
                        _audio_processor = _film->audio_processor()->clone (_film->audio_frame_rate ());
                }
        }
}

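/** Scale and position image subtitles so that they can be rendered into a frame
 *  of size _video_container_size.
 */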
list<PositionImage>
Player::transform_image_subtitles (list<ImageSubtitle> subs) const
{
        list<PositionImage> all;

        for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
                if (!i->image) {
                        continue;
                }

                /* We will scale the subtitle up to fit _video_container_size */
                dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);

                /* Then we need a corrective translation, consisting of two parts:
                 *
                 * 1.  that which is the result of the scaling of the subtitle by _video_container_size; this will be
                 *     rect.x * _video_container_size.width and rect.y * _video_container_size.height.
                 *
                 * 2.  that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
                 *     (width_before_subtitle_scale * (1 - subtitle_x_scale) / 2) and
                 *     (height_before_subtitle_scale * (1 - subtitle_y_scale) / 2).
                 *
                 * Combining these two translations gives these expressions.
                 */

                all.push_back (
                        PositionImage (
                                i->image->scale (
                                        scaled_size,
                                        dcp::YUV_TO_RGB_REC601,
                                        i->image->pixel_format (),
                                        true
                                        ),
                                Position<int> (
                                        lrint (_video_container_size.width * i->rectangle.x),
                                        lrint (_video_container_size.height * i->rectangle.y)
                                        )
                                )
                        );
        }

        return all;
}

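/** @return a black frame of _video_container_size, used when there is no video at a given time */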
shared_ptr<PlayerVideo>
Player::black_player_video_frame (DCPTime time) const
{
        return shared_ptr<PlayerVideo> (
                new PlayerVideo (
                        shared_ptr<const ImageProxy> (new RawImageProxy (_black_image)),
                        time,
                        Crop (),
                        optional<double> (),
                        _video_container_size,
                        _video_container_size,
                        EYES_BOTH,
                        PART_WHOLE,
                        PresetColourConversion::all().front().conversion
                        )
                );
}

/** @return All PlayerVideos at the given time.  There may be none (if the content
 *  at `time' is a DCP which we are passing through, i.e. referring to by reference),
 *  or two if we have 3D.
 */
list<shared_ptr<PlayerVideo> >
Player::get_video (DCPTime time, bool accurate)
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        /* Find subtitles for possible burn-in */

        PlayerSubtitles ps = get_subtitles (time, DCPTime::from_frames (1, _film->video_frame_rate ()), false, true);

        list<PositionImage> sub_images;

        /* Image subtitles */
        list<PositionImage> c = transform_image_subtitles (ps.image);
        copy (c.begin(), c.end(), back_inserter (sub_images));

        /* Text subtitles (rendered to an image) */
        if (!ps.text.empty ()) {
                list<PositionImage> s = render_subtitles (ps.text, ps.fonts, _video_container_size);
                copy (s.begin (), s.end (), back_inserter (sub_images));
        }

        optional<PositionImage> subtitles;
        if (!sub_images.empty ()) {
                subtitles = merge (sub_images);
        }

        /* Find pieces containing video which is happening now */

        list<shared_ptr<Piece> > ov = overlaps<VideoContent> (
                time,
                time + DCPTime::from_frames (1, _film->video_frame_rate ()) - DCPTime::delta()
                );

        list<shared_ptr<PlayerVideo> > pvf;

        if (ov.empty ()) {
                /* No video content at this time */
                pvf.push_back (black_player_video_frame (time));
        } else {
                /* Some video content at this time */
                shared_ptr<Piece> last = *(ov.rbegin ());
                VideoFrameType const last_type = dynamic_pointer_cast<VideoContent> (last->content)->video_frame_type ();

                /* Get video from appropriate piece(s) */
                BOOST_FOREACH (shared_ptr<Piece> piece, ov) {

                        shared_ptr<VideoDecoder> decoder = dynamic_pointer_cast<VideoDecoder> (piece->decoder);
                        DCPOMATIC_ASSERT (decoder);
                        shared_ptr<VideoContent> video_content = dynamic_pointer_cast<VideoContent> (piece->content);
                        DCPOMATIC_ASSERT (video_content);

                        shared_ptr<DCPContent> dcp_content = dynamic_pointer_cast<DCPContent> (video_content);
                        if (dcp_content && dcp_content->reference_video () && !_play_referenced) {
                                continue;
                        }

                        bool const use =
                                /* always use the last video */
                                piece == last ||
                                /* with a corresponding L/R eye if appropriate */
                                (last_type == VIDEO_FRAME_TYPE_3D_LEFT && video_content->video_frame_type() == VIDEO_FRAME_TYPE_3D_RIGHT) ||
                                (last_type == VIDEO_FRAME_TYPE_3D_RIGHT && video_content->video_frame_type() == VIDEO_FRAME_TYPE_3D_LEFT);

                        if (use) {
                                /* We want to use this piece */
                                list<ContentVideo> content_video = decoder->get_video (dcp_to_content_video (piece, time), accurate);
                                if (content_video.empty ()) {
                                        pvf.push_back (black_player_video_frame (time));
                                } else {
                                        dcp::Size image_size = video_content->scale().size (video_content, _video_container_size, _film->frame_size ());

                                        for (list<ContentVideo>::const_iterator i = content_video.begin(); i != content_video.end(); ++i) {
                                                pvf.push_back (
                                                        shared_ptr<PlayerVideo> (
                                                                new PlayerVideo (
                                                                        i->image,
                                                                        content_video_to_dcp (piece, i->frame),
                                                                        video_content->crop (),
                                                                        video_content->fade (i->frame),
                                                                        image_size,
                                                                        _video_container_size,
                                                                        i->eyes,
                                                                        i->part,
                                                                        video_content->colour_conversion ()
                                                                        )
                                                                )
                                                        );
                                        }
                                }
                        } else {
                                /* Discard unused video */
                                decoder->get_video (dcp_to_content_video (piece, time), accurate);
                        }
                }
        }

        if (subtitles) {
                BOOST_FOREACH (shared_ptr<PlayerVideo> p, pvf) {
                        p->set_subtitle (subtitles.get ());
                }
        }

        return pvf;
}

/** @return Audio data or 0 if the only audio data here is referenced DCP data */
shared_ptr<AudioBuffers>
Player::get_audio (DCPTime time, DCPTime length, bool accurate)
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        Frame const length_frames = length.frames_round (_film->audio_frame_rate ());

        shared_ptr<AudioBuffers> audio (new AudioBuffers (_film->audio_channels(), length_frames));
        audio->make_silent ();

        list<shared_ptr<Piece> > ov = overlaps<AudioContent> (time, time + length);
        if (ov.empty ()) {
                return audio;
        }

        bool all_referenced = true;
        BOOST_FOREACH (shared_ptr<Piece> i, ov) {
                shared_ptr<AudioContent> audio_content = dynamic_pointer_cast<AudioContent> (i->content);
                shared_ptr<DCPContent> dcp_content = dynamic_pointer_cast<DCPContent> (i->content);
                if (audio_content && (!dcp_content || !dcp_content->reference_audio ())) {
                        /* There is audio content which is not from a DCP or not set to be referenced */
                        all_referenced = false;
                }
        }

        if (all_referenced && !_play_referenced) {
                return shared_ptr<AudioBuffers> ();
        }

        BOOST_FOREACH (shared_ptr<Piece> i, ov) {

                shared_ptr<AudioContent> content = dynamic_pointer_cast<AudioContent> (i->content);
                DCPOMATIC_ASSERT (content);
                shared_ptr<AudioDecoder> decoder = dynamic_pointer_cast<AudioDecoder> (i->decoder);
                DCPOMATIC_ASSERT (decoder);

                /* The time that we should request from the content */
                DCPTime request = time - DCPTime::from_seconds (content->audio_delay() / 1000.0);
                Frame request_frames = length_frames;
                DCPTime offset;
                if (request < DCPTime ()) {
                        /* We went off the start of the content, so we will need to offset
                           the stuff we get back.
                        */
                        offset = -request;
                        request_frames += request.frames_round (_film->audio_frame_rate ());
                        if (request_frames < 0) {
                                request_frames = 0;
                        }
                        request = DCPTime ();
                }

                Frame const content_frame = dcp_to_resampled_audio (i, request);

                BOOST_FOREACH (AudioStreamPtr j, content->audio_streams ()) {

                        if (j->channels() == 0) {
                                /* Some content (e.g. DCPs) can have streams with no channels */
                                continue;
                        }

                        /* Audio from this piece's decoder stream (which might be more or less than what we asked for) */
                        ContentAudio all = decoder->get_audio (j, content_frame, request_frames, accurate);

                        /* Gain */
                        if (content->audio_gain() != 0) {
                                shared_ptr<AudioBuffers> gain (new AudioBuffers (all.audio));
                                gain->apply_gain (content->audio_gain ());
                                all.audio = gain;
                        }

                        /* Remap channels */
                        shared_ptr<AudioBuffers> dcp_mapped (new AudioBuffers (_film->audio_channels(), all.audio->frames()));
                        dcp_mapped->make_silent ();
                        AudioMapping map = j->mapping ();
                        for (int in = 0; in < map.input_channels(); ++in) {
                                for (int out = 0; out < _film->audio_channels(); ++out) {
                                        if (map.get (in, out) > 0) {
                                                dcp_mapped->accumulate_channel (
                                                        all.audio.get(),
                                                        in,
                                                        out,
                                                        map.get (in, out)
                                                        );
                                        }
                                }
                        }

                        if (_audio_processor) {
                                dcp_mapped = _audio_processor->run (dcp_mapped, _film->audio_channels ());
                        }

                        all.audio = dcp_mapped;

                        audio->accumulate_frames (
                                all.audio.get(),
                                content_frame - all.frame,
                                offset.frames_round (_film->audio_frame_rate()),
                                min (Frame (all.audio->frames()), request_frames)
                                );
                }
        }

        return audio;
}

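/** @return the frame of a piece's video content which should be shown at DCPTime t */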
Frame
Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
{
        shared_ptr<const VideoContent> vc = dynamic_pointer_cast<const VideoContent> (piece->content);
        DCPTime s = t - piece->content->position ();
        s = min (piece->content->length_after_trim(), s);
        s = max (DCPTime(), s + DCPTime (piece->content->trim_start(), piece->frc));

        /* It might seem more logical here to convert s to a ContentTime (using the FrameRateChange)
           then convert that ContentTime to frames at the content's rate.  However this fails for
           situations like content at 29.9978733fps, DCP at 30fps.  The accuracy of the Time type is not
           enough to distinguish between the two with low values of time (e.g. 3200 in Time units).

           Instead we convert the DCPTime using the DCP video rate then account for any skip/repeat.
        */
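        /* For example: 25fps content in a 50fps DCP has each content frame repeated, so the
           DCP frame count found from s is divided by the frame rate change factor (2 in that
           case) to get back to a content frame index.
        */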
        return s.frames_floor (piece->frc.dcp) / piece->frc.factor ();
}

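/** @return the DCPTime at which frame f of a piece's video content will be shown */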
DCPTime
Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
{
        shared_ptr<const VideoContent> vc = dynamic_pointer_cast<const VideoContent> (piece->content);
        /* See comment in dcp_to_content_video */
        DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime (piece->content->trim_start (), piece->frc);
        return max (DCPTime (), d + piece->content->position ());
}

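/** @return the frame within a piece's resampled audio which corresponds to DCPTime t */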
Frame
Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
{
        DCPTime s = t - piece->content->position ();
        s = min (piece->content->length_after_trim(), s);
        /* See notes in dcp_to_content_video */
        return max (DCPTime (), DCPTime (piece->content->trim_start (), piece->frc) + s).frames_floor (_film->audio_frame_rate ());
}

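/** @return the ContentTime within a piece's content which corresponds to DCPTime t */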
ContentTime
Player::dcp_to_content_subtitle (shared_ptr<const Piece> piece, DCPTime t) const
{
        DCPTime s = t - piece->content->position ();
        s = min (piece->content->length_after_trim(), s);
        return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
}

/** @param burnt true to return only subtitles to be burnt, false to return only
 *  subtitles that should not be burnt.  This parameter will be ignored if
 *  _always_burn_subtitles is true; in this case, all subtitles will be returned.
 */
PlayerSubtitles
Player::get_subtitles (DCPTime time, DCPTime length, bool starting, bool burnt)
{
        list<shared_ptr<Piece> > subs = overlaps<SubtitleContent> (time, time + length);

        PlayerSubtitles ps (time, length);

        for (list<shared_ptr<Piece> >::const_iterator j = subs.begin(); j != subs.end(); ++j) {
                shared_ptr<SubtitleContent> subtitle_content = dynamic_pointer_cast<SubtitleContent> ((*j)->content);
                if (!subtitle_content->use_subtitles () || (!_always_burn_subtitles && (burnt != subtitle_content->burn_subtitles ()))) {
                        continue;
                }

                shared_ptr<DCPContent> dcp_content = dynamic_pointer_cast<DCPContent> (subtitle_content);
                if (dcp_content && dcp_content->reference_subtitle () && !_play_referenced) {
                        continue;
                }

                shared_ptr<SubtitleDecoder> subtitle_decoder = dynamic_pointer_cast<SubtitleDecoder> ((*j)->decoder);
                ContentTime const from = dcp_to_content_subtitle (*j, time);
                /* XXX: this video_frame_rate() should be the rate that the subtitle content has been prepared for */
                ContentTime const to = from + ContentTime::from_frames (1, _film->video_frame_rate ());

                list<ContentImageSubtitle> image = subtitle_decoder->get_image_subtitles (ContentTimePeriod (from, to), starting);
                for (list<ContentImageSubtitle>::iterator i = image.begin(); i != image.end(); ++i) {

                        /* Apply content's subtitle offsets */
                        i->sub.rectangle.x += subtitle_content->subtitle_x_offset ();
                        i->sub.rectangle.y += subtitle_content->subtitle_y_offset ();

                        /* Apply content's subtitle scale */
                        i->sub.rectangle.width *= subtitle_content->subtitle_x_scale ();
                        i->sub.rectangle.height *= subtitle_content->subtitle_y_scale ();

                        /* Apply a corrective translation to keep the subtitle centred after that scale */
                        i->sub.rectangle.x -= i->sub.rectangle.width * (subtitle_content->subtitle_x_scale() - 1);
                        i->sub.rectangle.y -= i->sub.rectangle.height * (subtitle_content->subtitle_y_scale() - 1);

                        ps.image.push_back (i->sub);
                }

                list<ContentTextSubtitle> text = subtitle_decoder->get_text_subtitles (ContentTimePeriod (from, to), starting);
                BOOST_FOREACH (ContentTextSubtitle& ts, text) {
                        BOOST_FOREACH (dcp::SubtitleString& s, ts.subs) {
                                s.set_h_position (s.h_position() + subtitle_content->subtitle_x_offset ());
                                s.set_v_position (s.v_position() + subtitle_content->subtitle_y_offset ());
                                float const xs = subtitle_content->subtitle_x_scale();
                                float const ys = subtitle_content->subtitle_y_scale();
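                                /* Scale the text size by the average of the x and y scale factors */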
                                float const average = s.size() * (xs + ys) / 2;
                                s.set_size (average);
                                if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) {
                                        s.set_aspect_adjust (xs / ys);
                                }
                                ps.text.push_back (s);
                                ps.add_fonts (subtitle_content->fonts ());
                        }
                }
        }

        return ps;
}

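/** @return all the fonts used by the subtitle content in the playlist */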
list<shared_ptr<Font> >
Player::get_subtitle_fonts ()
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        list<shared_ptr<Font> > fonts;
        BOOST_FOREACH (shared_ptr<Piece>& p, _pieces) {
                shared_ptr<SubtitleContent> sc = dynamic_pointer_cast<SubtitleContent> (p->content);
                if (sc) {
                        /* XXX: things may go wrong if there are duplicate font IDs
                           with different font files.
                        */
                        list<shared_ptr<Font> > f = sc->fonts ();
                        copy (f.begin(), f.end(), back_inserter (fonts));
                }
        }

        return fonts;
}

/** Set this player never to produce any video data */
void
Player::set_ignore_video ()
{
        _ignore_video = true;
}

/** Set this player never to produce any audio data */
void
Player::set_ignore_audio ()
{
        _ignore_audio = true;
}

/** Set whether or not this player should always burn text subtitles into the image,
 *  regardless of the content settings.
 *  @param burn true to always burn subtitles, false to obey content settings.
 */
void
Player::set_always_burn_subtitles (bool burn)
{
        _always_burn_subtitles = burn;
}

void
Player::set_fast ()
{
        _fast = true;
        _have_valid_pieces = false;
}

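/** Set this player to play content which is marked to be referenced from an
 *  existing DCP, rather than skipping it.
 */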
void
Player::set_play_referenced ()
{
        _play_referenced = true;
        _have_valid_pieces = false;
}

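/** @return the ReelAssets of any DCP content in the playlist which is marked to be
 *  referenced rather than re-encoded.
 */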
list<shared_ptr<dcp::ReelAsset> >
Player::get_reel_assets ()
{
        list<shared_ptr<dcp::ReelAsset> > a;

        BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
                shared_ptr<DCPContent> j = dynamic_pointer_cast<DCPContent> (i);
                if (!j) {
                        continue;
                }
                /* XXX: hack hack hack */
                DCPDecoder decoder (j, false);
                if (j->reference_video ()) {
                        a.push_back (decoder.reels().front()->main_picture ());
                }
                if (j->reference_audio ()) {
                        a.push_back (decoder.reels().front()->main_sound ());
                }
                if (j->reference_subtitle ()) {
                        a.push_back (decoder.reels().front()->main_subtitle ());
                }
        }

        return a;
}