Go back to player having a playlist and not assuming it uses the film's content.
dcpomatic.git: src/lib/player.cc
/*
    Copyright (C) 2013-2015 Carl Hetherington <cth@carlh.net>

    This program is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program; if not, write to the Free Software
    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.

*/

#include "player.h"
#include "film.h"
#include "ffmpeg_decoder.h"
#include "audio_buffers.h"
#include "ffmpeg_content.h"
#include "image_decoder.h"
#include "image_content.h"
#include "sndfile_decoder.h"
#include "sndfile_content.h"
#include "subtitle_content.h"
#include "subrip_decoder.h"
#include "subrip_content.h"
#include "dcp_content.h"
#include "job.h"
#include "image.h"
#include "raw_image_proxy.h"
#include "ratio.h"
#include "log.h"
#include "render_subtitles.h"
#include "config.h"
#include "content_video.h"
#include "player_video.h"
#include "frame_rate_change.h"
#include "dcp_decoder.h"
#include "dcp_subtitle_content.h"
#include "dcp_subtitle_decoder.h"
#include "audio_processor.h"
#include "playlist.h"
#include <boost/foreach.hpp>
#include <stdint.h>
#include <algorithm>

#include "i18n.h"

#define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), Log::TYPE_GENERAL);

using std::list;
using std::cout;
using std::min;
using std::max;
using std::vector;
using std::pair;
using std::map;
using std::make_pair;
using std::copy;
using boost::shared_ptr;
using boost::weak_ptr;
using boost::dynamic_pointer_cast;
using boost::optional;

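/** Construct a Player which produces output for a Film, using the content of a given
 *  Playlist (which need not be the film's own playlist).
 */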
Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
        : _film (film)
        , _playlist (playlist)
        , _have_valid_pieces (false)
        , _ignore_video (false)
        , _always_burn_subtitles (false)
{
        _film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
        _playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
        _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::playlist_content_changed, this, _1, _2, _3));
        set_video_container_size (_film->frame_size ());

        film_changed (Film::AUDIO_PROCESSOR);
}

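/** Rebuild _pieces from the playlist's content: create (or re-use) a decoder for each
 *  piece of content and work out the FrameRateChange that it should use.
 */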
void
Player::setup_pieces ()
{
        list<shared_ptr<Piece> > old_pieces = _pieces;
        _pieces.clear ();

        BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {

                if (!i->paths_valid ()) {
                        continue;
                }

                shared_ptr<Decoder> decoder;
                optional<FrameRateChange> frc;

                /* Work out a FrameRateChange for the video content which best overlaps this content, in case we need it below */
                DCPTime best_overlap_t;
                shared_ptr<VideoContent> best_overlap;
                BOOST_FOREACH (shared_ptr<Content> j, _playlist->content ()) {
                        shared_ptr<VideoContent> vc = dynamic_pointer_cast<VideoContent> (j);
                        if (!vc) {
                                continue;
                        }

                        DCPTime const overlap = min (vc->end(), i->end()) - max (vc->position(), i->position());
                        if (overlap > best_overlap_t) {
                                best_overlap = vc;
                                best_overlap_t = overlap;
                        }
                }

                optional<FrameRateChange> best_overlap_frc;
                if (best_overlap) {
                        best_overlap_frc = FrameRateChange (best_overlap->video_frame_rate(), _film->video_frame_rate ());
                } else {
                        /* No video overlap; e.g. if the DCP is just audio */
                        best_overlap_frc = FrameRateChange (_film->video_frame_rate(), _film->video_frame_rate ());
                }

                /* FFmpeg */
                shared_ptr<const FFmpegContent> fc = dynamic_pointer_cast<const FFmpegContent> (i);
                if (fc) {
                        decoder.reset (new FFmpegDecoder (fc, _film->log()));
                        frc = FrameRateChange (fc->video_frame_rate(), _film->video_frame_rate());
                }

                shared_ptr<const DCPContent> dc = dynamic_pointer_cast<const DCPContent> (i);
                if (dc) {
                        decoder.reset (new DCPDecoder (dc));
                        frc = FrameRateChange (dc->video_frame_rate(), _film->video_frame_rate());
                }

                /* ImageContent */
                shared_ptr<const ImageContent> ic = dynamic_pointer_cast<const ImageContent> (i);
                if (ic) {
                        /* See if we can re-use an old ImageDecoder */
                        for (list<shared_ptr<Piece> >::const_iterator j = old_pieces.begin(); j != old_pieces.end(); ++j) {
                                shared_ptr<ImageDecoder> imd = dynamic_pointer_cast<ImageDecoder> ((*j)->decoder);
                                if (imd && imd->content() == ic) {
                                        decoder = imd;
                                }
                        }

                        if (!decoder) {
                                decoder.reset (new ImageDecoder (ic));
                        }

                        frc = FrameRateChange (ic->video_frame_rate(), _film->video_frame_rate());
                }

                /* SndfileContent */
                shared_ptr<const SndfileContent> sc = dynamic_pointer_cast<const SndfileContent> (i);
                if (sc) {
                        decoder.reset (new SndfileDecoder (sc));
                        frc = best_overlap_frc;
                }

                /* SubRipContent */
                shared_ptr<const SubRipContent> rc = dynamic_pointer_cast<const SubRipContent> (i);
                if (rc) {
                        decoder.reset (new SubRipDecoder (rc));
                        frc = best_overlap_frc;
                }

                /* DCPSubtitleContent */
                shared_ptr<const DCPSubtitleContent> dsc = dynamic_pointer_cast<const DCPSubtitleContent> (i);
                if (dsc) {
                        decoder.reset (new DCPSubtitleDecoder (dsc));
                        frc = best_overlap_frc;
                }

                shared_ptr<VideoDecoder> vd = dynamic_pointer_cast<VideoDecoder> (decoder);
                if (vd && _ignore_video) {
                        vd->set_ignore_video ();
                }

                _pieces.push_back (shared_ptr<Piece> (new Piece (i, decoder, frc.get ())));
        }

        _have_valid_pieces = true;
}

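/** Called when a property of some content in the playlist changes.  Changes which alter
 *  the set of pieces (position, length, trims, paths and so on) invalidate _pieces and
 *  emit Changed; changes which only affect rendering just emit Changed.
 */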
void
Player::playlist_content_changed (weak_ptr<Content> w, int property, bool frequent)
{
        shared_ptr<Content> c = w.lock ();
        if (!c) {
                return;
        }

        if (
                property == ContentProperty::POSITION ||
                property == ContentProperty::LENGTH ||
                property == ContentProperty::TRIM_START ||
                property == ContentProperty::TRIM_END ||
                property == ContentProperty::PATH ||
                property == VideoContentProperty::VIDEO_FRAME_TYPE ||
                property == DCPContentProperty::CAN_BE_PLAYED
                ) {

                _have_valid_pieces = false;
                Changed (frequent);

        } else if (
                property == SubtitleContentProperty::USE_SUBTITLES ||
                property == SubtitleContentProperty::SUBTITLE_X_OFFSET ||
                property == SubtitleContentProperty::SUBTITLE_Y_OFFSET ||
                property == SubtitleContentProperty::SUBTITLE_X_SCALE ||
                property == SubtitleContentProperty::SUBTITLE_Y_SCALE ||
                property == VideoContentProperty::VIDEO_CROP ||
                property == VideoContentProperty::VIDEO_SCALE ||
                property == VideoContentProperty::VIDEO_FRAME_RATE ||
                property == VideoContentProperty::VIDEO_FADE_IN ||
                property == VideoContentProperty::VIDEO_FADE_OUT
                ) {

                Changed (frequent);
        }
}

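/** Set the size of the container that video will be scaled into, and re-make the black
 *  frame which is used when there is no video at a given time.
 */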
void
Player::set_video_container_size (dcp::Size s)
{
        _video_container_size = s;

        _black_image.reset (new Image (PIX_FMT_RGB24, _video_container_size, true));
        _black_image->make_black ();
}

void
Player::playlist_changed ()
{
        _have_valid_pieces = false;
        Changed (false);
}

void
Player::film_changed (Film::Property p)
{
        /* Here we should notice Film properties that affect our output, and
           alert listeners that our output now would be different to how it was
           last time we were run.
        */

        if (p == Film::CONTAINER || p == Film::VIDEO_FRAME_RATE) {
                Changed (false);
        } else if (p == Film::AUDIO_PROCESSOR) {
                if (_film->audio_processor ()) {
                        _audio_processor = _film->audio_processor()->clone (_film->audio_frame_rate ());
                }
        }
}

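/** Convert a list of image subtitles (whose rectangles are expressed as fractions of the
 *  video container) into PositionImages scaled and positioned in pixels for
 *  _video_container_size.
 */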
list<PositionImage>
Player::transform_image_subtitles (list<ImageSubtitle> subs) const
{
        list<PositionImage> all;

        for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
                if (!i->image) {
                        continue;
                }

                /* We will scale the subtitle up to fit _video_container_size */
                dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);

                /* Then we need a corrective translation, consisting of two parts:
                 *
                 * 1.  that which is the result of the scaling of the subtitle by _video_container_size; this will be
                 *     rect.x * _video_container_size.width and rect.y * _video_container_size.height.
                 *
                 * 2.  that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
                 *     (width_before_subtitle_scale * (1 - subtitle_x_scale) / 2) and
                 *     (height_before_subtitle_scale * (1 - subtitle_y_scale) / 2).
                 *
                 * Combining these two translations gives these expressions.
                 */

                all.push_back (
                        PositionImage (
                                i->image->scale (
                                        scaled_size,
                                        dcp::YUV_TO_RGB_REC601,
                                        i->image->pixel_format (),
                                        true
                                        ),
                                Position<int> (
                                        rint (_video_container_size.width * i->rectangle.x),
                                        rint (_video_container_size.height * i->rectangle.y)
                                        )
                                )
                        );
        }

        return all;
}

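/** @return a black frame, the size of the video container, for use when there is no
 *  video content at a given time.
 */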
shared_ptr<PlayerVideo>
Player::black_player_video_frame (DCPTime time) const
{
        return shared_ptr<PlayerVideo> (
                new PlayerVideo (
                        shared_ptr<const ImageProxy> (new RawImageProxy (_black_image)),
                        time,
                        Crop (),
                        optional<float> (),
                        _video_container_size,
                        _video_container_size,
                        EYES_BOTH,
                        PART_WHOLE,
                        PresetColourConversion::all().front().conversion
                )
        );
}

/** @return All PlayerVideos at the given time (there may be two frames for 3D) */
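/** @param time DCP time of the frame(s) required.
 *  @param accurate Passed through to VideoDecoder::get_video(); when false the decoder
 *  may settle for an approximate (faster) result.
 */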
list<shared_ptr<PlayerVideo> >
Player::get_video (DCPTime time, bool accurate)
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        /* Find subtitles for possible burn-in */

        PlayerSubtitles ps = get_subtitles (time, DCPTime::from_frames (1, _film->video_frame_rate ()), false, true);

        list<PositionImage> sub_images;

        /* Image subtitles */
        list<PositionImage> c = transform_image_subtitles (ps.image);
        copy (c.begin(), c.end(), back_inserter (sub_images));

        /* Text subtitles (rendered to an image) */
        if (!ps.text.empty ()) {
                list<PositionImage> s = render_subtitles (ps.text, _video_container_size);
                copy (s.begin (), s.end (), back_inserter (sub_images));
        }

        optional<PositionImage> subtitles;
        if (!sub_images.empty ()) {
                subtitles = merge (sub_images);
        }

        /* Find video */

        list<shared_ptr<Piece> > ov = overlaps<VideoContent> (
                time,
                time + DCPTime::from_frames (1, _film->video_frame_rate ()) - DCPTime::delta()
                );

        list<shared_ptr<PlayerVideo> > pvf;

        if (ov.empty ()) {
                /* No video content at this time */
                pvf.push_back (black_player_video_frame (time));
        } else {
                /* Create a PlayerVideo from the content's video at this time */

                shared_ptr<Piece> piece = ov.back ();
                shared_ptr<VideoDecoder> decoder = dynamic_pointer_cast<VideoDecoder> (piece->decoder);
                DCPOMATIC_ASSERT (decoder);
                shared_ptr<VideoContent> video_content = dynamic_pointer_cast<VideoContent> (piece->content);
                DCPOMATIC_ASSERT (video_content);

                list<ContentVideo> content_video = decoder->get_video (dcp_to_content_video (piece, time), accurate);
                if (content_video.empty ()) {
                        pvf.push_back (black_player_video_frame (time));
                        return pvf;
                }

                dcp::Size image_size = video_content->scale().size (video_content, _video_container_size, _film->frame_size ());

                for (list<ContentVideo>::const_iterator i = content_video.begin(); i != content_video.end(); ++i) {
                        pvf.push_back (
                                shared_ptr<PlayerVideo> (
                                        new PlayerVideo (
                                                i->image,
                                                content_video_to_dcp (piece, i->frame),
                                                video_content->crop (),
                                                video_content->fade (i->frame),
                                                image_size,
                                                _video_container_size,
                                                i->eyes,
                                                i->part,
                                                video_content->colour_conversion ()
                                                )
                                        )
                                );
                }
        }

        if (subtitles) {
                BOOST_FOREACH (shared_ptr<PlayerVideo> p, pvf) {
                        p->set_subtitle (subtitles.get ());
                }
        }

        return pvf;
}

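/** @return Audio for a period of the playlist, remapped to the film's channel layout;
 *  the returned buffers are silent wherever no content provides audio.
 *  @param time Start of the period.
 *  @param length Length of the period.
 *  @param accurate Passed through to the audio decoders.
 */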
shared_ptr<AudioBuffers>
Player::get_audio (DCPTime time, DCPTime length, bool accurate)
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        Frame const length_frames = length.frames (_film->audio_frame_rate ());

        shared_ptr<AudioBuffers> audio (new AudioBuffers (_film->audio_channels(), length_frames));
        audio->make_silent ();

        list<shared_ptr<Piece> > ov = overlaps<AudioContent> (time, time + length);
        if (ov.empty ()) {
                return audio;
        }

        for (list<shared_ptr<Piece> >::iterator i = ov.begin(); i != ov.end(); ++i) {

                shared_ptr<AudioContent> content = dynamic_pointer_cast<AudioContent> ((*i)->content);
                DCPOMATIC_ASSERT (content);
                shared_ptr<AudioDecoder> decoder = dynamic_pointer_cast<AudioDecoder> ((*i)->decoder);
                DCPOMATIC_ASSERT (decoder);

                /* The time that we should request from the content */
                DCPTime request = time - DCPTime::from_seconds (content->audio_delay() / 1000.0);
                Frame request_frames = length_frames;
                DCPTime offset;
                if (request < DCPTime ()) {
                        /* We went off the start of the content, so we will need to offset
                           the stuff we get back.
                        */
                        offset = -request;
                        request_frames += request.frames (_film->audio_frame_rate ());
                        if (request_frames < 0) {
                                request_frames = 0;
                        }
                        request = DCPTime ();
                }

                Frame const content_frame = dcp_to_content_audio (*i, request);

                BOOST_FOREACH (AudioStreamPtr j, content->audio_streams ()) {

                        /* Audio from this piece's decoder stream (which might be more or less than what we asked for) */
                        ContentAudio all = decoder->get_audio (j, content_frame, request_frames, accurate);

                        /* Gain */
                        if (content->audio_gain() != 0) {
                                shared_ptr<AudioBuffers> gain (new AudioBuffers (all.audio));
                                gain->apply_gain (content->audio_gain ());
                                all.audio = gain;
                        }

                        /* Remap channels */
                        shared_ptr<AudioBuffers> dcp_mapped (new AudioBuffers (_film->audio_channels(), all.audio->frames()));
                        dcp_mapped->make_silent ();
                        AudioMapping map = j->mapping ();
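                        /* map.get(i, j) gives the gain to apply from content channel i to DCP channel j;
                           accumulate every non-zero entry into the remapped buffer.
                        */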
                        for (int i = 0; i < map.input_channels(); ++i) {
                                for (int j = 0; j < _film->audio_channels(); ++j) {
                                        if (map.get (i, j) > 0) {
                                                dcp_mapped->accumulate_channel (
                                                        all.audio.get(),
                                                        i,
                                                        j,
                                                        map.get (i, j)
                                                        );
                                        }
                                }
                        }

                        if (_audio_processor) {
                                dcp_mapped = _audio_processor->run (dcp_mapped);
                        }

                        all.audio = dcp_mapped;

                        audio->accumulate_frames (
                                all.audio.get(),
                                content_frame - all.frame,
                                offset.frames (_film->audio_frame_rate()),
                                min (Frame (all.audio->frames()), request_frames)
                                );
                }
        }

        return audio;
}

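/** Convert a DCP time to a frame index within a piece of video content, taking the
 *  content's position, trim and frame rate change into account.
 */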
Frame
Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
{
        /* s is the offset of t from the start position of this content */
        DCPTime s = t - piece->content->position ();
        s = DCPTime (max (DCPTime::Type (0), s.get ()));
        s = DCPTime (min (piece->content->length_after_trim().get(), s.get()));

        /* Convert this to the content frame */
        return DCPTime (s + piece->content->trim_start()).frames (_film->video_frame_rate()) / piece->frc.factor ();
}

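/** Convert a frame index within a piece of video content to DCP time, clamping the result at zero. */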
DCPTime
Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
{
        DCPTime t = DCPTime::from_frames (f * piece->frc.factor (), _film->video_frame_rate()) - piece->content->trim_start () + piece->content->position ();
        if (t < DCPTime ()) {
                t = DCPTime ();
        }

        return t;
}

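/** Convert a DCP time to an audio frame index within a piece of content. */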
Frame
Player::dcp_to_content_audio (shared_ptr<const Piece> piece, DCPTime t) const
{
        /* s is the offset of t from the start position of this content */
        DCPTime s = t - piece->content->position ();
        s = DCPTime (max (DCPTime::Type (0), s.get ()));
        s = DCPTime (min (piece->content->length_after_trim().get(), s.get()));

        /* Convert this to the content frame */
        return DCPTime (s + piece->content->trim_start()).frames (_film->audio_frame_rate());
}

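/** Convert a DCP time to a ContentTime within a piece of subtitle content. */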
ContentTime
Player::dcp_to_content_subtitle (shared_ptr<const Piece> piece, DCPTime t) const
{
        /* s is the offset of t from the start position of this content */
        DCPTime s = t - piece->content->position ();
        s = DCPTime (max (DCPTime::Type (0), s.get ()));
        s = DCPTime (min (piece->content->length_after_trim().get(), s.get()));

        return ContentTime (s + piece->content->trim_start(), piece->frc);
}

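/** Write a short summary of these statistics to a log. */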
void
PlayerStatistics::dump (shared_ptr<Log> log) const
{
        log->log (String::compose ("Video: %1 good %2 skipped %3 black %4 repeat", video.good, video.skip, video.black, video.repeat), Log::TYPE_GENERAL);
        log->log (String::compose ("Audio: %1 good %2 skipped %3 silence", audio.good, audio.skip, audio.silence.seconds()), Log::TYPE_GENERAL);
}

PlayerStatistics const &
Player::statistics () const
{
        return _statistics;
}

/** @param burnt true to return only subtitles to be burnt, false to return only
 *  subtitles that should not be burnt.  This parameter will be ignored if
 *  _always_burn_subtitles is true; in this case, all subtitles will be returned.
 */
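/** @param time Start of the period of interest.
 *  @param length Length of the period of interest.
 *  @param starting Passed straight through to the subtitle decoders (broadly, true if
 *  only subtitles which start during the period are wanted).
 */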
PlayerSubtitles
Player::get_subtitles (DCPTime time, DCPTime length, bool starting, bool burnt)
{
        list<shared_ptr<Piece> > subs = overlaps<SubtitleContent> (time, time + length);

        PlayerSubtitles ps (time, length);

        for (list<shared_ptr<Piece> >::const_iterator j = subs.begin(); j != subs.end(); ++j) {
                shared_ptr<SubtitleContent> subtitle_content = dynamic_pointer_cast<SubtitleContent> ((*j)->content);
                if (!subtitle_content->use_subtitles () || (!_always_burn_subtitles && (burnt != subtitle_content->burn_subtitles ()))) {
                        continue;
                }

                shared_ptr<SubtitleDecoder> subtitle_decoder = dynamic_pointer_cast<SubtitleDecoder> ((*j)->decoder);
                ContentTime const from = dcp_to_content_subtitle (*j, time);
                /* XXX: this video_frame_rate() should be the rate that the subtitle content has been prepared for */
                ContentTime const to = from + ContentTime::from_frames (1, _film->video_frame_rate ());

                list<ContentImageSubtitle> image = subtitle_decoder->get_image_subtitles (ContentTimePeriod (from, to), starting);
                for (list<ContentImageSubtitle>::iterator i = image.begin(); i != image.end(); ++i) {

                        /* Apply content's subtitle offsets */
                        i->sub.rectangle.x += subtitle_content->subtitle_x_offset ();
                        i->sub.rectangle.y += subtitle_content->subtitle_y_offset ();

                        /* Apply content's subtitle scale */
                        i->sub.rectangle.width *= subtitle_content->subtitle_x_scale ();
                        i->sub.rectangle.height *= subtitle_content->subtitle_y_scale ();

                        /* Apply a corrective translation to keep the subtitle centred after that scale */
                        i->sub.rectangle.x -= i->sub.rectangle.width * (subtitle_content->subtitle_x_scale() - 1);
                        i->sub.rectangle.y -= i->sub.rectangle.height * (subtitle_content->subtitle_y_scale() - 1);

                        ps.image.push_back (i->sub);
                }

                list<ContentTextSubtitle> text = subtitle_decoder->get_text_subtitles (ContentTimePeriod (from, to), starting);
                BOOST_FOREACH (ContentTextSubtitle& ts, text) {
                        BOOST_FOREACH (dcp::SubtitleString& s, ts.subs) {
                                s.set_h_position (s.h_position() + subtitle_content->subtitle_x_offset ());
                                s.set_v_position (s.v_position() + subtitle_content->subtitle_y_offset ());
                                float const xs = subtitle_content->subtitle_x_scale();
                                float const ys = subtitle_content->subtitle_y_scale();
                                float const average = s.size() * (xs + ys) / 2;
                                s.set_size (average);
                                if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) {
                                        s.set_aspect_adjust (xs / ys);
                                }
                                ps.text.push_back (s);
                        }
                }
        }

        return ps;
}

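/** @return All fonts used by any subtitle content in the playlist. */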
list<shared_ptr<Font> >
Player::get_subtitle_fonts ()
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        list<shared_ptr<Font> > fonts;
        BOOST_FOREACH (shared_ptr<Piece>& p, _pieces) {
                shared_ptr<SubtitleContent> sc = dynamic_pointer_cast<SubtitleContent> (p->content);
                if (sc) {
                        /* XXX: things may go wrong if there are duplicate font IDs
                           with different font files.
                        */
                        list<shared_ptr<Font> > f = sc->fonts ();
                        copy (f.begin(), f.end(), back_inserter (fonts));
                }
        }

        return fonts;
}

/** Set this player never to produce any video data */
void
Player::set_ignore_video ()
{
        _ignore_video = true;
}

/** Set whether or not this player should always burn text subtitles into the image,
 *  regardless of the content settings.
 *  @param burn true to always burn subtitles, false to obey content settings.
 */
void
Player::set_always_burn_subtitles (bool burn)
{
        _always_burn_subtitles = burn;
}