/*
    Copyright (C) 2013-2015 Carl Hetherington <cth@carlh.net>

    This program is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program; if not, write to the Free Software
    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.

*/

#include "player.h"
#include "film.h"
#include "ffmpeg_decoder.h"
#include "audio_buffers.h"
#include "ffmpeg_content.h"
#include "image_decoder.h"
#include "image_content.h"
#include "sndfile_decoder.h"
#include "sndfile_content.h"
#include "subtitle_content.h"
#include "subrip_decoder.h"
#include "subrip_content.h"
#include "dcp_content.h"
#include "job.h"
#include "image.h"
#include "raw_image_proxy.h"
#include "ratio.h"
#include "log.h"
#include "render_subtitles.h"
#include "config.h"
#include "content_video.h"
#include "player_video.h"
#include "frame_rate_change.h"
#include "dcp_decoder.h"
#include "dcp_subtitle_content.h"
#include "dcp_subtitle_decoder.h"
#include "audio_processor.h"
#include <boost/foreach.hpp>
#include <stdint.h>
#include <algorithm>

#include "i18n.h"

#define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), Log::TYPE_GENERAL);

using std::list;
using std::cout;
using std::min;
using std::max;
using std::vector;
using std::pair;
using std::map;
using std::make_pair;
using std::copy;
using boost::shared_ptr;
using boost::weak_ptr;
using boost::dynamic_pointer_cast;
using boost::optional;

Player::Player (shared_ptr<const Film> film)
        : _film (film)
        , _have_valid_pieces (false)
        , _ignore_video (false)
        , _always_burn_subtitles (false)
{
        _film_content_changed_connection = _film->ContentChanged.connect (bind (&Player::content_changed, this, _1, _2, _3));
        _film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
        set_video_container_size (_film->frame_size ());

        film_changed (Film::AUDIO_PROCESSOR);
}

void
Player::setup_pieces ()
{
        list<shared_ptr<Piece> > old_pieces = _pieces;
        _pieces.clear ();

        ContentList content = _film->content ();

        for (ContentList::iterator i = content.begin(); i != content.end(); ++i) {

                if (!(*i)->paths_valid ()) {
                        continue;
                }

                shared_ptr<Decoder> decoder;
                optional<FrameRateChange> frc;

                /* Work out a FrameRateChange for the best overlap video for this content, in case we need it below */
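                /* Illustrative example (assumed numbers): a subtitle or sound-only file lying
                   entirely over a 25 fps FFmpeg clip in a 24 fps film picks that clip as its
                   best overlap, so below it gets FrameRateChange (25, 24) and its timings
                   follow the video's frame-rate conversion.
                */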
                DCPTime best_overlap_t;
                shared_ptr<VideoContent> best_overlap;
                for (ContentList::iterator j = content.begin(); j != content.end(); ++j) {
                        shared_ptr<VideoContent> vc = dynamic_pointer_cast<VideoContent> (*j);
                        if (!vc) {
                                continue;
                        }

                        /* The overlap is the length of the period common to both pieces of content */
                        DCPTime const overlap = min (vc->end(), (*i)->end()) - max (vc->position(), (*i)->position());
                        if (overlap > best_overlap_t) {
                                best_overlap = vc;
                                best_overlap_t = overlap;
                        }
                }

                optional<FrameRateChange> best_overlap_frc;
                if (best_overlap) {
                        best_overlap_frc = FrameRateChange (best_overlap->video_frame_rate(), _film->video_frame_rate ());
                } else {
                        /* No video overlap; e.g. if the DCP is just audio */
                        best_overlap_frc = FrameRateChange (_film->video_frame_rate(), _film->video_frame_rate ());
                }

                /* FFmpeg */
                shared_ptr<const FFmpegContent> fc = dynamic_pointer_cast<const FFmpegContent> (*i);
                if (fc) {
                        decoder.reset (new FFmpegDecoder (fc, _film->log()));
                        frc = FrameRateChange (fc->video_frame_rate(), _film->video_frame_rate());
                }

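                /* DCPContent */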
                shared_ptr<const DCPContent> dc = dynamic_pointer_cast<const DCPContent> (*i);
                if (dc) {
                        decoder.reset (new DCPDecoder (dc));
                        frc = FrameRateChange (dc->video_frame_rate(), _film->video_frame_rate());
                }

                /* ImageContent */
                shared_ptr<const ImageContent> ic = dynamic_pointer_cast<const ImageContent> (*i);
                if (ic) {
                        /* See if we can re-use an old ImageDecoder */
                        for (list<shared_ptr<Piece> >::const_iterator j = old_pieces.begin(); j != old_pieces.end(); ++j) {
                                shared_ptr<ImageDecoder> imd = dynamic_pointer_cast<ImageDecoder> ((*j)->decoder);
                                if (imd && imd->content() == ic) {
                                        decoder = imd;
                                }
                        }

                        if (!decoder) {
                                decoder.reset (new ImageDecoder (ic));
                        }

                        frc = FrameRateChange (ic->video_frame_rate(), _film->video_frame_rate());
                }

                /* SndfileContent */
                shared_ptr<const SndfileContent> sc = dynamic_pointer_cast<const SndfileContent> (*i);
                if (sc) {
                        decoder.reset (new SndfileDecoder (sc));
                        frc = best_overlap_frc;
                }

                /* SubRipContent */
                shared_ptr<const SubRipContent> rc = dynamic_pointer_cast<const SubRipContent> (*i);
                if (rc) {
                        decoder.reset (new SubRipDecoder (rc));
                        frc = best_overlap_frc;
                }

                /* DCPSubtitleContent */
                shared_ptr<const DCPSubtitleContent> dsc = dynamic_pointer_cast<const DCPSubtitleContent> (*i);
                if (dsc) {
                        decoder.reset (new DCPSubtitleDecoder (dsc));
                        frc = best_overlap_frc;
                }

                shared_ptr<VideoDecoder> vd = dynamic_pointer_cast<VideoDecoder> (decoder);
                if (vd && _ignore_video) {
                        vd->set_ignore_video ();
                }

                _pieces.push_back (shared_ptr<Piece> (new Piece (*i, decoder, frc.get ())));
        }

        _have_valid_pieces = true;
}

void
Player::content_changed (weak_ptr<Content> w, int property, bool frequent)
{
        shared_ptr<Content> c = w.lock ();
        if (!c) {
                return;
        }

        if (
                property == ContentProperty::POSITION ||
                property == ContentProperty::LENGTH ||
                property == ContentProperty::TRIM_START ||
                property == ContentProperty::TRIM_END ||
                property == ContentProperty::PATH ||
                property == VideoContentProperty::VIDEO_FRAME_TYPE ||
                property == DCPContentProperty::CAN_BE_PLAYED
                ) {

                _have_valid_pieces = false;
                Changed (frequent);

        } else if (
                property == SubtitleContentProperty::USE_SUBTITLES ||
                property == SubtitleContentProperty::SUBTITLE_X_OFFSET ||
                property == SubtitleContentProperty::SUBTITLE_Y_OFFSET ||
                property == SubtitleContentProperty::SUBTITLE_X_SCALE ||
                property == SubtitleContentProperty::SUBTITLE_Y_SCALE ||
                property == VideoContentProperty::VIDEO_CROP ||
                property == VideoContentProperty::VIDEO_SCALE ||
                property == VideoContentProperty::VIDEO_FRAME_RATE ||
                property == VideoContentProperty::VIDEO_FADE_IN ||
                property == VideoContentProperty::VIDEO_FADE_OUT
                ) {

                Changed (frequent);
        }
}

void
Player::set_video_container_size (dcp::Size s)
{
        _video_container_size = s;

        _black_image.reset (new Image (PIX_FMT_RGB24, _video_container_size, true));
        _black_image->make_black ();
}

void
Player::film_changed (Film::Property p)
{
        /* Here we should notice Film properties that affect our output, and
           alert listeners that our output now would be different to how it was
           last time we were run.
        */
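
        /* Illustrative example: changing the film's container or video frame rate alters every
           frame we would emit, so we just signal Changed; a new audio processor only needs a
           fresh clone of that processor.
        */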

        if (p == Film::CONTENT) {
                _have_valid_pieces = false;
                Changed (false);
        } else if (p == Film::CONTAINER || p == Film::VIDEO_FRAME_RATE) {
                Changed (false);
        } else if (p == Film::AUDIO_PROCESSOR) {
                if (_film->audio_processor ()) {
                        _audio_processor = _film->audio_processor()->clone (_film->audio_frame_rate ());
                }
        }
}

list<PositionImage>
Player::transform_image_subtitles (list<ImageSubtitle> subs) const
{
        list<PositionImage> all;

        for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
                if (!i->image) {
                        continue;
                }

                /* We will scale the subtitle up to fit _video_container_size */
                dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);

                /* Then we need a corrective translation, consisting of two parts:
                 *
                 * 1.  that which is the result of the scaling of the subtitle by _video_container_size; this will be
                 *     rect.x * _video_container_size.width and rect.y * _video_container_size.height.
                 *
                 * 2.  that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
                 *     (width_before_subtitle_scale * (1 - subtitle_x_scale) / 2) and
                 *     (height_before_subtitle_scale * (1 - subtitle_y_scale) / 2).
                 *
                 * Combining these two translations gives these expressions.
                 */
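
                /* Illustrative example (assumed numbers): with a 1998x1080 container and a subtitle
                   rectangle of x = 0.1, y = 0.8, width = 0.5, height = 0.1, the image below is
                   scaled to 999x108 and positioned at (rint (199.8), rint (864)) = (200, 864).
                */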

                all.push_back (
                        PositionImage (
                                i->image->scale (
                                        scaled_size,
                                        dcp::YUV_TO_RGB_REC601,
                                        i->image->pixel_format (),
                                        true
                                        ),
                                Position<int> (
                                        rint (_video_container_size.width * i->rectangle.x),
                                        rint (_video_container_size.height * i->rectangle.y)
                                        )
                                )
                        );
        }

        return all;
}

shared_ptr<PlayerVideo>
Player::black_player_video_frame (DCPTime time) const
{
        return shared_ptr<PlayerVideo> (
                new PlayerVideo (
                        shared_ptr<const ImageProxy> (new RawImageProxy (_black_image)),
                        time,
                        Crop (),
                        optional<float> (),
                        _video_container_size,
                        _video_container_size,
                        EYES_BOTH,
                        PART_WHOLE,
                        PresetColourConversion::all().front().conversion
                )
        );
}

/** @return All PlayerVideos at the given time (there may be two frames for 3D) */
list<shared_ptr<PlayerVideo> >
Player::get_video (DCPTime time, bool accurate)
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        /* Find subtitles for possible burn-in */

        PlayerSubtitles ps = get_subtitles (time, DCPTime::from_frames (1, _film->video_frame_rate ()), false, true);

        list<PositionImage> sub_images;

        /* Image subtitles */
        list<PositionImage> c = transform_image_subtitles (ps.image);
        copy (c.begin(), c.end(), back_inserter (sub_images));

        /* Text subtitles (rendered to an image) */
        if (!ps.text.empty ()) {
                list<PositionImage> s = render_subtitles (ps.text, _video_container_size);
                copy (s.begin (), s.end (), back_inserter (sub_images));
        }

        optional<PositionImage> subtitles;
        if (!sub_images.empty ()) {
                subtitles = merge (sub_images);
        }

        /* Find video */

        list<shared_ptr<Piece> > ov = overlaps<VideoContent> (
                time,
                time + DCPTime::from_frames (1, _film->video_frame_rate ()) - DCPTime::delta()
                );

        list<shared_ptr<PlayerVideo> > pvf;

        if (ov.empty ()) {
                /* No video content at this time */
                pvf.push_back (black_player_video_frame (time));
        } else {
                /* Create a PlayerVideo from the content's video at this time */

                shared_ptr<Piece> piece = ov.back ();
                shared_ptr<VideoDecoder> decoder = dynamic_pointer_cast<VideoDecoder> (piece->decoder);
                DCPOMATIC_ASSERT (decoder);
                shared_ptr<VideoContent> video_content = dynamic_pointer_cast<VideoContent> (piece->content);
                DCPOMATIC_ASSERT (video_content);

                list<ContentVideo> content_video = decoder->get_video (dcp_to_content_video (piece, time), accurate);
                if (content_video.empty ()) {
                        pvf.push_back (black_player_video_frame (time));
                        return pvf;
                }

                dcp::Size image_size = video_content->scale().size (video_content, _video_container_size, _film->frame_size ());

                for (list<ContentVideo>::const_iterator i = content_video.begin(); i != content_video.end(); ++i) {
                        pvf.push_back (
                                shared_ptr<PlayerVideo> (
                                        new PlayerVideo (
                                                i->image,
                                                content_video_to_dcp (piece, i->frame),
                                                video_content->crop (),
                                                video_content->fade (i->frame),
                                                image_size,
                                                _video_container_size,
                                                i->eyes,
                                                i->part,
                                                video_content->colour_conversion ()
                                                )
                                        )
                                );
                }
        }

        if (subtitles) {
                BOOST_FOREACH (shared_ptr<PlayerVideo> p, pvf) {
                        p->set_subtitle (subtitles.get ());
                }
        }

        return pvf;
}

shared_ptr<AudioBuffers>
Player::get_audio (DCPTime time, DCPTime length, bool accurate)
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        Frame const length_frames = length.frames (_film->audio_frame_rate ());

        shared_ptr<AudioBuffers> audio (new AudioBuffers (_film->audio_channels(), length_frames));
        audio->make_silent ();

        list<shared_ptr<Piece> > ov = overlaps<AudioContent> (time, time + length);
        if (ov.empty ()) {
                return audio;
        }

        for (list<shared_ptr<Piece> >::iterator i = ov.begin(); i != ov.end(); ++i) {

                shared_ptr<AudioContent> content = dynamic_pointer_cast<AudioContent> ((*i)->content);
                DCPOMATIC_ASSERT (content);
                shared_ptr<AudioDecoder> decoder = dynamic_pointer_cast<AudioDecoder> ((*i)->decoder);
                DCPOMATIC_ASSERT (decoder);

                /* The time that we should request from the content */
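                /* Illustrative example (assumed numbers): an audio_delay of 100 ms makes us ask
                   the decoder for audio from 100 ms earlier, so the content's sound ends up
                   100 ms later relative to the picture.
                */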
                DCPTime request = time - DCPTime::from_seconds (content->audio_delay() / 1000.0);
                Frame request_frames = length_frames;
                DCPTime offset;
                if (request < DCPTime ()) {
                        /* We went off the start of the content, so we will need to offset
                           the stuff we get back.
                        */
                        offset = -request;
                        request_frames += request.frames (_film->audio_frame_rate ());
                        if (request_frames < 0) {
                                request_frames = 0;
                        }
                        request = DCPTime ();
                }

                Frame const content_frame = dcp_to_content_audio (*i, request);

                BOOST_FOREACH (AudioStreamPtr j, content->audio_streams ()) {

                        /* Audio from this piece's decoder stream (which might be more or less than what we asked for) */
                        ContentAudio all = decoder->get_audio (j, content_frame, request_frames, accurate);

                        /* Gain */
                        if (content->audio_gain() != 0) {
                                shared_ptr<AudioBuffers> gain (new AudioBuffers (all.audio));
                                gain->apply_gain (content->audio_gain ());
                                all.audio = gain;
                        }

                        /* Remap channels */
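                        /* Illustrative example (assumed numbers): map.get (0, 2) == 1 mixes the
                           whole of the stream's channel 0 into DCP channel 2 (conventionally the
                           centre channel).
                        */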
                        shared_ptr<AudioBuffers> dcp_mapped (new AudioBuffers (_film->audio_channels(), all.audio->frames()));
                        dcp_mapped->make_silent ();
                        AudioMapping map = j->mapping ();
                        for (int i = 0; i < map.input_channels(); ++i) {
                                for (int j = 0; j < _film->audio_channels(); ++j) {
                                        if (map.get (i, j) > 0) {
                                                dcp_mapped->accumulate_channel (
                                                        all.audio.get(),
                                                        i,
                                                        j,
                                                        map.get (i, j)
                                                        );
                                        }
                                }
                        }

                        if (_audio_processor) {
                                dcp_mapped = _audio_processor->run (dcp_mapped);
                        }

                        all.audio = dcp_mapped;

                        audio->accumulate_frames (
                                all.audio.get(),
                                content_frame - all.frame,
                                offset.frames (_film->audio_frame_rate()),
                                min (Frame (all.audio->frames()), request_frames)
                                );
                }
        }

        return audio;
}

Frame
Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
{
        /* s is the offset of t from the start position of this content */
        DCPTime s = t - piece->content->position ();
        s = DCPTime (max (DCPTime::Type (0), s.get ()));
        s = DCPTime (min (piece->content->length_after_trim().get(), s.get()));

        /* Convert this to the content frame */
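        /* Illustrative example (assumed numbers): with no trim, no frame-rate change
           (frc.factor() == 1) and a 24 fps film, a DCPTime 10 s after the content's
           position gives content frame 240.
        */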
        return DCPTime (s + piece->content->trim_start()).frames (_film->video_frame_rate()) / piece->frc.factor ();
}

DCPTime
Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
{
        DCPTime t = DCPTime::from_frames (f * piece->frc.factor (), _film->video_frame_rate()) - piece->content->trim_start () + piece->content->position ();
        if (t < DCPTime ()) {
                t = DCPTime ();
        }

        return t;
}

Frame
Player::dcp_to_content_audio (shared_ptr<const Piece> piece, DCPTime t) const
{
        /* s is the offset of t from the start position of this content */
        DCPTime s = t - piece->content->position ();
        s = DCPTime (max (DCPTime::Type (0), s.get ()));
        s = DCPTime (min (piece->content->length_after_trim().get(), s.get()));

        /* Convert this to the content frame */
        return DCPTime (s + piece->content->trim_start()).frames (_film->audio_frame_rate());
}

ContentTime
Player::dcp_to_content_subtitle (shared_ptr<const Piece> piece, DCPTime t) const
{
        /* s is the offset of t from the start position of this content */
        DCPTime s = t - piece->content->position ();
        s = DCPTime (max (DCPTime::Type (0), s.get ()));
        s = DCPTime (min (piece->content->length_after_trim().get(), s.get()));

        return ContentTime (s + piece->content->trim_start(), piece->frc);
}

void
PlayerStatistics::dump (shared_ptr<Log> log) const
{
        log->log (String::compose ("Video: %1 good %2 skipped %3 black %4 repeat", video.good, video.skip, video.black, video.repeat), Log::TYPE_GENERAL);
        log->log (String::compose ("Audio: %1 good %2 skipped %3 silence", audio.good, audio.skip, audio.silence.seconds()), Log::TYPE_GENERAL);
}

PlayerStatistics const &
Player::statistics () const
{
        return _statistics;
}

/** @param burnt true to return only subtitles to be burnt, false to return only
 *  subtitles that should not be burnt.  This parameter will be ignored if
 *  _always_burn_subtitles is true; in this case, all subtitles will be returned.
 */
PlayerSubtitles
Player::get_subtitles (DCPTime time, DCPTime length, bool starting, bool burnt)
{
        list<shared_ptr<Piece> > subs = overlaps<SubtitleContent> (time, time + length);

        PlayerSubtitles ps (time, length);

        for (list<shared_ptr<Piece> >::const_iterator j = subs.begin(); j != subs.end(); ++j) {
                shared_ptr<SubtitleContent> subtitle_content = dynamic_pointer_cast<SubtitleContent> ((*j)->content);
                if (!subtitle_content->use_subtitles () || (!_always_burn_subtitles && (burnt != subtitle_content->burn_subtitles ()))) {
                        continue;
                }

                shared_ptr<SubtitleDecoder> subtitle_decoder = dynamic_pointer_cast<SubtitleDecoder> ((*j)->decoder);
                ContentTime const from = dcp_to_content_subtitle (*j, time);
                /* XXX: this video_frame_rate() should be the rate that the subtitle content has been prepared for */
                ContentTime const to = from + ContentTime::from_frames (1, _film->video_frame_rate ());

                list<ContentImageSubtitle> image = subtitle_decoder->get_image_subtitles (ContentTimePeriod (from, to), starting);
                for (list<ContentImageSubtitle>::iterator i = image.begin(); i != image.end(); ++i) {

                        /* Apply content's subtitle offsets */
                        i->sub.rectangle.x += subtitle_content->subtitle_x_offset ();
                        i->sub.rectangle.y += subtitle_content->subtitle_y_offset ();

                        /* Apply content's subtitle scale */
                        i->sub.rectangle.width *= subtitle_content->subtitle_x_scale ();
                        i->sub.rectangle.height *= subtitle_content->subtitle_y_scale ();

                        /* Apply a corrective translation to keep the subtitle centred after that scale */
                        i->sub.rectangle.x -= i->sub.rectangle.width * (subtitle_content->subtitle_x_scale() - 1);
                        i->sub.rectangle.y -= i->sub.rectangle.height * (subtitle_content->subtitle_y_scale() - 1);

                        ps.image.push_back (i->sub);
                }

                list<ContentTextSubtitle> text = subtitle_decoder->get_text_subtitles (ContentTimePeriod (from, to), starting);
                BOOST_FOREACH (ContentTextSubtitle& ts, text) {
                        BOOST_FOREACH (dcp::SubtitleString& s, ts.subs) {
                                s.set_h_position (s.h_position() + subtitle_content->subtitle_x_offset ());
                                s.set_v_position (s.v_position() + subtitle_content->subtitle_y_offset ());
                                float const xs = subtitle_content->subtitle_x_scale();
                                float const ys = subtitle_content->subtitle_y_scale();
                                float const average = s.size() * (xs + ys) / 2;
                                s.set_size (average);
                                if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) {
                                        s.set_aspect_adjust (xs / ys);
                                }
                                ps.text.push_back (s);
                        }
                }
        }

        return ps;
}

list<shared_ptr<Font> >
Player::get_subtitle_fonts ()
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        list<shared_ptr<Font> > fonts;
        BOOST_FOREACH (shared_ptr<Piece>& p, _pieces) {
                shared_ptr<SubtitleContent> sc = dynamic_pointer_cast<SubtitleContent> (p->content);
                if (sc) {
                        /* XXX: things may go wrong if there are duplicate font IDs
                           with different font files.
                        */
                        list<shared_ptr<Font> > f = sc->fonts ();
                        copy (f.begin(), f.end(), back_inserter (fonts));
                }
        }

        return fonts;
}

/** Set this player never to produce any video data */
void
Player::set_ignore_video ()
{
        _ignore_video = true;
}

/** Set whether or not this player should always burn text subtitles into the image,
 *  regardless of the content settings.
 *  @param burn true to always burn subtitles, false to obey content settings.
 */
void
Player::set_always_burn_subtitles (bool burn)
{
        _always_burn_subtitles = burn;
}