Restore correct setup of fast resampler when the player is set to fast.
[dcpomatic.git] / src / lib / player.cc
/*
    Copyright (C) 2013-2017 Carl Hetherington <cth@carlh.net>

    This file is part of DCP-o-matic.

    DCP-o-matic is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    DCP-o-matic is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.

*/

#include "player.h"
#include "film.h"
#include "audio_buffers.h"
#include "content_audio.h"
#include "dcp_content.h"
#include "job.h"
#include "image.h"
#include "raw_image_proxy.h"
#include "ratio.h"
#include "log.h"
#include "render_subtitles.h"
#include "config.h"
#include "content_video.h"
#include "player_video.h"
#include "frame_rate_change.h"
#include "audio_processor.h"
#include "playlist.h"
#include "referenced_reel_asset.h"
#include "decoder_factory.h"
#include "decoder.h"
#include "video_decoder.h"
#include "audio_decoder.h"
#include "subtitle_content.h"
#include "subtitle_decoder.h"
#include "ffmpeg_content.h"
#include "audio_content.h"
#include "content_subtitle.h"
#include "dcp_decoder.h"
#include "image_decoder.h"
#include "compose.hpp"
#include <dcp/reel.h>
#include <dcp/reel_sound_asset.h>
#include <dcp/reel_subtitle_asset.h>
#include <dcp/reel_picture_asset.h>
#include <boost/foreach.hpp>
#include <stdint.h>
#include <algorithm>
#include <iostream>

#include "i18n.h"

#define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);

using std::list;
using std::cout;
using std::min;
using std::max;
using std::vector;
using std::pair;
using std::map;
using std::make_pair;
using std::copy;
using boost::shared_ptr;
using boost::weak_ptr;
using boost::dynamic_pointer_cast;
using boost::optional;
using boost::scoped_ptr;

Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
        : _film (film)
        , _playlist (playlist)
        , _have_valid_pieces (false)
        , _ignore_video (false)
        , _ignore_subtitle (false)
        , _always_burn_subtitles (false)
        , _fast (false)
        , _play_referenced (false)
        , _audio_merger (_film->audio_frame_rate())
{
        _film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
        _playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
        _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::playlist_content_changed, this, _1, _2, _3));
        set_video_container_size (_film->frame_size ());

        film_changed (Film::AUDIO_PROCESSOR);

        seek (DCPTime (), true);
}

void
Player::setup_pieces ()
{
        _pieces.clear ();

        BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {

                if (!i->paths_valid ()) {
                        continue;
                }

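                /* Pass _fast to the decoder factory so that, when the player has been put
                   into fast mode with set_fast(), the decoders (and in particular any audio
                   resamplers they create) are set up for speed rather than quality.
                */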
                shared_ptr<Decoder> decoder = decoder_factory (i, _film->log(), _fast);
                FrameRateChange frc (i->active_video_frame_rate(), _film->video_frame_rate());

                if (!decoder) {
                        /* Not something that we can decode; e.g. Atmos content */
                        continue;
                }

                if (decoder->video && _ignore_video) {
                        decoder->video->set_ignore ();
                }

                if (decoder->subtitle && _ignore_subtitle) {
                        decoder->subtitle->set_ignore ();
                }

                shared_ptr<DCPDecoder> dcp = dynamic_pointer_cast<DCPDecoder> (decoder);
                if (dcp) {
                        if (_play_referenced) {
                                dcp->set_decode_referenced ();
                        }
                        dcp->set_forced_reduction (_dcp_decode_reduction);
                }

                shared_ptr<Piece> piece (new Piece (i, decoder, frc));
                _pieces.push_back (piece);

                if (decoder->video) {
                        decoder->video->Data.connect (bind (&Player::video, this, weak_ptr<Piece> (piece), _1));
                }

                if (decoder->audio) {
                        decoder->audio->Data.connect (bind (&Player::audio, this, weak_ptr<Piece> (piece), _1, _2));
                }

                if (decoder->subtitle) {
                        decoder->subtitle->ImageStart.connect (bind (&Player::image_subtitle_start, this, weak_ptr<Piece> (piece), _1));
                        decoder->subtitle->TextStart.connect (bind (&Player::text_subtitle_start, this, weak_ptr<Piece> (piece), _1));
                        decoder->subtitle->Stop.connect (bind (&Player::subtitle_stop, this, weak_ptr<Piece> (piece), _1));
                }
        }

        BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
                if (i->content->audio) {
                        BOOST_FOREACH (AudioStreamPtr j, i->content->audio->streams()) {
                                _stream_states[j] = StreamState (i, i->content->position ());
                        }
                }
        }

        _black = Empty (_film->content(), _film->length(), bind(&Content::video, _1));
        _silent = Empty (_film->content(), _film->length(), bind(&Content::audio, _1));

        _last_video_time = DCPTime ();
        _last_audio_time = DCPTime ();
        _have_valid_pieces = true;
}

void
Player::playlist_content_changed (weak_ptr<Content> w, int property, bool frequent)
{
        shared_ptr<Content> c = w.lock ();
        if (!c) {
                return;
        }

        if (
                property == ContentProperty::POSITION ||
                property == ContentProperty::LENGTH ||
                property == ContentProperty::TRIM_START ||
                property == ContentProperty::TRIM_END ||
                property == ContentProperty::PATH ||
                property == VideoContentProperty::FRAME_TYPE ||
                property == DCPContentProperty::NEEDS_ASSETS ||
                property == DCPContentProperty::NEEDS_KDM ||
                property == SubtitleContentProperty::COLOUR ||
                property == SubtitleContentProperty::OUTLINE ||
                property == SubtitleContentProperty::SHADOW ||
                property == SubtitleContentProperty::EFFECT_COLOUR ||
                property == FFmpegContentProperty::SUBTITLE_STREAM ||
                property == VideoContentProperty::COLOUR_CONVERSION
                ) {

                _have_valid_pieces = false;
                Changed (frequent);

        } else if (
                property == SubtitleContentProperty::LINE_SPACING ||
                property == SubtitleContentProperty::OUTLINE_WIDTH ||
                property == SubtitleContentProperty::Y_SCALE ||
                property == SubtitleContentProperty::FADE_IN ||
                property == SubtitleContentProperty::FADE_OUT ||
                property == ContentProperty::VIDEO_FRAME_RATE ||
                property == SubtitleContentProperty::USE ||
                property == SubtitleContentProperty::X_OFFSET ||
                property == SubtitleContentProperty::Y_OFFSET ||
                property == SubtitleContentProperty::X_SCALE ||
                property == SubtitleContentProperty::FONTS ||
                property == VideoContentProperty::CROP ||
                property == VideoContentProperty::SCALE ||
                property == VideoContentProperty::FADE_IN ||
                property == VideoContentProperty::FADE_OUT
                ) {

                Changed (frequent);
        }
}

void
Player::set_video_container_size (dcp::Size s)
{
        if (s == _video_container_size) {
                return;
        }

        _video_container_size = s;

        _black_image.reset (new Image (AV_PIX_FMT_RGB24, _video_container_size, true));
        _black_image->make_black ();

        Changed (false);
}

void
Player::playlist_changed ()
{
        _have_valid_pieces = false;
        Changed (false);
}

void
Player::film_changed (Film::Property p)
{
        /* Here we should notice Film properties that affect our output, and
           alert listeners that our output now would be different to how it was
           last time we were run.
        */

        if (p == Film::CONTAINER) {
                Changed (false);
        } else if (p == Film::VIDEO_FRAME_RATE) {
                /* Pieces contain a FrameRateChange which contains the DCP frame rate,
                   so we need new pieces here.
                */
                _have_valid_pieces = false;
                Changed (false);
        } else if (p == Film::AUDIO_PROCESSOR) {
                if (_film->audio_processor ()) {
                        _audio_processor = _film->audio_processor()->clone (_film->audio_frame_rate ());
                }
        }
}

list<PositionImage>
Player::transform_image_subtitles (list<ImageSubtitle> subs) const
{
        list<PositionImage> all;

        for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
                if (!i->image) {
                        continue;
                }

                /* We will scale the subtitle up to fit _video_container_size */
                dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);

                /* Then we need a corrective translation, consisting of two parts:
                 *
                 * 1.  that which is the result of the scaling of the subtitle by _video_container_size; this will be
                 *     rect.x * _video_container_size.width and rect.y * _video_container_size.height.
                 *
                 * 2.  that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
                 *     (width_before_subtitle_scale * (1 - subtitle_x_scale) / 2) and
                 *     (height_before_subtitle_scale * (1 - subtitle_y_scale) / 2).
                 *
                 * Combining these two translations gives these expressions.
                 */
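                /* Worked example (hypothetical numbers): with a container of 1998x1080 and a
                   rectangle of (x 0.1, y 0.8, width 0.5, height 0.1), the subtitle image is
                   scaled to 999x108 and placed at (lrint (1998 * 0.1), lrint (1080 * 0.8)),
                   i.e. (200, 864).
                */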

                all.push_back (
                        PositionImage (
                                i->image->scale (
                                        scaled_size,
                                        dcp::YUV_TO_RGB_REC601,
                                        i->image->pixel_format (),
                                        true,
                                        _fast
                                        ),
                                Position<int> (
                                        lrint (_video_container_size.width * i->rectangle.x),
                                        lrint (_video_container_size.height * i->rectangle.y)
                                        )
                                )
                        );
        }

        return all;
}

shared_ptr<PlayerVideo>
Player::black_player_video_frame () const
{
        return shared_ptr<PlayerVideo> (
                new PlayerVideo (
                        shared_ptr<const ImageProxy> (new RawImageProxy (_black_image)),
                        Crop (),
                        optional<double> (),
                        _video_container_size,
                        _video_container_size,
                        EYES_BOTH,
                        PART_WHOLE,
                        PresetColourConversion::all().front().conversion
                )
        );
}

Frame
Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
{
        DCPTime s = t - piece->content->position ();
        s = min (piece->content->length_after_trim(), s);
        s = max (DCPTime(), s + DCPTime (piece->content->trim_start(), piece->frc));

        /* It might seem more logical here to convert s to a ContentTime (using the FrameRateChange)
           then convert that ContentTime to frames at the content's rate.  However this fails for
           situations like content at 29.9978733fps, DCP at 30fps.  The accuracy of the Time type is not
           enough to distinguish between the two with low values of time (e.g. 3200 in Time units).

           Instead we convert the DCPTime using the DCP video rate then account for any skip/repeat.
        */
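        /* Illustration (hypothetical numbers): for 25fps content in a 50fps DCP, frc.dcp is 50 and
           frc.factor() is 2, so one second of DCPTime gives 50 DCP frames and hence 25 content frames.
        */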
        return s.frames_floor (piece->frc.dcp) / piece->frc.factor ();
}

DCPTime
Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
{
        /* See comment in dcp_to_content_video */
        DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime (piece->content->trim_start (), piece->frc);
        return max (DCPTime (), d + piece->content->position ());
}

Frame
Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
{
        DCPTime s = t - piece->content->position ();
        s = min (piece->content->length_after_trim(), s);
        /* See notes in dcp_to_content_video */
        return max (DCPTime (), DCPTime (piece->content->trim_start (), piece->frc) + s).frames_floor (_film->audio_frame_rate ());
}

DCPTime
Player::resampled_audio_to_dcp (shared_ptr<const Piece> piece, Frame f) const
{
        /* See comment in dcp_to_content_video */
        return DCPTime::from_frames (f, _film->audio_frame_rate())
                - DCPTime (piece->content->trim_start(), piece->frc)
                + piece->content->position();
}

ContentTime
Player::dcp_to_content_time (shared_ptr<const Piece> piece, DCPTime t) const
{
        DCPTime s = t - piece->content->position ();
        s = min (piece->content->length_after_trim(), s);
        return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
}

DCPTime
Player::content_time_to_dcp (shared_ptr<const Piece> piece, ContentTime t) const
{
        return max (DCPTime (), DCPTime (t - piece->content->trim_start(), piece->frc) + piece->content->position());
}

list<shared_ptr<Font> >
Player::get_subtitle_fonts ()
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        list<shared_ptr<Font> > fonts;
        BOOST_FOREACH (shared_ptr<Piece>& p, _pieces) {
                if (p->content->subtitle) {
                        /* XXX: things may go wrong if there are duplicate font IDs
                           with different font files.
                        */
                        list<shared_ptr<Font> > f = p->content->subtitle->fonts ();
                        copy (f.begin(), f.end(), back_inserter (fonts));
                }
        }

        return fonts;
}

/** Set this player never to produce any video data */
void
Player::set_ignore_video ()
{
        _ignore_video = true;
}

void
Player::set_ignore_subtitle ()
{
        _ignore_subtitle = true;
}

/** Set whether or not this player should always burn text subtitles into the image,
 *  regardless of the content settings.
 *  @param burn true to always burn subtitles, false to obey content settings.
 */
void
Player::set_always_burn_subtitles (bool burn)
{
        _always_burn_subtitles = burn;
}

/** Sets up the player to be faster, possibly at the expense of quality */
void
Player::set_fast ()
{
        _fast = true;
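        /* Invalidating the pieces means that the next pass() or seek() will call setup_pieces(),
           which recreates the decoders via decoder_factory (..., _fast) so that their resamplers
           are set up for fast mode.
        */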
        _have_valid_pieces = false;
}

void
Player::set_play_referenced ()
{
        _play_referenced = true;
        _have_valid_pieces = false;
}

list<ReferencedReelAsset>
Player::get_reel_assets ()
{
        list<ReferencedReelAsset> a;

        BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
                shared_ptr<DCPContent> j = dynamic_pointer_cast<DCPContent> (i);
                if (!j) {
                        continue;
                }

                scoped_ptr<DCPDecoder> decoder;
                try {
                        decoder.reset (new DCPDecoder (j, _film->log(), false));
                } catch (...) {
                        return a;
                }

                int64_t offset = 0;
                BOOST_FOREACH (shared_ptr<dcp::Reel> k, decoder->reels()) {

                        DCPOMATIC_ASSERT (j->video_frame_rate ());
                        double const cfr = j->video_frame_rate().get();
                        Frame const trim_start = j->trim_start().frames_round (cfr);
                        Frame const trim_end = j->trim_end().frames_round (cfr);
                        int const ffr = _film->video_frame_rate ();

                        DCPTime const from = i->position() + DCPTime::from_frames (offset, _film->video_frame_rate());
                        if (j->reference_video ()) {
                                shared_ptr<dcp::ReelAsset> ra = k->main_picture ();
                                DCPOMATIC_ASSERT (ra);
                                ra->set_entry_point (ra->entry_point() + trim_start);
                                ra->set_duration (ra->duration() - trim_start - trim_end);
                                a.push_back (
                                        ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
                                        );
                        }

                        if (j->reference_audio ()) {
                                shared_ptr<dcp::ReelAsset> ra = k->main_sound ();
                                DCPOMATIC_ASSERT (ra);
                                ra->set_entry_point (ra->entry_point() + trim_start);
                                ra->set_duration (ra->duration() - trim_start - trim_end);
                                a.push_back (
                                        ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
                                        );
                        }

                        if (j->reference_subtitle ()) {
                                shared_ptr<dcp::ReelAsset> ra = k->main_subtitle ();
                                DCPOMATIC_ASSERT (ra);
                                ra->set_entry_point (ra->entry_point() + trim_start);
                                ra->set_duration (ra->duration() - trim_start - trim_end);
                                a.push_back (
                                        ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
                                        );
                        }

                        /* Assume that main picture duration is the length of the reel */
                        offset += k->main_picture()->duration ();
                }
        }

        return a;
}

bool
Player::pass ()
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        if (_playlist->length() == DCPTime()) {
                /* Special case of an empty Film; just give one black frame */
                emit_video (black_player_video_frame(), DCPTime());
                return true;
        }

        /* Find the decoder or empty which is farthest behind where we are and make it emit some data */

        shared_ptr<Piece> earliest_content;
        optional<DCPTime> earliest_time;

        BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
                if (!i->done) {
                        DCPTime const t = content_time_to_dcp (i, i->decoder->position());
                        /* Given two choices at the same time, pick the one with a subtitle so we see it before
                           the video.
                        */
                        if (!earliest_time || t < *earliest_time || (t == *earliest_time && i->decoder->subtitle)) {
                                earliest_time = t;
                                earliest_content = i;
                        }
                }
        }

        bool done = false;

        enum {
                NONE,
                CONTENT,
                BLACK,
                SILENT
        } which = NONE;

        if (earliest_content) {
                which = CONTENT;
        }

        if (!_black.done() && (!earliest_time || _black.position() < *earliest_time)) {
                earliest_time = _black.position ();
                which = BLACK;
        }

        if (!_silent.done() && (!earliest_time || _silent.position() < *earliest_time)) {
                earliest_time = _silent.position ();
                which = SILENT;
        }

        switch (which) {
        case CONTENT:
                earliest_content->done = earliest_content->decoder->pass ();
                break;
        case BLACK:
                emit_video (black_player_video_frame(), _black.position());
                _black.set_position (_black.position() + one_video_frame());
                break;
        case SILENT:
        {
                DCPTimePeriod period (_silent.period_at_position());
                if (period.duration() > one_video_frame()) {
                        period.to = period.from + one_video_frame();
                }
                fill_audio (period);
                _silent.set_position (period.to);
                break;
        }
        case NONE:
                done = true;
                break;
        }

        /* Emit any audio that is ready */

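        /* We may only pull audio from the merger up to the earliest point that any still-active
           stream has pushed to; beyond that point more data could still arrive.
        */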
        DCPTime pull_to = _film->length ();
        for (map<AudioStreamPtr, StreamState>::const_iterator i = _stream_states.begin(); i != _stream_states.end(); ++i) {
                if (!i->second.piece->done && i->second.last_push_end < pull_to) {
                        pull_to = i->second.last_push_end;
                }
        }

        list<pair<shared_ptr<AudioBuffers>, DCPTime> > audio = _audio_merger.pull (pull_to);
        for (list<pair<shared_ptr<AudioBuffers>, DCPTime> >::iterator i = audio.begin(); i != audio.end(); ++i) {
                if (_last_audio_time && i->second < *_last_audio_time) {
                        /* This new data comes before the last we emitted (or the last seek); discard it */
                        pair<shared_ptr<AudioBuffers>, DCPTime> cut = discard_audio (i->first, i->second, *_last_audio_time);
                        if (!cut.first) {
                                continue;
                        }
                        *i = cut;
                } else if (_last_audio_time && i->second > *_last_audio_time) {
                        /* There's a gap between this data and the last we emitted; fill with silence */
                        fill_audio (DCPTimePeriod (*_last_audio_time, i->second));
                }

                emit_audio (i->first, i->second);
        }

        return done;
}

optional<PositionImage>
Player::subtitles_for_frame (DCPTime time) const
{
        list<PositionImage> subtitles;

        BOOST_FOREACH (PlayerSubtitles i, _active_subtitles.get_burnt (time, _always_burn_subtitles)) {

                /* Image subtitles */
                list<PositionImage> c = transform_image_subtitles (i.image);
                copy (c.begin(), c.end(), back_inserter (subtitles));

                /* Text subtitles (rendered to an image) */
                if (!i.text.empty ()) {
                        list<PositionImage> s = render_subtitles (i.text, i.fonts, _video_container_size, time);
                        copy (s.begin(), s.end(), back_inserter (subtitles));
                }
        }

        if (subtitles.empty ()) {
                return optional<PositionImage> ();
        }

        return merge (subtitles);
}

bool
Player::video (weak_ptr<Piece> wp, ContentVideo video)
{
        shared_ptr<Piece> piece = wp.lock ();
        if (!piece) {
                return false;
        }

        FrameRateChange frc(piece->content->active_video_frame_rate(), _film->video_frame_rate());
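        /* frc.skip is set when the content frame rate is twice the DCP rate, in which case we
           drop every other (odd-numbered) content frame.
        */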
        if (frc.skip && (video.frame % 2) == 1) {
                return false;
        }

        /* Time of the first frame we will emit */
        DCPTime const time = content_video_to_dcp (piece, video.frame);

        /* Discard if it's outside the content's period or if it's before the last accurate seek */
        if (
                time < piece->content->position() ||
                time >= piece->content->end() ||
                (_last_video_time && time < *_last_video_time)) {
                return false;
        }

        /* Fill gaps that we discover now that we have some video which needs to be emitted */

        if (_last_video_time) {
                /* XXX: this may not work for 3D */
                DCPTime fill_from = max (*_last_video_time, piece->content->position());
                for (DCPTime j = fill_from; j < time; j += one_video_frame()) {
                        LastVideoMap::const_iterator k = _last_video.find (wp);
                        if (k != _last_video.end ()) {
                                emit_video (k->second, j);
                        } else {
                                emit_video (black_player_video_frame(), j);
                        }
                }
        }

        _last_video[wp].reset (
                new PlayerVideo (
                        video.image,
                        piece->content->video->crop (),
                        piece->content->video->fade (video.frame),
                        piece->content->video->scale().size (
                                piece->content->video, _video_container_size, _film->frame_size ()
                                ),
                        _video_container_size,
                        video.eyes,
                        video.part,
                        piece->content->video->colour_conversion ()
                        )
                );

        DCPTime t = time;
        for (int i = 0; i < frc.repeat; ++i) {
                emit_video (_last_video[wp], t);
                t += one_video_frame ();
        }

        return true;
}

Frame
Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_audio)
{
        DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);

        shared_ptr<Piece> piece = wp.lock ();
        if (!piece) {
                return 0;
        }

        shared_ptr<AudioContent> content = piece->content->audio;
        DCPOMATIC_ASSERT (content);

        /* Compute time in the DCP */
        DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame);
        /* And the end of this block in the DCP */
        DCPTime end = time + DCPTime::from_frames(content_audio.audio->frames(), content->resampled_frame_rate());
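        /* e.g. (hypothetical numbers) a block of 4800 frames at a resampled rate of 48kHz spans 0.1s of DCP time */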

        /* Remove anything that comes before the start or after the end of the content */
        if (time < piece->content->position()) {
                pair<shared_ptr<AudioBuffers>, DCPTime> cut = discard_audio (content_audio.audio, time, piece->content->position());
                if (!cut.first) {
                        /* This audio is entirely discarded */
                        return 0;
                }
                content_audio.audio = cut.first;
                time = cut.second;
        } else if (time > piece->content->end()) {
                /* Discard it all */
                return 0;
        } else if (end > piece->content->end()) {
                Frame const remaining_frames = DCPTime(piece->content->end() - time).frames_round(_film->audio_frame_rate());
                if (remaining_frames == 0) {
                        return 0;
                }
                shared_ptr<AudioBuffers> cut (new AudioBuffers (content_audio.audio->channels(), remaining_frames));
                cut->copy_from (content_audio.audio.get(), remaining_frames, 0, 0);
                content_audio.audio = cut;
        }

        DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);

        /* Gain */

        if (content->gain() != 0) {
                shared_ptr<AudioBuffers> gain (new AudioBuffers (content_audio.audio));
                gain->apply_gain (content->gain ());
                content_audio.audio = gain;
        }

        /* Remap */

        content_audio.audio = remap (content_audio.audio, _film->audio_channels(), stream->mapping());

        /* Process */

        if (_audio_processor) {
                content_audio.audio = _audio_processor->run (content_audio.audio, _film->audio_channels ());
        }

        /* Push */

        _audio_merger.push (content_audio.audio, time);
        DCPOMATIC_ASSERT (_stream_states.find (stream) != _stream_states.end ());
        _stream_states[stream].last_push_end = time + DCPTime::from_frames (content_audio.audio->frames(), _film->audio_frame_rate());
        return content_audio.audio->frames();
}

void
Player::image_subtitle_start (weak_ptr<Piece> wp, ContentImageSubtitle subtitle)
{
        shared_ptr<Piece> piece = wp.lock ();
        if (!piece) {
                return;
        }

        /* Apply content's subtitle offsets */
        subtitle.sub.rectangle.x += piece->content->subtitle->x_offset ();
        subtitle.sub.rectangle.y += piece->content->subtitle->y_offset ();

        /* Apply content's subtitle scale */
        subtitle.sub.rectangle.width *= piece->content->subtitle->x_scale ();
        subtitle.sub.rectangle.height *= piece->content->subtitle->y_scale ();

        /* Apply a corrective translation to keep the subtitle centred after that scale */
        subtitle.sub.rectangle.x -= subtitle.sub.rectangle.width * (piece->content->subtitle->x_scale() - 1);
        subtitle.sub.rectangle.y -= subtitle.sub.rectangle.height * (piece->content->subtitle->y_scale() - 1);

        PlayerSubtitles ps;
        ps.image.push_back (subtitle.sub);
        DCPTime from (content_time_to_dcp (piece, subtitle.from()));

        _active_subtitles.add_from (wp, ps, from);
}

void
Player::text_subtitle_start (weak_ptr<Piece> wp, ContentTextSubtitle subtitle)
{
        shared_ptr<Piece> piece = wp.lock ();
        if (!piece) {
                return;
        }

        PlayerSubtitles ps;
        DCPTime const from (content_time_to_dcp (piece, subtitle.from()));

        BOOST_FOREACH (dcp::SubtitleString s, subtitle.subs) {
                s.set_h_position (s.h_position() + piece->content->subtitle->x_offset ());
                s.set_v_position (s.v_position() + piece->content->subtitle->y_offset ());
                float const xs = piece->content->subtitle->x_scale();
                float const ys = piece->content->subtitle->y_scale();
                float size = s.size();

                /* Adjust size to express the common part of the scaling;
                   e.g. if xs = ys = 0.5 we scale size by 0.5 (i.e. by max (xs, ys)); any
                   remaining difference between xs and ys is expressed via set_aspect_adjust() below.
                */
                if (xs > 1e-5 && ys > 1e-5) {
                        size *= 1 / min (1 / xs, 1 / ys);
                }
                s.set_size (size);

                /* Then express aspect ratio changes */
                if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) {
                        s.set_aspect_adjust (xs / ys);
                }

                s.set_in (dcp::Time(from.seconds(), 1000));
                ps.text.push_back (SubtitleString (s, piece->content->subtitle->outline_width()));
                ps.add_fonts (piece->content->subtitle->fonts ());
        }

        _active_subtitles.add_from (wp, ps, from);
}

void
Player::subtitle_stop (weak_ptr<Piece> wp, ContentTime to)
{
        if (!_active_subtitles.have (wp)) {
                return;
        }

        shared_ptr<Piece> piece = wp.lock ();
        if (!piece) {
                return;
        }

        DCPTime const dcp_to = content_time_to_dcp (piece, to);

        pair<PlayerSubtitles, DCPTime> from = _active_subtitles.add_to (wp, dcp_to);

        if (piece->content->subtitle->use() && !_always_burn_subtitles && !piece->content->subtitle->burn()) {
                Subtitle (from.first, DCPTimePeriod (from.second, dcp_to));
        }
}

void
Player::seek (DCPTime time, bool accurate)
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        if (_audio_processor) {
                _audio_processor->flush ();
        }

        _audio_merger.clear ();
        _active_subtitles.clear ();

        BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
                if (time < i->content->position()) {
                        /* Before; seek to 0 */
                        i->decoder->seek (ContentTime(), accurate);
                        i->done = false;
                } else if (i->content->position() <= time && time < i->content->end()) {
                        /* During; seek to position */
                        i->decoder->seek (dcp_to_content_time (i, time), accurate);
                        i->done = false;
                } else {
                        /* After; this piece is done */
                        i->done = true;
                }
        }

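        /* After an accurate seek we know exactly where the next video/audio should come from, so
           anything the decoders give us from before `time' will be discarded; after an inaccurate
           seek we accept whatever arrives first.
        */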
        if (accurate) {
                _last_video_time = time;
                _last_audio_time = time;
        } else {
                _last_video_time = optional<DCPTime>();
                _last_audio_time = optional<DCPTime>();
        }

        _black.set_position (time);
        _silent.set_position (time);

        _last_video.clear ();
}

void
Player::emit_video (shared_ptr<PlayerVideo> pv, DCPTime time)
{
        optional<PositionImage> subtitles = subtitles_for_frame (time);
        if (subtitles) {
                pv->set_subtitle (subtitles.get ());
        }

        Video (pv, time);

        if (pv->eyes() == EYES_BOTH || pv->eyes() == EYES_RIGHT) {
                _last_video_time = time + one_video_frame();
                _active_subtitles.clear_before (time);
        }
}

void
Player::emit_audio (shared_ptr<AudioBuffers> data, DCPTime time)
{
        Audio (data, time);
        _last_audio_time = time + DCPTime::from_frames (data->frames(), _film->audio_frame_rate());
}

void
Player::fill_audio (DCPTimePeriod period)
{
        if (period.from == period.to) {
                return;
        }

        DCPOMATIC_ASSERT (period.from < period.to);

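        /* Emit silence in blocks of at most half a second; e.g. (at a hypothetical 48kHz DCP
           audio rate) at most 24000 frames per block.
        */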
        DCPTime t = period.from;
        while (t < period.to) {
                DCPTime block = min (DCPTime::from_seconds (0.5), period.to - t);
                Frame const samples = block.frames_round(_film->audio_frame_rate());
                if (samples) {
                        shared_ptr<AudioBuffers> silence (new AudioBuffers (_film->audio_channels(), samples));
                        silence->make_silent ();
                        emit_audio (silence, t);
                }
                t += block;
        }
}

DCPTime
Player::one_video_frame () const
{
        return DCPTime::from_frames (1, _film->video_frame_rate ());
}

pair<shared_ptr<AudioBuffers>, DCPTime>
Player::discard_audio (shared_ptr<const AudioBuffers> audio, DCPTime time, DCPTime discard_to) const
{
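        /* Discard everything in `audio' (which starts at `time') that falls before `discard_to'.
           Worked example (hypothetical numbers): 4800 frames starting at t, with discard_to at
           t + 0.05s and a 48kHz audio rate, discards 2400 frames and returns the remaining 2400
           with a start time of t + 0.05s.
        */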
        DCPTime const discard_time = discard_to - time;
        Frame const discard_frames = discard_time.frames_round(_film->audio_frame_rate());
        Frame remaining_frames = audio->frames() - discard_frames;
        if (remaining_frames <= 0) {
                return make_pair(shared_ptr<AudioBuffers>(), DCPTime());
        }
        shared_ptr<AudioBuffers> cut (new AudioBuffers (audio->channels(), remaining_frames));
        cut->copy_from (audio.get(), remaining_frames, discard_frames, 0);
        return make_pair(cut, time + discard_time);
}

void
Player::set_dcp_decode_reduction (optional<int> reduction)
{
        if (reduction == _dcp_decode_reduction) {
                return;
        }

        _dcp_decode_reduction = reduction;
        _have_valid_pieces = false;
        Changed (false);
}