Fix logic of audio decoder positioning.
[dcpomatic.git] / src / lib / player.cc
1 /*
2     Copyright (C) 2013-2017 Carl Hetherington <cth@carlh.net>
3
4     This file is part of DCP-o-matic.
5
6     DCP-o-matic is free software; you can redistribute it and/or modify
7     it under the terms of the GNU General Public License as published by
8     the Free Software Foundation; either version 2 of the License, or
9     (at your option) any later version.
10
11     DCP-o-matic is distributed in the hope that it will be useful,
12     but WITHOUT ANY WARRANTY; without even the implied warranty of
13     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14     GNU General Public License for more details.
15
16     You should have received a copy of the GNU General Public License
17     along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.
18
19 */
20
21 #include "player.h"
22 #include "film.h"
23 #include "audio_buffers.h"
24 #include "content_audio.h"
25 #include "dcp_content.h"
26 #include "job.h"
27 #include "image.h"
28 #include "raw_image_proxy.h"
29 #include "ratio.h"
30 #include "log.h"
31 #include "render_subtitles.h"
32 #include "config.h"
33 #include "content_video.h"
34 #include "player_video.h"
35 #include "frame_rate_change.h"
36 #include "audio_processor.h"
37 #include "playlist.h"
38 #include "referenced_reel_asset.h"
39 #include "decoder_factory.h"
40 #include "decoder.h"
41 #include "video_decoder.h"
42 #include "audio_decoder.h"
43 #include "subtitle_content.h"
44 #include "subtitle_decoder.h"
45 #include "ffmpeg_content.h"
46 #include "audio_content.h"
47 #include "content_subtitle.h"
48 #include "dcp_decoder.h"
49 #include "image_decoder.h"
50 #include "compose.hpp"
51 #include <dcp/reel.h>
52 #include <dcp/reel_sound_asset.h>
53 #include <dcp/reel_subtitle_asset.h>
54 #include <dcp/reel_picture_asset.h>
55 #include <boost/foreach.hpp>
56 #include <stdint.h>
57 #include <algorithm>
58 #include <iostream>
59
60 #include "i18n.h"
61
62 #define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);
63
64 using std::list;
65 using std::cout;
66 using std::min;
67 using std::max;
68 using std::min;
69 using std::vector;
70 using std::pair;
71 using std::map;
72 using std::make_pair;
73 using std::copy;
74 using boost::shared_ptr;
75 using boost::weak_ptr;
76 using boost::dynamic_pointer_cast;
77 using boost::optional;
78 using boost::scoped_ptr;
79
/** Construct a Player for a given film/playlist.
 *  @param film Film to use for global settings (frame rate, size, audio processor etc.).
 *  @param playlist Playlist whose content will be played.
 */
Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
	: _film (film)
	, _playlist (playlist)
	, _have_valid_pieces (false)
	, _ignore_video (false)
	, _ignore_subtitle (false)
	, _always_burn_subtitles (false)
	, _fast (false)
	, _play_referenced (false)
	, _audio_merger (_film->audio_frame_rate())
{
	/* Watch for changes to the film, the playlist and its content so that we can
	   invalidate our pieces and/or tell listeners that our output has changed.
	*/
	_film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
	_playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
	_playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::playlist_content_changed, this, _1, _2, _3));
	set_video_container_size (_film->frame_size ());

	/* Set up _audio_processor from the film's current state */
	film_changed (Film::AUDIO_PROCESSOR);

	/* Start playback from the beginning */
	seek (DCPTime (), true);
}
100
101 void
102 Player::setup_pieces ()
103 {
104         _pieces.clear ();
105
106         BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
107
108                 if (!i->paths_valid ()) {
109                         continue;
110                 }
111
112                 shared_ptr<Decoder> decoder = decoder_factory (i, _film->log(), _fast);
113                 FrameRateChange frc (i->active_video_frame_rate(), _film->video_frame_rate());
114
115                 if (!decoder) {
116                         /* Not something that we can decode; e.g. Atmos content */
117                         continue;
118                 }
119
120                 if (decoder->video && _ignore_video) {
121                         decoder->video->set_ignore ();
122                 }
123
124                 if (decoder->subtitle && _ignore_subtitle) {
125                         decoder->subtitle->set_ignore ();
126                 }
127
128                 shared_ptr<DCPDecoder> dcp = dynamic_pointer_cast<DCPDecoder> (decoder);
129                 if (dcp && _play_referenced) {
130                         if (_play_referenced) {
131                                 dcp->set_decode_referenced ();
132                         }
133                         dcp->set_forced_reduction (_dcp_decode_reduction);
134                 }
135
136                 shared_ptr<Piece> piece (new Piece (i, decoder, frc));
137                 _pieces.push_back (piece);
138
139                 if (decoder->video) {
140                         decoder->video->Data.connect (bind (&Player::video, this, weak_ptr<Piece> (piece), _1));
141                 }
142
143                 if (decoder->audio) {
144                         decoder->audio->Data.connect (bind (&Player::audio, this, weak_ptr<Piece> (piece), _1, _2));
145                 }
146
147                 if (decoder->subtitle) {
148                         decoder->subtitle->ImageStart.connect (bind (&Player::image_subtitle_start, this, weak_ptr<Piece> (piece), _1));
149                         decoder->subtitle->TextStart.connect (bind (&Player::text_subtitle_start, this, weak_ptr<Piece> (piece), _1));
150                         decoder->subtitle->Stop.connect (bind (&Player::subtitle_stop, this, weak_ptr<Piece> (piece), _1));
151                 }
152         }
153
154         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
155                 if (i->content->audio) {
156                         BOOST_FOREACH (AudioStreamPtr j, i->content->audio->streams()) {
157                                 _stream_states[j] = StreamState (i, i->content->position ());
158                         }
159                 }
160         }
161
162         _black = Empty (_film->content(), _film->length(), bind(&Content::video, _1));
163         _silent = Empty (_film->content(), _film->length(), bind(&Content::audio, _1));
164
165         _last_video_time = DCPTime ();
166         _last_audio_time = DCPTime ();
167         _have_valid_pieces = true;
168 }
169
170 void
171 Player::playlist_content_changed (weak_ptr<Content> w, int property, bool frequent)
172 {
173         shared_ptr<Content> c = w.lock ();
174         if (!c) {
175                 return;
176         }
177
178         if (
179                 property == ContentProperty::POSITION ||
180                 property == ContentProperty::LENGTH ||
181                 property == ContentProperty::TRIM_START ||
182                 property == ContentProperty::TRIM_END ||
183                 property == ContentProperty::PATH ||
184                 property == VideoContentProperty::FRAME_TYPE ||
185                 property == DCPContentProperty::NEEDS_ASSETS ||
186                 property == DCPContentProperty::NEEDS_KDM ||
187                 property == SubtitleContentProperty::COLOUR ||
188                 property == SubtitleContentProperty::OUTLINE ||
189                 property == SubtitleContentProperty::SHADOW ||
190                 property == SubtitleContentProperty::EFFECT_COLOUR ||
191                 property == FFmpegContentProperty::SUBTITLE_STREAM ||
192                 property == VideoContentProperty::COLOUR_CONVERSION
193                 ) {
194
195                 _have_valid_pieces = false;
196                 Changed (frequent);
197
198         } else if (
199                 property == SubtitleContentProperty::LINE_SPACING ||
200                 property == SubtitleContentProperty::OUTLINE_WIDTH ||
201                 property == SubtitleContentProperty::Y_SCALE ||
202                 property == SubtitleContentProperty::FADE_IN ||
203                 property == SubtitleContentProperty::FADE_OUT ||
204                 property == ContentProperty::VIDEO_FRAME_RATE ||
205                 property == SubtitleContentProperty::USE ||
206                 property == SubtitleContentProperty::X_OFFSET ||
207                 property == SubtitleContentProperty::Y_OFFSET ||
208                 property == SubtitleContentProperty::X_SCALE ||
209                 property == SubtitleContentProperty::FONTS ||
210                 property == VideoContentProperty::CROP ||
211                 property == VideoContentProperty::SCALE ||
212                 property == VideoContentProperty::FADE_IN ||
213                 property == VideoContentProperty::FADE_OUT
214                 ) {
215
216                 Changed (frequent);
217         }
218 }
219
220 void
221 Player::set_video_container_size (dcp::Size s)
222 {
223         if (s == _video_container_size) {
224                 return;
225         }
226
227         _video_container_size = s;
228
229         _black_image.reset (new Image (AV_PIX_FMT_RGB24, _video_container_size, true));
230         _black_image->make_black ();
231
232         Changed (false);
233 }
234
/** Handle a wholesale change to the playlist: our pieces are no longer valid
 *  and listeners must be told that our output has changed.
 */
void
Player::playlist_changed ()
{
	_have_valid_pieces = false;
	Changed (false);
}
241
242 void
243 Player::film_changed (Film::Property p)
244 {
245         /* Here we should notice Film properties that affect our output, and
246            alert listeners that our output now would be different to how it was
247            last time we were run.
248         */
249
250         if (p == Film::CONTAINER) {
251                 Changed (false);
252         } else if (p == Film::VIDEO_FRAME_RATE) {
253                 /* Pieces contain a FrameRateChange which contains the DCP frame rate,
254                    so we need new pieces here.
255                 */
256                 _have_valid_pieces = false;
257                 Changed (false);
258         } else if (p == Film::AUDIO_PROCESSOR) {
259                 if (_film->audio_processor ()) {
260                         _audio_processor = _film->audio_processor()->clone (_film->audio_frame_rate ());
261                 }
262         }
263 }
264
265 list<PositionImage>
266 Player::transform_image_subtitles (list<ImageSubtitle> subs) const
267 {
268         list<PositionImage> all;
269
270         for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
271                 if (!i->image) {
272                         continue;
273                 }
274
275                 /* We will scale the subtitle up to fit _video_container_size */
276                 dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);
277
278                 /* Then we need a corrective translation, consisting of two parts:
279                  *
280                  * 1.  that which is the result of the scaling of the subtitle by _video_container_size; this will be
281                  *     rect.x * _video_container_size.width and rect.y * _video_container_size.height.
282                  *
283                  * 2.  that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
284                  *     (width_before_subtitle_scale * (1 - subtitle_x_scale) / 2) and
285                  *     (height_before_subtitle_scale * (1 - subtitle_y_scale) / 2).
286                  *
287                  * Combining these two translations gives these expressions.
288                  */
289
290                 all.push_back (
291                         PositionImage (
292                                 i->image->scale (
293                                         scaled_size,
294                                         dcp::YUV_TO_RGB_REC601,
295                                         i->image->pixel_format (),
296                                         true,
297                                         _fast
298                                         ),
299                                 Position<int> (
300                                         lrint (_video_container_size.width * i->rectangle.x),
301                                         lrint (_video_container_size.height * i->rectangle.y)
302                                         )
303                                 )
304                         );
305         }
306
307         return all;
308 }
309
310 shared_ptr<PlayerVideo>
311 Player::black_player_video_frame () const
312 {
313         return shared_ptr<PlayerVideo> (
314                 new PlayerVideo (
315                         shared_ptr<const ImageProxy> (new RawImageProxy (_black_image)),
316                         Crop (),
317                         optional<double> (),
318                         _video_container_size,
319                         _video_container_size,
320                         EYES_BOTH,
321                         PART_WHOLE,
322                         PresetColourConversion::all().front().conversion
323                 )
324         );
325 }
326
327 Frame
328 Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
329 {
330         DCPTime s = t - piece->content->position ();
331         s = min (piece->content->length_after_trim(), s);
332         s = max (DCPTime(), s + DCPTime (piece->content->trim_start(), piece->frc));
333
334         /* It might seem more logical here to convert s to a ContentTime (using the FrameRateChange)
335            then convert that ContentTime to frames at the content's rate.  However this fails for
336            situations like content at 29.9978733fps, DCP at 30fps.  The accuracy of the Time type is not
337            enough to distinguish between the two with low values of time (e.g. 3200 in Time units).
338
339            Instead we convert the DCPTime using the DCP video rate then account for any skip/repeat.
340         */
341         return s.frames_floor (piece->frc.dcp) / piece->frc.factor ();
342 }
343
344 DCPTime
345 Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
346 {
347         /* See comment in dcp_to_content_video */
348         DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime (piece->content->trim_start (), piece->frc);
349         return max (DCPTime (), d + piece->content->position ());
350 }
351
352 Frame
353 Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
354 {
355         DCPTime s = t - piece->content->position ();
356         s = min (piece->content->length_after_trim(), s);
357         /* See notes in dcp_to_content_video */
358         return max (DCPTime (), DCPTime (piece->content->trim_start (), piece->frc) + s).frames_floor (_film->audio_frame_rate ());
359 }
360
361 DCPTime
362 Player::resampled_audio_to_dcp (shared_ptr<const Piece> piece, Frame f) const
363 {
364         /* See comment in dcp_to_content_video */
365         return DCPTime::from_frames (f, _film->audio_frame_rate())
366                 - DCPTime (piece->content->trim_start(), piece->frc)
367                 + piece->content->position();
368 }
369
370 ContentTime
371 Player::dcp_to_content_time (shared_ptr<const Piece> piece, DCPTime t) const
372 {
373         DCPTime s = t - piece->content->position ();
374         s = min (piece->content->length_after_trim(), s);
375         return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
376 }
377
378 DCPTime
379 Player::content_time_to_dcp (shared_ptr<const Piece> piece, ContentTime t) const
380 {
381         return max (DCPTime (), DCPTime (t - piece->content->trim_start(), piece->frc) + piece->content->position());
382 }
383
384 list<shared_ptr<Font> >
385 Player::get_subtitle_fonts ()
386 {
387         if (!_have_valid_pieces) {
388                 setup_pieces ();
389         }
390
391         list<shared_ptr<Font> > fonts;
392         BOOST_FOREACH (shared_ptr<Piece>& p, _pieces) {
393                 if (p->content->subtitle) {
394                         /* XXX: things may go wrong if there are duplicate font IDs
395                            with different font files.
396                         */
397                         list<shared_ptr<Font> > f = p->content->subtitle->fonts ();
398                         copy (f.begin(), f.end(), back_inserter (fonts));
399                 }
400         }
401
402         return fonts;
403 }
404
/** Set this player never to produce any video data.
 *  Takes effect the next time pieces are (re-)built, via Decoder::video->set_ignore().
 */
void
Player::set_ignore_video ()
{
	_ignore_video = true;
}
411
/** Set this player never to produce any subtitle data.
 *  Takes effect the next time pieces are (re-)built, via Decoder::subtitle->set_ignore().
 */
void
Player::set_ignore_subtitle ()
{
	_ignore_subtitle = true;
}
417
/** Set whether or not this player should always burn text subtitles into the image,
 *  regardless of the content settings.
 *  @param burn true to always burn subtitles, false to obey content settings.
 *
 *  The flag is consulted in subtitles_for_frame() when deciding what to render.
 */
void
Player::set_always_burn_subtitles (bool burn)
{
	_always_burn_subtitles = burn;
}
427
/** Sets up the player to be faster, possibly at the expense of quality.
 *  Invalidates the current pieces since the flag is passed to decoder_factory()
 *  when pieces are built.
 */
void
Player::set_fast ()
{
	_fast = true;
	_have_valid_pieces = false;
}
435
/** Set the player to decode and play content which is marked as referenced
 *  (rather than skipping it).  Invalidates the current pieces since referenced
 *  DCP decoders are configured during setup_pieces().
 */
void
Player::set_play_referenced ()
{
	_play_referenced = true;
	_have_valid_pieces = false;
}
442
/** @return Details of the reel assets of any DCP content in the playlist which is
 *  marked to be referenced rather than re-encoded, along with the DCP time period
 *  that each asset covers in the output.
 */
list<ReferencedReelAsset>
Player::get_reel_assets ()
{
	list<ReferencedReelAsset> a;

	BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
		/* Only DCP content can be referenced */
		shared_ptr<DCPContent> j = dynamic_pointer_cast<DCPContent> (i);
		if (!j) {
			continue;
		}

		scoped_ptr<DCPDecoder> decoder;
		try {
			decoder.reset (new DCPDecoder (j, _film->log(), false));
		} catch (...) {
			/* Deliberate best-effort: if this DCP can't be read, give up and
			   return whatever we have gathered so far.
			*/
			return a;
		}

		/* Offset, in frames at the film's video rate, of the current reel from
		   the start of this content.
		*/
		int64_t offset = 0;
		BOOST_FOREACH (shared_ptr<dcp::Reel> k, decoder->reels()) {

			DCPOMATIC_ASSERT (j->video_frame_rate ());
			double const cfr = j->video_frame_rate().get();
			/* Trims expressed as frame counts at the content's video rate */
			Frame const trim_start = j->trim_start().frames_round (cfr);
			Frame const trim_end = j->trim_end().frames_round (cfr);
			int const ffr = _film->video_frame_rate ();

			/* NOTE(review): trim_start/trim_end are applied to every reel's
			   entry point and duration below; for multi-reel DCPs this looks
			   like it would trim each reel rather than just the first/last —
			   TODO confirm intended behaviour.
			*/
			DCPTime const from = i->position() + DCPTime::from_frames (offset, _film->video_frame_rate());
			if (j->reference_video ()) {
				shared_ptr<dcp::ReelAsset> ra = k->main_picture ();
				DCPOMATIC_ASSERT (ra);
				ra->set_entry_point (ra->entry_point() + trim_start);
				ra->set_duration (ra->duration() - trim_start - trim_end);
				a.push_back (
					ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
					);
			}

			if (j->reference_audio ()) {
				shared_ptr<dcp::ReelAsset> ra = k->main_sound ();
				DCPOMATIC_ASSERT (ra);
				ra->set_entry_point (ra->entry_point() + trim_start);
				ra->set_duration (ra->duration() - trim_start - trim_end);
				a.push_back (
					ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
					);
			}

			if (j->reference_subtitle ()) {
				shared_ptr<dcp::ReelAsset> ra = k->main_subtitle ();
				DCPOMATIC_ASSERT (ra);
				ra->set_entry_point (ra->entry_point() + trim_start);
				ra->set_duration (ra->duration() - trim_start - trim_end);
				a.push_back (
					ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
					);
			}

			/* Assume that main picture duration is the length of the reel */
			offset += k->main_picture()->duration ();
		}
	}

	return a;
}
508
/** Run one pass of the player: find whichever source (content decoder, black
 *  filler or silence filler) is farthest behind, make it produce some data,
 *  then emit any audio which is now complete.
 *  @return true if playback is finished (nothing left to emit).
 */
bool
Player::pass ()
{
	if (!_have_valid_pieces) {
		setup_pieces ();
	}

	if (_playlist->length() == DCPTime()) {
		/* Special case of an empty Film; just give one black frame */
		emit_video (black_player_video_frame(), DCPTime());
		return true;
	}

	/* Find the decoder or empty which is farthest behind where we are and make it emit some data */

	shared_ptr<Piece> earliest_content;
	optional<DCPTime> earliest_time;

	BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
		if (!i->done) {
			DCPTime const t = content_time_to_dcp (i, i->decoder->position());
			/* Given two choices at the same time, pick the one with a subtitle so we see it before
			   the video.
			*/
			if (!earliest_time || t < *earliest_time || (t == *earliest_time && i->decoder->subtitle)) {
				earliest_time = t;
				earliest_content = i;
			}
		}
	}

	bool done = false;

	/* What kind of source should produce data on this pass */
	enum {
		NONE,
		CONTENT,
		BLACK,
		SILENT
	} which = NONE;

	if (earliest_content) {
		which = CONTENT;
	}

	/* Black/silence fillers take priority if they are further behind than the content */
	if (!_black.done() && (!earliest_time || _black.position() < *earliest_time)) {
		earliest_time = _black.position ();
		which = BLACK;
	}

	if (!_silent.done() && (!earliest_time || _silent.position() < *earliest_time)) {
		earliest_time = _silent.position ();
		which = SILENT;
	}

	switch (which) {
	case CONTENT:
		earliest_content->done = earliest_content->decoder->pass ();
		break;
	case BLACK:
		emit_video (black_player_video_frame(), _black.position());
		_black.set_position (_black.position() + one_video_frame());
		break;
	case SILENT:
	{
		/* Fill at most one video frame's worth of silence per pass */
		DCPTimePeriod period (_silent.period_at_position());
		if (period.duration() > one_video_frame()) {
			period.to = period.from + one_video_frame();
		}
		fill_audio (period);
		_silent.set_position (period.to);
		break;
	}
	case NONE:
		done = true;
		break;
	}

	/* Emit any audio that is ready */

	/* We can only pull audio up to the earliest point that any live stream has
	   been pushed to, otherwise later-arriving audio could be missed.
	*/
	DCPTime pull_to = _film->length ();
	for (map<AudioStreamPtr, StreamState>::const_iterator i = _stream_states.begin(); i != _stream_states.end(); ++i) {
		if (!i->second.piece->done && i->second.last_push_end < pull_to) {
			pull_to = i->second.last_push_end;
		}
	}

	list<pair<shared_ptr<AudioBuffers>, DCPTime> > audio = _audio_merger.pull (pull_to);
	for (list<pair<shared_ptr<AudioBuffers>, DCPTime> >::iterator i = audio.begin(); i != audio.end(); ++i) {
		if (_last_audio_time && i->second < *_last_audio_time) {
			/* This new data comes before the last we emitted (or the last seek); discard it */
			pair<shared_ptr<AudioBuffers>, DCPTime> cut = discard_audio (i->first, i->second, *_last_audio_time);
			if (!cut.first) {
				continue;
			}
			*i = cut;
		} else if (_last_audio_time && i->second > *_last_audio_time) {
			/* There's a gap between this data and the last we emitted; fill with silence */
			fill_audio (DCPTimePeriod (*_last_audio_time, i->second));
		}

		emit_audio (i->first, i->second);
	}

	return done;
}
614
615 optional<PositionImage>
616 Player::subtitles_for_frame (DCPTime time) const
617 {
618         list<PositionImage> subtitles;
619
620         BOOST_FOREACH (PlayerSubtitles i, _active_subtitles.get_burnt (time, _always_burn_subtitles)) {
621
622                 /* Image subtitles */
623                 list<PositionImage> c = transform_image_subtitles (i.image);
624                 copy (c.begin(), c.end(), back_inserter (subtitles));
625
626                 /* Text subtitles (rendered to an image) */
627                 if (!i.text.empty ()) {
628                         list<PositionImage> s = render_subtitles (i.text, i.fonts, _video_container_size, time);
629                         copy (s.begin(), s.end(), back_inserter (subtitles));
630                 }
631         }
632
633         if (subtitles.empty ()) {
634                 return optional<PositionImage> ();
635         }
636
637         return merge (subtitles);
638 }
639
/** Handle a video frame arriving from one of our decoders.
 *  @param wp Piece that the frame came from.
 *  @param video The frame.
 *  @return true if the frame was emitted, false if it was discarded.
 */
bool
Player::video (weak_ptr<Piece> wp, ContentVideo video)
{
	shared_ptr<Piece> piece = wp.lock ();
	if (!piece) {
		return false;
	}

	/* When the DCP rate is half the content rate we drop every other frame */
	FrameRateChange frc(piece->content->active_video_frame_rate(), _film->video_frame_rate());
	if (frc.skip && (video.frame % 2) == 1) {
		return false;
	}

	/* Time of the first frame we will emit */
	DCPTime const time = content_video_to_dcp (piece, video.frame);

	/* Discard if it's outside the content's period or if it's before the last accurate seek */
	if (
		time < piece->content->position() ||
		time >= piece->content->end() ||
		(_last_video_time && time < *_last_video_time)) {
		return false;
	}

	/* Fill gaps that we discover now that we have some video which needs to be emitted */

	if (_last_video_time) {
		/* XXX: this may not work for 3D */
		DCPTime fill_from = max (*_last_video_time, piece->content->position());
		for (DCPTime j = fill_from; j < time; j += one_video_frame()) {
			/* Repeat this piece's last frame if we have one, otherwise use black */
			LastVideoMap::const_iterator k = _last_video.find (wp);
			if (k != _last_video.end ()) {
				emit_video (k->second, j);
			} else {
				emit_video (black_player_video_frame(), j);
			}
		}
	}

	/* Remember this frame so that it can be repeated for gap-filling later */
	_last_video[wp].reset (
		new PlayerVideo (
			video.image,
			piece->content->video->crop (),
			piece->content->video->fade (video.frame),
			piece->content->video->scale().size (
				piece->content->video, _video_container_size, _film->frame_size ()
				),
			_video_container_size,
			video.eyes,
			video.part,
			piece->content->video->colour_conversion ()
			)
		);

	/* Emit the frame, repeated if the frame rate change requires it */
	DCPTime t = time;
	for (int i = 0; i < frc.repeat; ++i) {
		emit_video (_last_video[wp], t);
		t += one_video_frame ();
	}

	return true;
}
702
/** Handle some audio arriving from one of our decoders.
 *  @param wp Piece that the audio came from.
 *  @param stream Stream that the audio belongs to.
 *  @param content_audio The audio data and its frame position within the content.
 *  @return Number of input frames that were `accepted'.  This is the number of frames passed in
 *  unless some were discarded at the end of the block.
 */
Frame
Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_audio)
{
	DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);

	shared_ptr<Piece> piece = wp.lock ();
	if (!piece) {
		return 0;
	}

	shared_ptr<AudioContent> content = piece->content->audio;
	DCPOMATIC_ASSERT (content);

	/* Compute time in the DCP */
	DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame);
	/* And the end of this block in the DCP */
	DCPTime end = time + DCPTime::from_frames(content_audio.audio->frames(), content->resampled_frame_rate());

	/* We consider frames trimmed off the beginning to nevertheless be `accepted'; it's only frames trimmed
	   off the end that are considered as discarded.  This logic is necessary to ensure correct reel lengths,
	   although the precise details escape me at the moment.
	*/
	Frame accepted = content_audio.audio->frames();

	/* Remove anything that comes before the start or after the end of the content */
	if (time < piece->content->position()) {
		/* Block straddles the start of the content: discard the part before it */
		pair<shared_ptr<AudioBuffers>, DCPTime> cut = discard_audio (content_audio.audio, time, piece->content->position());
		if (!cut.first) {
			/* This audio is entirely discarded */
			return 0;
		}
		content_audio.audio = cut.first;
		time = cut.second;
	} else if (time > piece->content->end()) {
		/* Discard it all */
		return 0;
	} else if (end > piece->content->end()) {
		/* Block straddles the end of the content: keep only the part before it */
		Frame const remaining_frames = DCPTime(piece->content->end() - time).frames_round(_film->audio_frame_rate());
		if (remaining_frames == 0) {
			return 0;
		}
		shared_ptr<AudioBuffers> cut (new AudioBuffers (content_audio.audio->channels(), remaining_frames));
		cut->copy_from (content_audio.audio.get(), remaining_frames, 0, 0);
		content_audio.audio = cut;
		accepted = content_audio.audio->frames();
	}

	DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);

	/* Gain */

	if (content->gain() != 0) {
		shared_ptr<AudioBuffers> gain (new AudioBuffers (content_audio.audio));
		gain->apply_gain (content->gain ());
		content_audio.audio = gain;
	}

	/* Remap */

	content_audio.audio = remap (content_audio.audio, _film->audio_channels(), stream->mapping());

	/* Process */

	if (_audio_processor) {
		content_audio.audio = _audio_processor->run (content_audio.audio, _film->audio_channels ());
	}

	/* Push */

	_audio_merger.push (content_audio.audio, time);
	/* Record how far this stream has been pushed so pass() knows what can be pulled */
	DCPOMATIC_ASSERT (_stream_states.find (stream) != _stream_states.end ());
	_stream_states[stream].last_push_end = time + DCPTime::from_frames (content_audio.audio->frames(), _film->audio_frame_rate());
	return accepted;
}
780
781 void
782 Player::image_subtitle_start (weak_ptr<Piece> wp, ContentImageSubtitle subtitle)
783 {
784         shared_ptr<Piece> piece = wp.lock ();
785         if (!piece) {
786                 return;
787         }
788
789         /* Apply content's subtitle offsets */
790         subtitle.sub.rectangle.x += piece->content->subtitle->x_offset ();
791         subtitle.sub.rectangle.y += piece->content->subtitle->y_offset ();
792
793         /* Apply content's subtitle scale */
794         subtitle.sub.rectangle.width *= piece->content->subtitle->x_scale ();
795         subtitle.sub.rectangle.height *= piece->content->subtitle->y_scale ();
796
797         /* Apply a corrective translation to keep the subtitle centred after that scale */
798         subtitle.sub.rectangle.x -= subtitle.sub.rectangle.width * (piece->content->subtitle->x_scale() - 1);
799         subtitle.sub.rectangle.y -= subtitle.sub.rectangle.height * (piece->content->subtitle->y_scale() - 1);
800
801         PlayerSubtitles ps;
802         ps.image.push_back (subtitle.sub);
803         DCPTime from (content_time_to_dcp (piece, subtitle.from()));
804
805         _active_subtitles.add_from (wp, ps, from);
806 }
807
808 void
809 Player::text_subtitle_start (weak_ptr<Piece> wp, ContentTextSubtitle subtitle)
810 {
811         shared_ptr<Piece> piece = wp.lock ();
812         if (!piece) {
813                 return;
814         }
815
816         PlayerSubtitles ps;
817         DCPTime const from (content_time_to_dcp (piece, subtitle.from()));
818
819         BOOST_FOREACH (dcp::SubtitleString s, subtitle.subs) {
820                 s.set_h_position (s.h_position() + piece->content->subtitle->x_offset ());
821                 s.set_v_position (s.v_position() + piece->content->subtitle->y_offset ());
822                 float const xs = piece->content->subtitle->x_scale();
823                 float const ys = piece->content->subtitle->y_scale();
824                 float size = s.size();
825
826                 /* Adjust size to express the common part of the scaling;
827                    e.g. if xs = ys = 0.5 we scale size by 2.
828                 */
829                 if (xs > 1e-5 && ys > 1e-5) {
830                         size *= 1 / min (1 / xs, 1 / ys);
831                 }
832                 s.set_size (size);
833
834                 /* Then express aspect ratio changes */
835                 if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) {
836                         s.set_aspect_adjust (xs / ys);
837                 }
838
839                 s.set_in (dcp::Time(from.seconds(), 1000));
840                 ps.text.push_back (SubtitleString (s, piece->content->subtitle->outline_width()));
841                 ps.add_fonts (piece->content->subtitle->fonts ());
842         }
843
844         _active_subtitles.add_from (wp, ps, from);
845 }
846
847 void
848 Player::subtitle_stop (weak_ptr<Piece> wp, ContentTime to)
849 {
850         if (!_active_subtitles.have (wp)) {
851                 return;
852         }
853
854         shared_ptr<Piece> piece = wp.lock ();
855         if (!piece) {
856                 return;
857         }
858
859         DCPTime const dcp_to = content_time_to_dcp (piece, to);
860
861         pair<PlayerSubtitles, DCPTime> from = _active_subtitles.add_to (wp, dcp_to);
862
863         if (piece->content->subtitle->use() && !_always_burn_subtitles && !piece->content->subtitle->burn()) {
864                 Subtitle (from.first, DCPTimePeriod (from.second, dcp_to));
865         }
866 }
867
868 void
869 Player::seek (DCPTime time, bool accurate)
870 {
871         if (!_have_valid_pieces) {
872                 setup_pieces ();
873         }
874
875         if (_audio_processor) {
876                 _audio_processor->flush ();
877         }
878
879         _audio_merger.clear ();
880         _active_subtitles.clear ();
881
882         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
883                 if (time < i->content->position()) {
884                         /* Before; seek to 0 */
885                         i->decoder->seek (ContentTime(), accurate);
886                         i->done = false;
887                 } else if (i->content->position() <= time && time < i->content->end()) {
888                         /* During; seek to position */
889                         i->decoder->seek (dcp_to_content_time (i, time), accurate);
890                         i->done = false;
891                 } else {
892                         /* After; this piece is done */
893                         i->done = true;
894                 }
895         }
896
897         if (accurate) {
898                 _last_video_time = time;
899                 _last_audio_time = time;
900         } else {
901                 _last_video_time = optional<DCPTime>();
902                 _last_audio_time = optional<DCPTime>();
903         }
904
905         _black.set_position (time);
906         _silent.set_position (time);
907
908         _last_video.clear ();
909 }
910
911 void
912 Player::emit_video (shared_ptr<PlayerVideo> pv, DCPTime time)
913 {
914         optional<PositionImage> subtitles = subtitles_for_frame (time);
915         if (subtitles) {
916                 pv->set_subtitle (subtitles.get ());
917         }
918
919         Video (pv, time);
920
921         if (pv->eyes() == EYES_BOTH || pv->eyes() == EYES_RIGHT) {
922                 _last_video_time = time + one_video_frame();
923                 _active_subtitles.clear_before (time);
924         }
925 }
926
927 void
928 Player::emit_audio (shared_ptr<AudioBuffers> data, DCPTime time)
929 {
930         Audio (data, time);
931         _last_audio_time = time + DCPTime::from_frames (data->frames(), _film->audio_frame_rate());
932 }
933
934 void
935 Player::fill_audio (DCPTimePeriod period)
936 {
937         if (period.from == period.to) {
938                 return;
939         }
940
941         DCPOMATIC_ASSERT (period.from < period.to);
942
943         DCPTime t = period.from;
944         while (t < period.to) {
945                 DCPTime block = min (DCPTime::from_seconds (0.5), period.to - t);
946                 Frame const samples = block.frames_round(_film->audio_frame_rate());
947                 if (samples) {
948                         shared_ptr<AudioBuffers> silence (new AudioBuffers (_film->audio_channels(), samples));
949                         silence->make_silent ();
950                         emit_audio (silence, t);
951                 }
952                 t += block;
953         }
954 }
955
956 DCPTime
957 Player::one_video_frame () const
958 {
959         return DCPTime::from_frames (1, _film->video_frame_rate ());
960 }
961
962 pair<shared_ptr<AudioBuffers>, DCPTime>
963 Player::discard_audio (shared_ptr<const AudioBuffers> audio, DCPTime time, DCPTime discard_to) const
964 {
965         DCPTime const discard_time = discard_to - time;
966         Frame const discard_frames = discard_time.frames_round(_film->audio_frame_rate());
967         Frame remaining_frames = audio->frames() - discard_frames;
968         if (remaining_frames <= 0) {
969                 return make_pair(shared_ptr<AudioBuffers>(), DCPTime());
970         }
971         shared_ptr<AudioBuffers> cut (new AudioBuffers (audio->channels(), remaining_frames));
972         cut->copy_from (audio.get(), remaining_frames, discard_frames, 0);
973         return make_pair(cut, time + discard_time);
974 }
975
976 void
977 Player::set_dcp_decode_reduction (optional<int> reduction)
978 {
979         if (reduction == _dcp_decode_reduction) {
980                 return;
981         }
982
983         _dcp_decode_reduction = reduction;
984         _have_valid_pieces = false;
985         Changed (false);
986 }