Fix missing subtitle when it occurs at the same time as video from a different piece of content.
[dcpomatic.git] / src / lib / player.cc
1 /*
2     Copyright (C) 2013-2017 Carl Hetherington <cth@carlh.net>
3
4     This file is part of DCP-o-matic.
5
6     DCP-o-matic is free software; you can redistribute it and/or modify
7     it under the terms of the GNU General Public License as published by
8     the Free Software Foundation; either version 2 of the License, or
9     (at your option) any later version.
10
11     DCP-o-matic is distributed in the hope that it will be useful,
12     but WITHOUT ANY WARRANTY; without even the implied warranty of
13     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14     GNU General Public License for more details.
15
16     You should have received a copy of the GNU General Public License
17     along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.
18
19 */
20
21 #include "player.h"
22 #include "film.h"
23 #include "audio_buffers.h"
24 #include "content_audio.h"
25 #include "dcp_content.h"
26 #include "job.h"
27 #include "image.h"
28 #include "raw_image_proxy.h"
29 #include "ratio.h"
30 #include "log.h"
31 #include "render_subtitles.h"
32 #include "config.h"
33 #include "content_video.h"
34 #include "player_video.h"
35 #include "frame_rate_change.h"
36 #include "audio_processor.h"
37 #include "playlist.h"
38 #include "referenced_reel_asset.h"
39 #include "decoder_factory.h"
40 #include "decoder.h"
41 #include "video_decoder.h"
42 #include "audio_decoder.h"
43 #include "subtitle_content.h"
44 #include "subtitle_decoder.h"
45 #include "ffmpeg_content.h"
46 #include "audio_content.h"
47 #include "content_subtitle.h"
48 #include "dcp_decoder.h"
49 #include "image_decoder.h"
50 #include "compose.hpp"
51 #include <dcp/reel.h>
52 #include <dcp/reel_sound_asset.h>
53 #include <dcp/reel_subtitle_asset.h>
54 #include <dcp/reel_picture_asset.h>
55 #include <boost/foreach.hpp>
56 #include <stdint.h>
57 #include <algorithm>
58 #include <iostream>
59
60 #include "i18n.h"
61
62 #define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);
63
64 using std::list;
65 using std::cout;
66 using std::min;
67 using std::max;
69 using std::vector;
70 using std::pair;
71 using std::map;
72 using std::make_pair;
73 using std::copy;
74 using boost::shared_ptr;
75 using boost::weak_ptr;
76 using boost::dynamic_pointer_cast;
77 using boost::optional;
78 using boost::scoped_ptr;
79
80 Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
81         : _film (film)
82         , _playlist (playlist)
83         , _have_valid_pieces (false)
84         , _ignore_video (false)
85         , _ignore_audio (false)
86         , _always_burn_subtitles (false)
87         , _fast (false)
88         , _play_referenced (false)
89         , _audio_merger (_film->audio_frame_rate())
90 {
91         _film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
92         _playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
93         _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::playlist_content_changed, this, _1, _2, _3));
94         set_video_container_size (_film->frame_size ());
95
96         film_changed (Film::AUDIO_PROCESSOR);
97
98         seek (DCPTime (), true);
99 }
100
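/** Rebuild _pieces from the playlist: create a decoder for each piece of playable
 *  content, connect its video/audio/subtitle outputs to our handlers, record the
 *  starting state of each audio stream and reset the black/silence fillers and the
 *  last-emitted video and audio times.
 */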
101 void
102 Player::setup_pieces ()
103 {
104         _pieces.clear ();
105
106         BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
107
108                 if (!i->paths_valid ()) {
109                         continue;
110                 }
111
112                 shared_ptr<Decoder> decoder = decoder_factory (i, _film->log());
113                 FrameRateChange frc (i->active_video_frame_rate(), _film->video_frame_rate());
114
115                 if (!decoder) {
116                         /* Not something that we can decode; e.g. Atmos content */
117                         continue;
118                 }
119
120                 if (decoder->video && _ignore_video) {
121                         decoder->video->set_ignore ();
122                 }
123
124                 if (decoder->audio && _ignore_audio) {
125                         decoder->audio->set_ignore ();
126                 }
127
128                 shared_ptr<DCPDecoder> dcp = dynamic_pointer_cast<DCPDecoder> (decoder);
129                 if (dcp && _play_referenced) {
130                         dcp->set_decode_referenced ();
131                 }
132
133                 shared_ptr<Piece> piece (new Piece (i, decoder, frc));
134                 _pieces.push_back (piece);
135
136                 if (decoder->video) {
137                         decoder->video->Data.connect (bind (&Player::video, this, weak_ptr<Piece> (piece), _1));
138                 }
139
140                 if (decoder->audio) {
141                         decoder->audio->Data.connect (bind (&Player::audio, this, weak_ptr<Piece> (piece), _1, _2));
142                 }
143
144                 if (decoder->subtitle) {
145                         decoder->subtitle->ImageStart.connect (bind (&Player::image_subtitle_start, this, weak_ptr<Piece> (piece), _1));
146                         decoder->subtitle->TextStart.connect (bind (&Player::text_subtitle_start, this, weak_ptr<Piece> (piece), _1));
147                         decoder->subtitle->Stop.connect (bind (&Player::subtitle_stop, this, weak_ptr<Piece> (piece), _1));
148                 }
149         }
150
151         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
152                 if (i->content->audio) {
153                         BOOST_FOREACH (AudioStreamPtr j, i->content->audio->streams()) {
154                                 _stream_states[j] = StreamState (i, i->content->position ());
155                         }
156                 }
157         }
158
159         _black = Empty (_playlist, bind(&Content::video, _1));
160         _silent = Empty (_playlist, bind(&Content::audio, _1));
161
162         _last_video_time = DCPTime ();
163         _last_audio_time = DCPTime ();
164         _have_valid_pieces = true;
165 }
166
167 void
168 Player::playlist_content_changed (weak_ptr<Content> w, int property, bool frequent)
169 {
170         shared_ptr<Content> c = w.lock ();
171         if (!c) {
172                 return;
173         }
174
175         if (
176                 property == ContentProperty::POSITION ||
177                 property == ContentProperty::LENGTH ||
178                 property == ContentProperty::TRIM_START ||
179                 property == ContentProperty::TRIM_END ||
180                 property == ContentProperty::PATH ||
181                 property == VideoContentProperty::FRAME_TYPE ||
182                 property == DCPContentProperty::NEEDS_ASSETS ||
183                 property == DCPContentProperty::NEEDS_KDM ||
184                 property == SubtitleContentProperty::COLOUR ||
185                 property == SubtitleContentProperty::OUTLINE ||
186                 property == SubtitleContentProperty::SHADOW ||
187                 property == SubtitleContentProperty::EFFECT_COLOUR ||
188                 property == FFmpegContentProperty::SUBTITLE_STREAM ||
189                 property == VideoContentProperty::COLOUR_CONVERSION
190                 ) {
191
192                 _have_valid_pieces = false;
193                 Changed (frequent);
194
195         } else if (
196                 property == SubtitleContentProperty::LINE_SPACING ||
197                 property == SubtitleContentProperty::OUTLINE_WIDTH ||
198                 property == SubtitleContentProperty::Y_SCALE ||
199                 property == SubtitleContentProperty::FADE_IN ||
200                 property == SubtitleContentProperty::FADE_OUT ||
201                 property == ContentProperty::VIDEO_FRAME_RATE ||
202                 property == SubtitleContentProperty::USE ||
203                 property == SubtitleContentProperty::X_OFFSET ||
204                 property == SubtitleContentProperty::Y_OFFSET ||
205                 property == SubtitleContentProperty::X_SCALE ||
206                 property == SubtitleContentProperty::FONTS ||
207                 property == VideoContentProperty::CROP ||
208                 property == VideoContentProperty::SCALE ||
209                 property == VideoContentProperty::FADE_IN ||
210                 property == VideoContentProperty::FADE_OUT
211                 ) {
212
213                 Changed (frequent);
214         }
215 }
216
217 void
218 Player::set_video_container_size (dcp::Size s)
219 {
220         if (s == _video_container_size) {
221                 return;
222         }
223
224         _video_container_size = s;
225
226         _black_image.reset (new Image (AV_PIX_FMT_RGB24, _video_container_size, true));
227         _black_image->make_black ();
228
229         Changed (false);
230 }
231
232 void
233 Player::playlist_changed ()
234 {
235         _have_valid_pieces = false;
236         Changed (false);
237 }
238
239 void
240 Player::film_changed (Film::Property p)
241 {
242         /* Here we should notice Film properties that affect our output, and
243            alert listeners that our output would now be different from what it
244            was last time we were run.
245         */
246
247         if (p == Film::CONTAINER) {
248                 Changed (false);
249         } else if (p == Film::VIDEO_FRAME_RATE) {
250                 /* Pieces contain a FrameRateChange which contains the DCP frame rate,
251                    so we need new pieces here.
252                 */
253                 _have_valid_pieces = false;
254                 Changed (false);
255         } else if (p == Film::AUDIO_PROCESSOR) {
256                 if (_film->audio_processor ()) {
257                         _audio_processor = _film->audio_processor()->clone (_film->audio_frame_rate ());
258                 }
259         }
260 }
261
262 list<PositionImage>
263 Player::transform_image_subtitles (list<ImageSubtitle> subs) const
264 {
265         list<PositionImage> all;
266
267         for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
268                 if (!i->image) {
269                         continue;
270                 }
271
272                 /* We will scale the subtitle up to fit _video_container_size */
273                 dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);
274
275                 /* Then we need a corrective translation, consisting of two parts:
276                  *
277                  * 1.  that which is the result of the scaling of the subtitle by _video_container_size; this will be
278                  *     rect.x * _video_container_size.width and rect.y * _video_container_size.height.
279                  *
280                  * 2.  that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
281                  *     (width_before_subtitle_scale * (1 - subtitle_x_scale) / 2) and
282                  *     (height_before_subtitle_scale * (1 - subtitle_y_scale) / 2).
283                  *
284                  * Combining these two translations gives these expressions.
285                  */
286
287                 all.push_back (
288                         PositionImage (
289                                 i->image->scale (
290                                         scaled_size,
291                                         dcp::YUV_TO_RGB_REC601,
292                                         i->image->pixel_format (),
293                                         true,
294                                         _fast
295                                         ),
296                                 Position<int> (
297                                         lrint (_video_container_size.width * i->rectangle.x),
298                                         lrint (_video_container_size.height * i->rectangle.y)
299                                         )
300                                 )
301                         );
302         }
303
304         return all;
305 }
306
307 shared_ptr<PlayerVideo>
308 Player::black_player_video_frame () const
309 {
310         return shared_ptr<PlayerVideo> (
311                 new PlayerVideo (
312                         shared_ptr<const ImageProxy> (new RawImageProxy (_black_image)),
313                         Crop (),
314                         optional<double> (),
315                         _video_container_size,
316                         _video_container_size,
317                         EYES_BOTH,
318                         PART_WHOLE,
319                         PresetColourConversion::all().front().conversion
320                 )
321         );
322 }
323
324 Frame
325 Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
326 {
327         DCPTime s = t - piece->content->position ();
328         s = min (piece->content->length_after_trim(), s);
329         s = max (DCPTime(), s + DCPTime (piece->content->trim_start(), piece->frc));
330
331         /* It might seem more logical here to convert s to a ContentTime (using the FrameRateChange)
332            then convert that ContentTime to frames at the content's rate.  However this fails for
333            situations like content at 29.9978733fps, DCP at 30fps.  The accuracy of the Time type is not
334            enough to distinguish between the two with low values of time (e.g. 3200 in Time units).
335
336            Instead we convert the DCPTime using the DCP video rate then account for any skip/repeat.
337         */
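        /* A worked example: with 25fps content in a 50fps DCP each frame is
           repeated, so frc.factor() should be 2; s = 2 seconds is 100 frames at
           the DCP rate, and dividing by the factor gives content frame 50.
        */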
338         return s.frames_floor (piece->frc.dcp) / piece->frc.factor ();
339 }
340
341 DCPTime
342 Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
343 {
344         /* See comment in dcp_to_content_video */
345         DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime (piece->content->trim_start (), piece->frc);
346         return max (DCPTime (), d + piece->content->position ());
347 }
348
349 Frame
350 Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
351 {
352         DCPTime s = t - piece->content->position ();
353         s = min (piece->content->length_after_trim(), s);
354         /* See notes in dcp_to_content_video */
355         return max (DCPTime (), DCPTime (piece->content->trim_start (), piece->frc) + s).frames_floor (_film->audio_frame_rate ());
356 }
357
358 DCPTime
359 Player::resampled_audio_to_dcp (shared_ptr<const Piece> piece, Frame f) const
360 {
361         /* See comment in dcp_to_content_video */
362         return DCPTime::from_frames (f, _film->audio_frame_rate())
363                 - DCPTime (piece->content->trim_start(), piece->frc)
364                 + piece->content->position();
365 }
366
367 ContentTime
368 Player::dcp_to_content_time (shared_ptr<const Piece> piece, DCPTime t) const
369 {
370         DCPTime s = t - piece->content->position ();
371         s = min (piece->content->length_after_trim(), s);
372         return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
373 }
374
375 DCPTime
376 Player::content_time_to_dcp (shared_ptr<const Piece> piece, ContentTime t) const
377 {
378         return max (DCPTime (), DCPTime (t - piece->content->trim_start(), piece->frc) + piece->content->position());
379 }
380
381 list<shared_ptr<Font> >
382 Player::get_subtitle_fonts ()
383 {
384         if (!_have_valid_pieces) {
385                 setup_pieces ();
386         }
387
388         list<shared_ptr<Font> > fonts;
389         BOOST_FOREACH (shared_ptr<Piece>& p, _pieces) {
390                 if (p->content->subtitle) {
391                         /* XXX: things may go wrong if there are duplicate font IDs
392                            with different font files.
393                         */
394                         list<shared_ptr<Font> > f = p->content->subtitle->fonts ();
395                         copy (f.begin(), f.end(), back_inserter (fonts));
396                 }
397         }
398
399         return fonts;
400 }
401
402 /** Set this player never to produce any video data */
403 void
404 Player::set_ignore_video ()
405 {
406         _ignore_video = true;
407 }
408
409 /** Set whether or not this player should always burn text subtitles into the image,
410  *  regardless of the content settings.
411  *  @param burn true to always burn subtitles, false to obey content settings.
412  */
413 void
414 Player::set_always_burn_subtitles (bool burn)
415 {
416         _always_burn_subtitles = burn;
417 }
418
419 void
420 Player::set_fast ()
421 {
422         _fast = true;
423         _have_valid_pieces = false;
424 }
425
426 void
427 Player::set_play_referenced ()
428 {
429         _play_referenced = true;
430         _have_valid_pieces = false;
431 }
432
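/** @return details of the reel assets (picture, sound, subtitle) of any DCP content
 *  which is being referenced rather than re-made, with entry points and durations
 *  adjusted for the content's trims and with their positions on the DCP timeline.
 */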
433 list<ReferencedReelAsset>
434 Player::get_reel_assets ()
435 {
436         list<ReferencedReelAsset> a;
437
438         BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
439                 shared_ptr<DCPContent> j = dynamic_pointer_cast<DCPContent> (i);
440                 if (!j) {
441                         continue;
442                 }
443
444                 scoped_ptr<DCPDecoder> decoder;
445                 try {
446                         decoder.reset (new DCPDecoder (j, _film->log()));
447                 } catch (...) {
448                         return a;
449                 }
450
451                 int64_t offset = 0;
452                 BOOST_FOREACH (shared_ptr<dcp::Reel> k, decoder->reels()) {
453
454                         DCPOMATIC_ASSERT (j->video_frame_rate ());
455                         double const cfr = j->video_frame_rate().get();
456                         Frame const trim_start = j->trim_start().frames_round (cfr);
457                         Frame const trim_end = j->trim_end().frames_round (cfr);
458                         int const ffr = _film->video_frame_rate ();
459
460                         DCPTime const from = i->position() + DCPTime::from_frames (offset, _film->video_frame_rate());
461                         if (j->reference_video ()) {
462                                 shared_ptr<dcp::ReelAsset> ra = k->main_picture ();
463                                 DCPOMATIC_ASSERT (ra);
464                                 ra->set_entry_point (ra->entry_point() + trim_start);
465                                 ra->set_duration (ra->duration() - trim_start - trim_end);
466                                 a.push_back (
467                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
468                                         );
469                         }
470
471                         if (j->reference_audio ()) {
472                                 shared_ptr<dcp::ReelAsset> ra = k->main_sound ();
473                                 DCPOMATIC_ASSERT (ra);
474                                 ra->set_entry_point (ra->entry_point() + trim_start);
475                                 ra->set_duration (ra->duration() - trim_start - trim_end);
476                                 a.push_back (
477                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
478                                         );
479                         }
480
481                         if (j->reference_subtitle ()) {
482                                 shared_ptr<dcp::ReelAsset> ra = k->main_subtitle ();
483                                 DCPOMATIC_ASSERT (ra);
484                                 ra->set_entry_point (ra->entry_point() + trim_start);
485                                 ra->set_duration (ra->duration() - trim_start - trim_end);
486                                 a.push_back (
487                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
488                                         );
489                         }
490
491                         /* Assume that main picture duration is the length of the reel */
492                         offset += k->main_picture()->duration ();
493                 }
494         }
495
496         return a;
497 }
498
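/** Run the player for one step: emit some black, some silence, or the output of
 *  whichever decoder is farthest behind, then pull any ready audio from the merger
 *  and emit that too.
 *  @return true if there is nothing more to be emitted.
 */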
499 bool
500 Player::pass ()
501 {
502         if (!_have_valid_pieces) {
503                 setup_pieces ();
504         }
505
506         /* Find the decoder, or the black/silent filler, which is farthest behind where we are and make it emit some data */
507
508         shared_ptr<Piece> earliest;
509         DCPTime earliest_content;
510
511         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
512                 if (!i->done) {
513                         DCPTime const t = content_time_to_dcp (i, i->decoder->position());
514                         /* Given two choices at the same time, pick the one with a subtitle so we see it before
515                            the video.
516                         */
517                         if (!earliest || t < earliest_content || (t == earliest_content && i->decoder->subtitle)) {
518                                 earliest_content = t;
519                                 earliest = i;
520                         }
521                 }
522         }
523
524         bool done = false;
525
526         if (!_black.done() && (!earliest || _black.position() < earliest_content)) {
527                 /* There is some black that must be emitted */
528                 emit_video (black_player_video_frame(), _black.position());
529                 _black.set_position (_black.position() + one_video_frame());
530         } else if (!_silent.done() && (!earliest || _silent.position() < earliest_content)) {
531                 /* There is some silence that must be emitted */
532                 DCPTimePeriod period (_silent.period_at_position());
533                 if (period.duration() > one_video_frame()) {
534                         period.to = period.from + one_video_frame();
535                 }
536                 fill_audio (period);
537                 _silent.set_position (period.to);
538         } else if (_playlist->length() == DCPTime()) {
539                 /* Special case of an empty Film; just give one black frame */
540                 emit_video (black_player_video_frame(), DCPTime());
541         } else if (earliest) {
542                 earliest->done = earliest->decoder->pass ();
543         } else {
544                 done = true;
545         }
546
547         /* Emit any audio that is ready */
548
549         DCPTime pull_to = _playlist->length ();
550         for (map<AudioStreamPtr, StreamState>::const_iterator i = _stream_states.begin(); i != _stream_states.end(); ++i) {
551                 if (!i->second.piece->done && i->second.last_push_end < pull_to) {
552                         pull_to = i->second.last_push_end;
553                 }
554         }
555
556         list<pair<shared_ptr<AudioBuffers>, DCPTime> > audio = _audio_merger.pull (pull_to);
557         for (list<pair<shared_ptr<AudioBuffers>, DCPTime> >::iterator i = audio.begin(); i != audio.end(); ++i) {
558                 if (_last_audio_time && i->second < *_last_audio_time) {
559                         /* There has been an accurate seek and we have received some audio before the seek time;
560                            discard it.
561                         */
562                         pair<shared_ptr<AudioBuffers>, DCPTime> cut = discard_audio (i->first, i->second, *_last_audio_time);
563                         if (!cut.first) {
564                                 continue;
565                         }
566                         *i = cut;
567                 }
568
569                 emit_audio (i->first, i->second);
570         }
571
572         return done;
573 }
574
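/** @return a single image containing any subtitles which should be burnt into the
 *  frame at the given time, or nothing if there are none.
 */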
575 optional<PositionImage>
576 Player::subtitles_for_frame (DCPTime time) const
577 {
578         list<PositionImage> subtitles;
579
580         BOOST_FOREACH (PlayerSubtitles i, _active_subtitles.get_burnt (time, _always_burn_subtitles)) {
581
582                 /* Image subtitles */
583                 list<PositionImage> c = transform_image_subtitles (i.image);
584                 copy (c.begin(), c.end(), back_inserter (subtitles));
585
586                 /* Text subtitles (rendered to an image) */
587                 if (!i.text.empty ()) {
588                         list<PositionImage> s = render_subtitles (i.text, i.fonts, _video_container_size, time);
589                         copy (s.begin(), s.end(), back_inserter (subtitles));
590                 }
591         }
592
593         if (subtitles.empty ()) {
594                 return optional<PositionImage> ();
595         }
596
597         return merge (subtitles);
598 }
599
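/** Handle a video frame from one of our decoders: fill any gap since the last video
 *  we emitted (with repeats of that piece's previous frame, or with black), discard
 *  frames outside the content's period or before an accurate seek, then wrap the
 *  frame in a PlayerVideo and emit it.
 */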
600 void
601 Player::video (weak_ptr<Piece> wp, ContentVideo video)
602 {
603         shared_ptr<Piece> piece = wp.lock ();
604         if (!piece) {
605                 return;
606         }
607
608         FrameRateChange frc(piece->content->active_video_frame_rate(), _film->video_frame_rate());
609         if (frc.skip && (video.frame % 2) == 1) {
610                 return;
611         }
612
613         /* Time and period of the frame we will emit */
614         DCPTime const time = content_video_to_dcp (piece, video.frame);
615         DCPTimePeriod const period (time, time + one_video_frame());
616
617         /* Fill gaps that we discover now that we have some video which needs to be emitted */
618
619         if (_last_video_time) {
620                 /* XXX: this may not work for 3D */
621                 DCPTime fill_from = max (*_last_video_time, piece->content->position());
622                 for (DCPTime j = fill_from; j < time; j += one_video_frame()) {
623                         LastVideoMap::const_iterator k = _last_video.find (wp);
624                         if (k != _last_video.end ()) {
625                                 emit_video (k->second, j);
626                         } else {
627                                 emit_video (black_player_video_frame(), j);
628                         }
629                 }
630         }
631
632         /* Discard if it's outside the content's period or if it's before the last accurate seek */
633         if (
634                 time < piece->content->position() ||
635                 time >= piece->content->end() ||
636                 (_last_video_time && time < *_last_video_time)) {
637                 return;
638         }
639
640         _last_video[wp].reset (
641                 new PlayerVideo (
642                         video.image,
643                         piece->content->video->crop (),
644                         piece->content->video->fade (video.frame),
645                         piece->content->video->scale().size (
646                                 piece->content->video, _video_container_size, _film->frame_size ()
647                                 ),
648                         _video_container_size,
649                         video.eyes,
650                         video.part,
651                         piece->content->video->colour_conversion ()
652                         )
653                 );
654
655         emit_video (_last_video[wp], time);
656 }
657
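/** Handle an audio block from one of our decoders: trim it to the content's period,
 *  apply gain, remap it to the film's channels, run any audio processor and push the
 *  result into the merger, noting how far this stream has now been pushed.
 */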
658 void
659 Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_audio)
660 {
661         DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);
662
663         shared_ptr<Piece> piece = wp.lock ();
664         if (!piece) {
665                 return;
666         }
667
668         shared_ptr<AudioContent> content = piece->content->audio;
669         DCPOMATIC_ASSERT (content);
670
671         /* Compute time in the DCP */
672         DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame);
673         /* And the end of this block in the DCP */
674         DCPTime end = time + DCPTime::from_frames(content_audio.audio->frames(), content->resampled_frame_rate());
675
676         /* Remove anything that comes before the start or after the end of the content */
677         if (time < piece->content->position()) {
678                 pair<shared_ptr<AudioBuffers>, DCPTime> cut = discard_audio (content_audio.audio, time, piece->content->position());
679                 if (!cut.first) {
680                         /* This audio is entirely discarded */
681                         return;
682                 }
683                 content_audio.audio = cut.first;
684                 time = cut.second;
685         } else if (time > piece->content->end()) {
686                 /* Discard it all */
687                 return;
688         } else if (end > piece->content->end()) {
689                 Frame const remaining_frames = DCPTime(piece->content->end() - time).frames_round(_film->audio_frame_rate());
690                 if (remaining_frames == 0) {
691                         return;
692                 }
693                 shared_ptr<AudioBuffers> cut (new AudioBuffers (content_audio.audio->channels(), remaining_frames));
694                 cut->copy_from (content_audio.audio.get(), remaining_frames, 0, 0);
695                 content_audio.audio = cut;
696         }
697
698         DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);
699
700         /* Gain */
701
702         if (content->gain() != 0) {
703                 shared_ptr<AudioBuffers> gain (new AudioBuffers (content_audio.audio));
704                 gain->apply_gain (content->gain ());
705                 content_audio.audio = gain;
706         }
707
708         /* Remap */
709
710         content_audio.audio = remap (content_audio.audio, _film->audio_channels(), stream->mapping());
711
712         /* Process */
713
714         if (_audio_processor) {
715                 content_audio.audio = _audio_processor->run (content_audio.audio, _film->audio_channels ());
716         }
717
718         /* Push */
719
720         _audio_merger.push (content_audio.audio, time);
721         DCPOMATIC_ASSERT (_stream_states.find (stream) != _stream_states.end ());
722         _stream_states[stream].last_push_end = time + DCPTime::from_frames (content_audio.audio->frames(), _film->audio_frame_rate());
723 }
724
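/** Handle the start of an image subtitle: apply the content's offset and scale
 *  settings and add the result to the list of active subtitles.
 */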
725 void
726 Player::image_subtitle_start (weak_ptr<Piece> wp, ContentImageSubtitle subtitle)
727 {
728         shared_ptr<Piece> piece = wp.lock ();
729         if (!piece) {
730                 return;
731         }
732
733         /* Apply content's subtitle offsets */
734         subtitle.sub.rectangle.x += piece->content->subtitle->x_offset ();
735         subtitle.sub.rectangle.y += piece->content->subtitle->y_offset ();
736
737         /* Apply content's subtitle scale */
738         subtitle.sub.rectangle.width *= piece->content->subtitle->x_scale ();
739         subtitle.sub.rectangle.height *= piece->content->subtitle->y_scale ();
740
741         /* Apply a corrective translation to keep the subtitle centred after that scale */
742         subtitle.sub.rectangle.x -= subtitle.sub.rectangle.width * (piece->content->subtitle->x_scale() - 1);
743         subtitle.sub.rectangle.y -= subtitle.sub.rectangle.height * (piece->content->subtitle->y_scale() - 1);
744
745         PlayerSubtitles ps;
746         ps.image.push_back (subtitle.sub);
747         DCPTime from (content_time_to_dcp (piece, subtitle.from()));
748
749         _active_subtitles.add_from (wp, ps, from);
750 }
751
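/** Handle the start of a text subtitle: apply the content's offsets, scales, fonts
 *  and in-time to each SubtitleString and add the result to the list of active
 *  subtitles.
 */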
752 void
753 Player::text_subtitle_start (weak_ptr<Piece> wp, ContentTextSubtitle subtitle)
754 {
755         shared_ptr<Piece> piece = wp.lock ();
756         if (!piece) {
757                 return;
758         }
759
760         PlayerSubtitles ps;
761         DCPTime const from (content_time_to_dcp (piece, subtitle.from()));
762
763         BOOST_FOREACH (dcp::SubtitleString s, subtitle.subs) {
764                 s.set_h_position (s.h_position() + piece->content->subtitle->x_offset ());
765                 s.set_v_position (s.v_position() + piece->content->subtitle->y_offset ());
766                 float const xs = piece->content->subtitle->x_scale();
767                 float const ys = piece->content->subtitle->y_scale();
768                 float size = s.size();
769
770                 /* Adjust size to express the common part of the scaling; this is
771                    equivalent to multiplying by max(xs, ys), so if xs = ys = 0.5 the
772                    size is halved.  The remaining aspect change is applied below. */
773                 if (xs > 1e-5 && ys > 1e-5) {
774                         size *= 1 / min (1 / xs, 1 / ys);
775                 }
776                 s.set_size (size);
777
778                 /* Then express aspect ratio changes */
779                 if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) {
780                         s.set_aspect_adjust (xs / ys);
781                 }
782
783                 s.set_in (dcp::Time(from.seconds(), 1000));
784                 ps.text.push_back (SubtitleString (s, piece->content->subtitle->outline_width()));
785                 ps.add_fonts (piece->content->subtitle->fonts ());
786         }
787
788         _active_subtitles.add_from (wp, ps, from);
789 }
790
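/** Handle the end of a subtitle: work out its DCP time period and, if it is not
 *  being burnt into the image, emit it via the Subtitle signal.
 */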
791 void
792 Player::subtitle_stop (weak_ptr<Piece> wp, ContentTime to)
793 {
794         if (!_active_subtitles.have (wp)) {
795                 return;
796         }
797
798         shared_ptr<Piece> piece = wp.lock ();
799         if (!piece) {
800                 return;
801         }
802
803         DCPTime const dcp_to = content_time_to_dcp (piece, to);
804
805         pair<PlayerSubtitles, DCPTime> from = _active_subtitles.add_to (wp, dcp_to);
806
807         if (piece->content->subtitle->use() && !_always_burn_subtitles && !piece->content->subtitle->burn()) {
808                 Subtitle (from.first, DCPTimePeriod (from.second, dcp_to));
809         }
810 }
811
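/** Seek the player.  Each decoder is seeked to the corresponding content time, or
 *  marked as done if the seek is beyond its content; an accurate seek also records
 *  the target time so that any video or audio arriving before it can be discarded.
 */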
812 void
813 Player::seek (DCPTime time, bool accurate)
814 {
815         if (_audio_processor) {
816                 _audio_processor->flush ();
817         }
818
819         _audio_merger.clear ();
820         _active_subtitles.clear ();
821
822         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
823                 if (time < i->content->position()) {
824                         /* Before; seek to 0 */
825                         i->decoder->seek (ContentTime(), accurate);
826                         i->done = false;
827                 } else if (i->content->position() <= time && time < i->content->end()) {
828                         /* During; seek to position */
829                         i->decoder->seek (dcp_to_content_time (i, time), accurate);
830                         i->done = false;
831                 } else {
832                         /* After; this piece is done */
833                         i->done = true;
834                 }
835         }
836
837         if (accurate) {
838                 _last_video_time = time;
839                 _last_audio_time = time;
840         } else {
841                 _last_video_time = optional<DCPTime>();
842                 _last_audio_time = optional<DCPTime>();
843         }
844
845         _black.set_position (time);
846         _silent.set_position (time);
847
848         _last_video.clear ();
849 }
850
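/** Emit a video frame, burning in any active subtitles.  The last-video time and
 *  the active subtitle list are only advanced once both eyes of a frame have been
 *  emitted (or after a 2D frame).
 */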
851 void
852 Player::emit_video (shared_ptr<PlayerVideo> pv, DCPTime time)
853 {
854         optional<PositionImage> subtitles = subtitles_for_frame (time);
855         if (subtitles) {
856                 pv->set_subtitle (subtitles.get ());
857         }
858
859         Video (pv, time);
860
861         if (pv->eyes() == EYES_BOTH || pv->eyes() == EYES_RIGHT) {
862                 _last_video_time = time + one_video_frame();
863                 _active_subtitles.clear_before (time);
864         }
865 }
866
867 void
868 Player::emit_audio (shared_ptr<AudioBuffers> data, DCPTime time)
869 {
870         Audio (data, time);
871         _last_audio_time = time + DCPTime::from_frames (data->frames(), _film->audio_frame_rate());
872 }
873
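/** Emit silence to cover the given period, in blocks of at most half a second. */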
874 void
875 Player::fill_audio (DCPTimePeriod period)
876 {
877         if (period.from == period.to) {
878                 return;
879         }
880
881         DCPOMATIC_ASSERT (period.from < period.to);
882
883         DCPTime t = period.from;
884         while (t < period.to) {
885                 DCPTime block = min (DCPTime::from_seconds (0.5), period.to - t);
886                 Frame const samples = block.frames_round(_film->audio_frame_rate());
887                 if (samples) {
888                         shared_ptr<AudioBuffers> silence (new AudioBuffers (_film->audio_channels(), samples));
889                         silence->make_silent ();
890                         emit_audio (silence, t);
891                 }
892                 t += block;
893         }
894 }
895
896 DCPTime
897 Player::one_video_frame () const
898 {
899         return DCPTime::from_frames (1, _film->video_frame_rate ());
900 }
901
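/** Discard the part of some audio which comes before discard_to.
 *  @return the remaining audio and its new start time, or a null pointer if nothing
 *  remains.
 */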
902 pair<shared_ptr<AudioBuffers>, DCPTime>
903 Player::discard_audio (shared_ptr<const AudioBuffers> audio, DCPTime time, DCPTime discard_to) const
904 {
905         DCPTime const discard_time = discard_to - time;
906         Frame const discard_frames = discard_time.frames_round(_film->audio_frame_rate());
907         Frame remaining_frames = audio->frames() - discard_frames;
908         if (remaining_frames <= 0) {
909                 return make_pair(shared_ptr<AudioBuffers>(), DCPTime());
910         }
911         shared_ptr<AudioBuffers> cut (new AudioBuffers (audio->channels(), remaining_frames));
912         cut->copy_from (audio.get(), remaining_frames, discard_frames, 0);
913         return make_pair(cut, time + discard_time);
914 }