src/lib/player.cc
/*
    Copyright (C) 2013-2017 Carl Hetherington <cth@carlh.net>

    This file is part of DCP-o-matic.

    DCP-o-matic is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    DCP-o-matic is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.

*/

#include "player.h"
#include "film.h"
#include "audio_buffers.h"
#include "content_audio.h"
#include "dcp_content.h"
#include "job.h"
#include "image.h"
#include "raw_image_proxy.h"
#include "ratio.h"
#include "log.h"
#include "render_subtitles.h"
#include "config.h"
#include "content_video.h"
#include "player_video.h"
#include "frame_rate_change.h"
#include "audio_processor.h"
#include "playlist.h"
#include "referenced_reel_asset.h"
#include "decoder_factory.h"
#include "decoder.h"
#include "video_decoder.h"
#include "audio_decoder.h"
#include "subtitle_content.h"
#include "subtitle_decoder.h"
#include "ffmpeg_content.h"
#include "audio_content.h"
#include "content_subtitle.h"
#include "dcp_decoder.h"
#include "image_decoder.h"
#include "resampler.h"
#include "compose.hpp"
#include <dcp/reel.h>
#include <dcp/reel_sound_asset.h>
#include <dcp/reel_subtitle_asset.h>
#include <dcp/reel_picture_asset.h>
#include <boost/foreach.hpp>
#include <stdint.h>
#include <algorithm>
#include <iostream>

#include "i18n.h"

#define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);

using std::list;
using std::cout;
using std::min;
using std::max;
using std::vector;
using std::pair;
using std::map;
using std::make_pair;
using std::copy;
using boost::shared_ptr;
using boost::weak_ptr;
using boost::dynamic_pointer_cast;
using boost::optional;
using boost::scoped_ptr;

Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
        : _film (film)
        , _playlist (playlist)
        , _have_valid_pieces (false)
        , _ignore_video (false)
        , _ignore_audio (false)
        , _always_burn_subtitles (false)
        , _fast (false)
        , _play_referenced (false)
        , _audio_merger (_film->audio_frame_rate())
{
        _film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
        _playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
        _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::playlist_content_changed, this, _1, _2, _3));
        set_video_container_size (_film->frame_size ());

        film_changed (Film::AUDIO_PROCESSOR);

        seek (DCPTime (), true);
}

void
Player::setup_pieces ()
{
        _pieces.clear ();

        BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {

                if (!i->paths_valid ()) {
                        continue;
                }

                shared_ptr<Decoder> decoder = decoder_factory (i, _film->log());
                FrameRateChange frc (i->active_video_frame_rate(), _film->video_frame_rate());

                if (!decoder) {
                        /* Not something that we can decode; e.g. Atmos content */
                        continue;
                }

                if (decoder->video && _ignore_video) {
                        decoder->video->set_ignore ();
                }

                if (decoder->audio && _ignore_audio) {
                        decoder->audio->set_ignore ();
                }

                shared_ptr<DCPDecoder> dcp = dynamic_pointer_cast<DCPDecoder> (decoder);
                if (dcp && _play_referenced) {
                        dcp->set_decode_referenced ();
                }

                shared_ptr<Piece> piece (new Piece (i, decoder, frc));
                _pieces.push_back (piece);

                if (decoder->video) {
                        decoder->video->Data.connect (bind (&Player::video, this, weak_ptr<Piece> (piece), _1));
                }

                if (decoder->audio) {
                        decoder->audio->Data.connect (bind (&Player::audio, this, weak_ptr<Piece> (piece), _1, _2));
                }

                if (decoder->subtitle) {
                        decoder->subtitle->ImageData.connect (bind (&Player::image_subtitle, this, weak_ptr<Piece> (piece), _1));
                        decoder->subtitle->TextData.connect (bind (&Player::text_subtitle, this, weak_ptr<Piece> (piece), _1));
                }
        }

        BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
                if (i->content->audio) {
                        BOOST_FOREACH (AudioStreamPtr j, i->content->audio->streams()) {
                                _stream_states[j] = StreamState (i, i->content->position ());
                        }
                }
        }

        if (!_play_referenced) {
                BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
                        shared_ptr<DCPContent> dc = dynamic_pointer_cast<DCPContent> (i->content);
                        if (dc) {
                                if (dc->reference_video()) {
                                        _no_video.push_back (DCPTimePeriod (dc->position(), dc->end()));
                                }
                                if (dc->reference_audio()) {
                                        _no_audio.push_back (DCPTimePeriod (dc->position(), dc->end()));
                                }
                        }
                }
        }

        _last_video_time = optional<DCPTime> ();
        _last_audio_time = optional<DCPTime> ();
        _have_valid_pieces = true;
}

void
Player::playlist_content_changed (weak_ptr<Content> w, int property, bool frequent)
{
        shared_ptr<Content> c = w.lock ();
        if (!c) {
                return;
        }

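        /* The first group of properties below invalidates the current pieces, so they are rebuilt
           (via setup_pieces) before the next pass; the second group only requires listeners to be
           told that our output has changed.
        */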
        if (
                property == ContentProperty::POSITION ||
                property == ContentProperty::LENGTH ||
                property == ContentProperty::TRIM_START ||
                property == ContentProperty::TRIM_END ||
                property == ContentProperty::PATH ||
                property == VideoContentProperty::FRAME_TYPE ||
                property == DCPContentProperty::NEEDS_ASSETS ||
                property == DCPContentProperty::NEEDS_KDM ||
                property == SubtitleContentProperty::COLOUR ||
                property == SubtitleContentProperty::OUTLINE ||
                property == SubtitleContentProperty::SHADOW ||
                property == SubtitleContentProperty::EFFECT_COLOUR ||
                property == FFmpegContentProperty::SUBTITLE_STREAM ||
                property == VideoContentProperty::COLOUR_CONVERSION
                ) {

                _have_valid_pieces = false;
                Changed (frequent);

        } else if (
                property == SubtitleContentProperty::LINE_SPACING ||
                property == SubtitleContentProperty::OUTLINE_WIDTH ||
                property == SubtitleContentProperty::Y_SCALE ||
                property == SubtitleContentProperty::FADE_IN ||
                property == SubtitleContentProperty::FADE_OUT ||
                property == ContentProperty::VIDEO_FRAME_RATE ||
                property == SubtitleContentProperty::USE ||
                property == SubtitleContentProperty::X_OFFSET ||
                property == SubtitleContentProperty::Y_OFFSET ||
                property == SubtitleContentProperty::X_SCALE ||
                property == SubtitleContentProperty::FONTS ||
                property == VideoContentProperty::CROP ||
                property == VideoContentProperty::SCALE ||
                property == VideoContentProperty::FADE_IN ||
                property == VideoContentProperty::FADE_OUT
                ) {

                Changed (frequent);
        }
}

void
Player::set_video_container_size (dcp::Size s)
{
        if (s == _video_container_size) {
                return;
        }

        _video_container_size = s;

        _black_image.reset (new Image (AV_PIX_FMT_RGB24, _video_container_size, true));
        _black_image->make_black ();

        Changed (false);
}

void
Player::playlist_changed ()
{
        _have_valid_pieces = false;
        Changed (false);
}

void
Player::film_changed (Film::Property p)
{
        /* Here we should notice Film properties that affect our output, and
           alert listeners that our output now would be different to how it was
           last time we were run.
        */

        if (p == Film::CONTAINER) {
                Changed (false);
        } else if (p == Film::VIDEO_FRAME_RATE) {
                /* Pieces contain a FrameRateChange which contains the DCP frame rate,
                   so we need new pieces here.
                */
                _have_valid_pieces = false;
                Changed (false);
        } else if (p == Film::AUDIO_PROCESSOR) {
                if (_film->audio_processor ()) {
                        _audio_processor = _film->audio_processor()->clone (_film->audio_frame_rate ());
                }
        }
}

list<PositionImage>
Player::transform_image_subtitles (list<ImageSubtitle> subs) const
{
        list<PositionImage> all;

        for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
                if (!i->image) {
                        continue;
                }

                /* We will scale the subtitle up to fit _video_container_size */
                dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);

                /* Then we need a corrective translation, consisting of two parts:
                 *
                 * 1.  that which is the result of the scaling of the subtitle by _video_container_size; this will be
                 *     rect.x * _video_container_size.width and rect.y * _video_container_size.height.
                 *
                 * 2.  that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
                 *     (width_before_subtitle_scale * (1 - subtitle_x_scale) / 2) and
                 *     (height_before_subtitle_scale * (1 - subtitle_y_scale) / 2).
                 *
                 * Combining these two translations gives these expressions.
                 */
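                /* A rough worked example (illustrative numbers only): with a 1998x1080 container
                   and a subtitle rectangle of x = 0.1, y = 0.8, width = 0.8, height = 0.1 (all
                   expressed as proportions of the container), the image is scaled to about
                   1598x108 and positioned at (200, 864).
                */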

                all.push_back (
                        PositionImage (
                                i->image->scale (
                                        scaled_size,
                                        dcp::YUV_TO_RGB_REC601,
                                        i->image->pixel_format (),
                                        true,
                                        _fast
                                        ),
                                Position<int> (
                                        lrint (_video_container_size.width * i->rectangle.x),
                                        lrint (_video_container_size.height * i->rectangle.y)
                                        )
                                )
                        );
        }

        return all;
}

shared_ptr<PlayerVideo>
Player::black_player_video_frame () const
{
        return shared_ptr<PlayerVideo> (
                new PlayerVideo (
                        shared_ptr<const ImageProxy> (new RawImageProxy (_black_image)),
                        Crop (),
                        optional<double> (),
                        _video_container_size,
                        _video_container_size,
                        EYES_BOTH,
                        PART_WHOLE,
                        PresetColourConversion::all().front().conversion
                )
        );
}

Frame
Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
{
        DCPTime s = t - piece->content->position ();
        s = min (piece->content->length_after_trim(), s);
        s = max (DCPTime(), s + DCPTime (piece->content->trim_start(), piece->frc));

        /* It might seem more logical here to convert s to a ContentTime (using the FrameRateChange)
           then convert that ContentTime to frames at the content's rate.  However this fails for
           situations like content at 29.9978733fps, DCP at 30fps.  The accuracy of the Time type is not
           enough to distinguish between the two with low values of time (e.g. 3200 in Time units).

           Instead we convert the DCPTime using the DCP video rate then account for any skip/repeat.
        */
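        /* A rough worked example (assuming FrameRateChange::factor() is the repeat count, or 0.5
           when every other frame is skipped): 25fps content in a 50fps DCP has factor() == 2, so a
           DCPTime of one second gives 50 DCP frames / 2 = content frame 25.  29.97fps content in a
           30fps DCP has factor() == 1, so the DCP frame index is used directly and the small rate
           difference is absorbed by the frame rate change.
        */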
        return s.frames_floor (piece->frc.dcp) / piece->frc.factor ();
}

DCPTime
Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
{
        /* See comment in dcp_to_content_video */
        DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime (piece->content->trim_start (), piece->frc);
        return max (DCPTime (), d + piece->content->position ());
}

Frame
Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
{
        DCPTime s = t - piece->content->position ();
        s = min (piece->content->length_after_trim(), s);
        /* See notes in dcp_to_content_video */
        return max (DCPTime (), DCPTime (piece->content->trim_start (), piece->frc) + s).frames_floor (_film->audio_frame_rate ());
}

DCPTime
Player::resampled_audio_to_dcp (shared_ptr<const Piece> piece, Frame f) const
{
        /* See comment in dcp_to_content_video */
        DCPTime const d = DCPTime::from_frames (f, _film->audio_frame_rate()) - DCPTime (piece->content->trim_start(), piece->frc);
        return max (DCPTime (), d + piece->content->position ());
}

ContentTime
Player::dcp_to_content_time (shared_ptr<const Piece> piece, DCPTime t) const
{
        DCPTime s = t - piece->content->position ();
        s = min (piece->content->length_after_trim(), s);
        return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
}

DCPTime
Player::content_time_to_dcp (shared_ptr<const Piece> piece, ContentTime t) const
{
        return max (DCPTime (), DCPTime (t - piece->content->trim_start(), piece->frc) + piece->content->position());
}

list<shared_ptr<Font> >
Player::get_subtitle_fonts ()
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        list<shared_ptr<Font> > fonts;
        BOOST_FOREACH (shared_ptr<Piece>& p, _pieces) {
                if (p->content->subtitle) {
                        /* XXX: things may go wrong if there are duplicate font IDs
                           with different font files.
                        */
                        list<shared_ptr<Font> > f = p->content->subtitle->fonts ();
                        copy (f.begin(), f.end(), back_inserter (fonts));
                }
        }

        return fonts;
}

/** Set this player never to produce any video data */
void
Player::set_ignore_video ()
{
        _ignore_video = true;
}

/** Set this player never to produce any audio data */
void
Player::set_ignore_audio ()
{
        _ignore_audio = true;
}

/** Set whether or not this player should always burn text subtitles into the image,
 *  regardless of the content settings.
 *  @param burn true to always burn subtitles, false to obey content settings.
 */
void
Player::set_always_burn_subtitles (bool burn)
{
        _always_burn_subtitles = burn;
}

void
Player::set_fast ()
{
        _fast = true;
        _have_valid_pieces = false;
}

void
Player::set_play_referenced ()
{
        _play_referenced = true;
        _have_valid_pieces = false;
}

list<ReferencedReelAsset>
Player::get_reel_assets ()
{
        list<ReferencedReelAsset> a;

        BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
                shared_ptr<DCPContent> j = dynamic_pointer_cast<DCPContent> (i);
                if (!j) {
                        continue;
                }

                scoped_ptr<DCPDecoder> decoder;
                try {
                        decoder.reset (new DCPDecoder (j, _film->log()));
                } catch (...) {
                        return a;
                }

                int64_t offset = 0;
                BOOST_FOREACH (shared_ptr<dcp::Reel> k, decoder->reels()) {

                        DCPOMATIC_ASSERT (j->video_frame_rate ());
                        double const cfr = j->video_frame_rate().get();
                        Frame const trim_start = j->trim_start().frames_round (cfr);
                        Frame const trim_end = j->trim_end().frames_round (cfr);
                        int const ffr = _film->video_frame_rate ();

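                        /* Each referenced asset is presented with its entry point moved forward by
                           trim_start and its duration reduced by trim_start + trim_end, so the
                           trimmed parts of the reel are skipped.  For example (illustrative
                           numbers only): trimming one second off each end of a 24fps reel moves
                           the entry point on by 24 frames and shortens the duration by 48 frames.
                        */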
                        DCPTime const from = i->position() + DCPTime::from_frames (offset, _film->video_frame_rate());
                        if (j->reference_video ()) {
                                shared_ptr<dcp::ReelAsset> ra = k->main_picture ();
                                DCPOMATIC_ASSERT (ra);
                                ra->set_entry_point (ra->entry_point() + trim_start);
                                ra->set_duration (ra->duration() - trim_start - trim_end);
                                a.push_back (
                                        ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
                                        );
                        }

                        if (j->reference_audio ()) {
                                shared_ptr<dcp::ReelAsset> ra = k->main_sound ();
                                DCPOMATIC_ASSERT (ra);
                                ra->set_entry_point (ra->entry_point() + trim_start);
                                ra->set_duration (ra->duration() - trim_start - trim_end);
                                a.push_back (
                                        ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
                                        );
                        }

                        if (j->reference_subtitle ()) {
                                shared_ptr<dcp::ReelAsset> ra = k->main_subtitle ();
                                DCPOMATIC_ASSERT (ra);
                                ra->set_entry_point (ra->entry_point() + trim_start);
                                ra->set_duration (ra->duration() - trim_start - trim_end);
                                a.push_back (
                                        ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
                                        );
                        }

                        /* Assume that main picture duration is the length of the reel */
                        offset += k->main_picture()->duration ();
                }
        }

        return a;
}

list<shared_ptr<Piece> >
Player::overlaps (DCPTime from, DCPTime to, boost::function<bool (Content *)> valid)
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        list<shared_ptr<Piece> > overlaps;
        BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
                if (valid (i->content.get ()) && i->content->position() < to && i->content->end() > from) {
                        overlaps.push_back (i);
                }
        }

        return overlaps;
}

bool
Player::pass ()
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        shared_ptr<Piece> earliest;
        DCPTime earliest_content;

        BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
                if (!i->done) {
                        DCPTime const t = i->content->position() + DCPTime (i->decoder->position(), i->frc);
                        if (!earliest || t < earliest_content) {
                                earliest_content = t;
                                earliest = i;
                        }
                }
        }

        if (!earliest) {
                /* No more content; fill up with silent black */
                DCPTimePeriod remaining_video (DCPTime(), _playlist->length());
                if (_last_video_time) {
                        remaining_video.from = _last_video_time.get() + one_video_frame();
                }
                fill_video (remaining_video);
                DCPTimePeriod remaining_audio (DCPTime(), _playlist->length());
                if (_last_audio_time) {
                        remaining_audio.from = _last_audio_time.get();
                }
                fill_audio (remaining_audio);
                return true;
        }

        earliest->done = earliest->decoder->pass ();
        if (earliest->done && earliest->content->audio) {
                /* Flush the Player audio system for this piece */
                BOOST_FOREACH (AudioStreamPtr i, earliest->content->audio->streams()) {
                        audio_flush (earliest, i);
                }
        }

        /* Emit any audio that is ready */

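        /* We can only safely emit audio up to the earliest point that every still-active stream
           has been pushed to (pull_from); anything later might still be overlapped by audio which
           has not yet arrived from another piece.
        */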
        DCPTime pull_from = _playlist->length ();
        for (map<AudioStreamPtr, StreamState>::const_iterator i = _stream_states.begin(); i != _stream_states.end(); ++i) {
                if (!i->second.piece->done && i->second.last_push_end < pull_from) {
                        pull_from = i->second.last_push_end;
                }
        }

        list<pair<shared_ptr<AudioBuffers>, DCPTime> > audio = _audio_merger.pull (pull_from);
        for (list<pair<shared_ptr<AudioBuffers>, DCPTime> >::iterator i = audio.begin(); i != audio.end(); ++i) {
                if (_last_audio_time && i->second < _last_audio_time.get()) {
                        cout << "FAIL " << to_string(i->second) << " " << to_string(_last_audio_time.get()) << "\n";
                }
                DCPOMATIC_ASSERT (!_last_audio_time || i->second >= _last_audio_time.get());
                if (_last_audio_time) {
                        fill_audio (DCPTimePeriod (_last_audio_time.get(), i->second));
                }
                Audio (i->first, i->second);
                _last_audio_time = i->second + DCPTime::from_frames(i->first->frames(), _film->audio_frame_rate());
        }

        return false;
}

void
Player::video (weak_ptr<Piece> wp, ContentVideo video)
{
        shared_ptr<Piece> piece = wp.lock ();
        if (!piece) {
                return;
        }

        FrameRateChange frc(piece->content->active_video_frame_rate(), _film->video_frame_rate());
        if (frc.skip && (video.frame % 2) == 1) {
                return;
        }

        /* Time and period of the frame we will emit */
        DCPTime const time = content_video_to_dcp (piece, video.frame);
        DCPTimePeriod const period (time, time + one_video_frame());

        /* Discard if it's outside the content's period */
        if (time < piece->content->position() || time >= piece->content->end()) {
                return;
        }

        /* Get any subtitles */

        optional<PositionImage> subtitles;

        for (list<pair<PlayerSubtitles, DCPTimePeriod> >::const_iterator i = _subtitles.begin(); i != _subtitles.end(); ++i) {

                if (!i->second.overlap (period)) {
                        continue;
                }

                list<PositionImage> sub_images;

                /* Image subtitles */
                list<PositionImage> c = transform_image_subtitles (i->first.image);
                copy (c.begin(), c.end(), back_inserter (sub_images));

                /* Text subtitles (rendered to an image) */
                if (!i->first.text.empty ()) {
                        list<PositionImage> s = render_subtitles (i->first.text, i->first.fonts, _video_container_size, time);
                        copy (s.begin (), s.end (), back_inserter (sub_images));
                }

                if (!sub_images.empty ()) {
                        subtitles = merge (sub_images);
                }
        }

        /* Fill gaps */
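        /* If time has jumped forward since the last frame we emitted, fill_video() keeps the
           output contiguous by repeating the last frame where there is video content, or emitting
           black otherwise.
        */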

        if (_last_video_time) {
                fill_video (DCPTimePeriod (_last_video_time.get() + one_video_frame(), time));
        }

        _last_video.reset (
                new PlayerVideo (
                        video.image,
                        piece->content->video->crop (),
                        piece->content->video->fade (video.frame),
                        piece->content->video->scale().size (
                                piece->content->video, _video_container_size, _film->frame_size ()
                                ),
                        _video_container_size,
                        video.eyes,
                        video.part,
                        piece->content->video->colour_conversion ()
                        )
                );

        if (subtitles) {
                _last_video->set_subtitle (subtitles.get ());
        }

        _last_video_time = time;

        Video (_last_video, *_last_video_time);

        /* Discard any subtitles we no longer need */

        for (list<pair<PlayerSubtitles, DCPTimePeriod> >::iterator i = _subtitles.begin (); i != _subtitles.end(); ) {
                list<pair<PlayerSubtitles, DCPTimePeriod> >::iterator tmp = i;
                ++tmp;

                if (i->second.to < time) {
                        _subtitles.erase (i);
                }

                i = tmp;
        }
}

void
Player::audio_flush (shared_ptr<Piece> piece, AudioStreamPtr stream)
{
        shared_ptr<AudioContent> content = piece->content->audio;
        DCPOMATIC_ASSERT (content);

        shared_ptr<Resampler> r = resampler (content, stream, false);
        if (!r) {
                return;
        }

        pair<shared_ptr<const AudioBuffers>, Frame> ro = r->flush ();
        if (ro.first->frames() == 0) {
                return;
        }

        ContentAudio content_audio;
        content_audio.audio = ro.first;
        content_audio.frame = ro.second;

        /* Compute time in the DCP */
        DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame) + DCPTime::from_seconds (content->delay() / 1000.0);

        audio_transform (content, stream, content_audio, time);
}

/** Do our common processing on some audio */
void
Player::audio_transform (shared_ptr<AudioContent> content, AudioStreamPtr stream, ContentAudio content_audio, DCPTime time)
{
        DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);

        /* Gain */

        if (content->gain() != 0) {
                shared_ptr<AudioBuffers> gain (new AudioBuffers (content_audio.audio));
                gain->apply_gain (content->gain ());
                content_audio.audio = gain;
        }

        /* Remap */

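        /* Mix the stream's channels into a buffer with the DCP's channel count, using the
           stream's AudioMapping.  For example (illustrative only): a stereo stream mapped into a
           5.1 DCP might send input channel 0 to dcp::LEFT and channel 1 to dcp::RIGHT with gain 1,
           leaving the other output channels silent.
        */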
        shared_ptr<AudioBuffers> dcp_mapped (new AudioBuffers (_film->audio_channels(), content_audio.audio->frames()));
        dcp_mapped->make_silent ();

        AudioMapping map = stream->mapping ();
        for (int i = 0; i < map.input_channels(); ++i) {
                for (int j = 0; j < dcp_mapped->channels(); ++j) {
                        if (map.get (i, static_cast<dcp::Channel> (j)) > 0) {
                                dcp_mapped->accumulate_channel (
                                        content_audio.audio.get(),
                                        i,
                                        static_cast<dcp::Channel> (j),
                                        map.get (i, static_cast<dcp::Channel> (j))
                                        );
                        }
                }
        }

        content_audio.audio = dcp_mapped;

        /* Process */

        if (_audio_processor) {
                content_audio.audio = _audio_processor->run (content_audio.audio, _film->audio_channels ());
        }

        /* Push */

        _audio_merger.push (content_audio.audio, time);
        DCPOMATIC_ASSERT (_stream_states.find (stream) != _stream_states.end ());
        _stream_states[stream].last_push_end = time + DCPTime::from_frames (content_audio.audio->frames(), _film->audio_frame_rate());
}

void
Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_audio)
{
        DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);

        shared_ptr<Piece> piece = wp.lock ();
        if (!piece) {
                return;
        }

        shared_ptr<AudioContent> content = piece->content->audio;
        DCPOMATIC_ASSERT (content);

        /* Resample */
        if (stream->frame_rate() != content->resampled_frame_rate()) {
                shared_ptr<Resampler> r = resampler (content, stream, true);
                pair<shared_ptr<const AudioBuffers>, Frame> ro = r->run (content_audio.audio, content_audio.frame);
                if (ro.first->frames() == 0) {
                        return;
                }
                content_audio.audio = ro.first;
                content_audio.frame = ro.second;
        }

        /* Compute time in the DCP */
        DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame) + DCPTime::from_seconds (content->delay() / 1000.0);
        /* And the end of this block in the DCP */
        DCPTime end = time + DCPTime::from_frames(content_audio.audio->frames(), content->resampled_frame_rate());

        /* Remove anything that comes before the start or after the end of the content */
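        /* For example (illustrative numbers only): if the content starts at 10s in the DCP and a
           one-second block of 48kHz audio arrives timed at 9.5s, the first 24000 frames are
           dropped, the remaining 24000 are kept and `time' becomes 10s.  Blocks which overlap the
           end of the content are truncated in the same way.
        */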
        if (time < piece->content->position()) {
                DCPTime const discard_time = piece->content->position() - time;
                Frame discard_frames = discard_time.frames_round(_film->audio_frame_rate());
                Frame remaining_frames = content_audio.audio->frames() - discard_frames;
                if (remaining_frames <= 0) {
                        /* This audio is entirely discarded */
                        return;
                }
                shared_ptr<AudioBuffers> cut (new AudioBuffers (content_audio.audio->channels(), remaining_frames));
                cut->copy_from (content_audio.audio.get(), remaining_frames, discard_frames, 0);
                content_audio.audio = cut;
                time += discard_time;
        } else if (time > piece->content->end()) {
                /* Discard it all */
                return;
        } else if (end > piece->content->end()) {
                Frame const remaining_frames = DCPTime(piece->content->end() - time).frames_round(_film->audio_frame_rate());
                DCPOMATIC_ASSERT (remaining_frames > 0);
                shared_ptr<AudioBuffers> cut (new AudioBuffers (content_audio.audio->channels(), remaining_frames));
                cut->copy_from (content_audio.audio.get(), remaining_frames, 0, 0);
                content_audio.audio = cut;
        }

        audio_transform (content, stream, content_audio, time);
}

void
Player::image_subtitle (weak_ptr<Piece> wp, ContentImageSubtitle subtitle)
{
        shared_ptr<Piece> piece = wp.lock ();
        if (!piece) {
                return;
        }

        /* Apply content's subtitle offsets */
        subtitle.sub.rectangle.x += piece->content->subtitle->x_offset ();
        subtitle.sub.rectangle.y += piece->content->subtitle->y_offset ();

        /* Apply content's subtitle scale */
        subtitle.sub.rectangle.width *= piece->content->subtitle->x_scale ();
        subtitle.sub.rectangle.height *= piece->content->subtitle->y_scale ();

        /* Apply a corrective translation to keep the subtitle centred after that scale */
        subtitle.sub.rectangle.x -= subtitle.sub.rectangle.width * (piece->content->subtitle->x_scale() - 1);
        subtitle.sub.rectangle.y -= subtitle.sub.rectangle.height * (piece->content->subtitle->y_scale() - 1);

        PlayerSubtitles ps;
        ps.image.push_back (subtitle.sub);
        DCPTimePeriod period (content_time_to_dcp (piece, subtitle.period().from), content_time_to_dcp (piece, subtitle.period().to));

        if (piece->content->subtitle->use() && (piece->content->subtitle->burn() || _always_burn_subtitles)) {
                _subtitles.push_back (make_pair (ps, period));
        } else {
                Subtitle (ps, period);
        }
}

void
Player::text_subtitle (weak_ptr<Piece> wp, ContentTextSubtitle subtitle)
{
        shared_ptr<Piece> piece = wp.lock ();
        if (!piece) {
                return;
        }

        PlayerSubtitles ps;
        DCPTimePeriod const period (content_time_to_dcp (piece, subtitle.period().from), content_time_to_dcp (piece, subtitle.period().to));

        BOOST_FOREACH (dcp::SubtitleString s, subtitle.subs) {
                s.set_h_position (s.h_position() + piece->content->subtitle->x_offset ());
                s.set_v_position (s.v_position() + piece->content->subtitle->y_offset ());
                float const xs = piece->content->subtitle->x_scale();
                float const ys = piece->content->subtitle->y_scale();
                float size = s.size();

                /* Adjust size to express the common part of the scaling; 1 / min (1 / xs, 1 / ys)
                   is just max (xs, ys), so e.g. if xs = ys = 0.5 the size is halved, and the
                   aspect-ratio adjustment below then has nothing left to do.
                */
                if (xs > 1e-5 && ys > 1e-5) {
                        size *= 1 / min (1 / xs, 1 / ys);
                }
                s.set_size (size);

                /* Then express aspect ratio changes */
                if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) {
                        s.set_aspect_adjust (xs / ys);
                }

                s.set_in (dcp::Time(period.from.seconds(), 1000));
                s.set_out (dcp::Time(period.to.seconds(), 1000));
                ps.text.push_back (SubtitleString (s, piece->content->subtitle->outline_width()));
                ps.add_fonts (piece->content->subtitle->fonts ());
        }

        if (piece->content->subtitle->use() && (piece->content->subtitle->burn() || _always_burn_subtitles)) {
                _subtitles.push_back (make_pair (ps, period));
        } else {
                Subtitle (ps, period);
        }
}

void
Player::seek (DCPTime time, bool accurate)
{
        if (_audio_processor) {
                _audio_processor->flush ();
        }

        for (ResamplerMap::iterator i = _resamplers.begin(); i != _resamplers.end(); ++i) {
                i->second->flush ();
                i->second->reset ();
        }

        BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
                i->done = false;
                if (i->content->position() <= time && time < i->content->end()) {
                        i->decoder->seek (dcp_to_content_time (i, time), accurate);
                }
        }

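        /* For an accurate seek, prime the last-emitted times so that pass() fills and emits from
           exactly `time'; for an inaccurate seek leave them unset and they will be re-established
           by whatever is decoded next.
        */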
        if (accurate) {
                _last_video_time = time - one_video_frame ();
                _last_audio_time = time;
                cout << "_last_audio_time -> " << to_string(time) << "\n";
        } else {
                _last_video_time = optional<DCPTime> ();
                _last_audio_time = optional<DCPTime> ();
                cout << "_last_audio_time -> []\n";
        }
}

shared_ptr<Resampler>
Player::resampler (shared_ptr<const AudioContent> content, AudioStreamPtr stream, bool create)
{
        ResamplerMap::const_iterator i = _resamplers.find (make_pair (content, stream));
        if (i != _resamplers.end ()) {
                return i->second;
        }

        if (!create) {
                return shared_ptr<Resampler> ();
        }

        LOG_GENERAL (
                "Creating new resampler from %1 to %2 with %3 channels",
                stream->frame_rate(),
                content->resampled_frame_rate(),
                stream->channels()
                );

        shared_ptr<Resampler> r (
                new Resampler (stream->frame_rate(), content->resampled_frame_rate(), stream->channels())
                );

        _resamplers[make_pair(content, stream)] = r;
        return r;
}

void
Player::fill_video (DCPTimePeriod period)
{
        /* XXX: this may not work for 3D */
        BOOST_FOREACH (DCPTimePeriod i, subtract(period, _no_video)) {
                for (DCPTime j = i.from; j < i.to; j += one_video_frame()) {
                        if (_playlist->video_content_at(j) && _last_video) {
                                Video (shared_ptr<PlayerVideo> (new PlayerVideo (*_last_video)), j);
                        } else {
                                Video (black_player_video_frame(), j);
                        }
                }
        }
}

void
Player::fill_audio (DCPTimePeriod period)
{
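        /* Fill the gap with silence in blocks of at most half a second, so that no single
           AudioBuffers becomes excessively large.  For example (illustrative numbers only): a 1.2s
           gap at 48kHz is emitted as 24000 + 24000 + 9600 silent frames.
        */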
        BOOST_FOREACH (DCPTimePeriod i, subtract(period, _no_audio)) {
                DCPTime t = i.from;
                while (t < i.to) {
                        DCPTime block = min (DCPTime::from_seconds (0.5), i.to - t);
                        Frame const samples = block.frames_round(_film->audio_frame_rate());
                        if (samples) {
                                shared_ptr<AudioBuffers> silence (new AudioBuffers (_film->audio_channels(), samples));
                                silence->make_silent ();
                                Audio (silence, t);
                        }
                        t += block;
                }
        }
}

DCPTime
Player::one_video_frame () const
{
        return DCPTime::from_frames (1, _film->video_frame_rate ());
}