Keep active subtitles around until the video they are on has been emitted.
[dcpomatic.git] / src / lib / player.cc
1 /*
2     Copyright (C) 2013-2017 Carl Hetherington <cth@carlh.net>
3
4     This file is part of DCP-o-matic.
5
6     DCP-o-matic is free software; you can redistribute it and/or modify
7     it under the terms of the GNU General Public License as published by
8     the Free Software Foundation; either version 2 of the License, or
9     (at your option) any later version.
10
11     DCP-o-matic is distributed in the hope that it will be useful,
12     but WITHOUT ANY WARRANTY; without even the implied warranty of
13     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14     GNU General Public License for more details.
15
16     You should have received a copy of the GNU General Public License
17     along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.
18
19 */
20
21 #include "player.h"
22 #include "film.h"
23 #include "audio_buffers.h"
24 #include "content_audio.h"
25 #include "dcp_content.h"
26 #include "job.h"
27 #include "image.h"
28 #include "raw_image_proxy.h"
29 #include "ratio.h"
30 #include "log.h"
31 #include "render_subtitles.h"
32 #include "config.h"
33 #include "content_video.h"
34 #include "player_video.h"
35 #include "frame_rate_change.h"
36 #include "audio_processor.h"
37 #include "playlist.h"
38 #include "referenced_reel_asset.h"
39 #include "decoder_factory.h"
40 #include "decoder.h"
41 #include "video_decoder.h"
42 #include "audio_decoder.h"
43 #include "subtitle_content.h"
44 #include "subtitle_decoder.h"
45 #include "ffmpeg_content.h"
46 #include "audio_content.h"
47 #include "content_subtitle.h"
48 #include "dcp_decoder.h"
49 #include "image_decoder.h"
50 #include "resampler.h"
51 #include "compose.hpp"
52 #include <dcp/reel.h>
53 #include <dcp/reel_sound_asset.h>
54 #include <dcp/reel_subtitle_asset.h>
55 #include <dcp/reel_picture_asset.h>
56 #include <boost/foreach.hpp>
57 #include <stdint.h>
58 #include <algorithm>
59 #include <iostream>
60
61 #include "i18n.h"
62
63 #define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);
64
65 using std::list;
66 using std::cout;
67 using std::min;
68 using std::max;
69 using std::min;
70 using std::vector;
71 using std::pair;
72 using std::map;
73 using std::make_pair;
74 using std::copy;
75 using boost::shared_ptr;
76 using boost::weak_ptr;
77 using boost::dynamic_pointer_cast;
78 using boost::optional;
79 using boost::scoped_ptr;
80
/** Construct a Player for @param film, emitting the content of @param playlist.
 *  Subscribes to change notifications from both (the connections are stored so
 *  that they are dropped when the Player is destroyed), sets up the output
 *  container size, primes the audio processor and seeks to the start.
 */
81 Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
82         : _film (film)
83         , _playlist (playlist)
84         , _have_valid_pieces (false)
85         , _ignore_video (false)
86         , _ignore_audio (false)
87         , _always_burn_subtitles (false)
88         , _fast (false)
89         , _play_referenced (false)
90         , _audio_merger (_film->audio_frame_rate())
91 {
92         _film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
93         _playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
94         _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::playlist_content_changed, this, _1, _2, _3));
95         set_video_container_size (_film->frame_size ());
96
        /* Make sure _audio_processor is created if the film needs one */
97         film_changed (Film::AUDIO_PROCESSOR);
98
        /* Accurate seek to the start so that the first pass() emits from time zero */
99         seek (DCPTime (), true);
100 }
101
/** Rebuild _pieces (one Piece per decodable playlist content) and wire each
 *  decoder's emission signals through to this Player.  Also resets the
 *  last-emitted video/audio times, records per-stream state for the audio
 *  merger and notes the periods for which video/audio will come from
 *  referenced DCPs rather than from us.
 */
102 void
103 Player::setup_pieces ()
104 {
105         _pieces.clear ();
106
107         BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
108
                /* Skip content whose files are missing on disk */
109                 if (!i->paths_valid ()) {
110                         continue;
111                 }
112
113                 shared_ptr<Decoder> decoder = decoder_factory (i, _film->log());
114                 FrameRateChange frc (i->active_video_frame_rate(), _film->video_frame_rate());
115
116                 if (!decoder) {
117                         /* Not something that we can decode; e.g. Atmos content */
118                         continue;
119                 }
120
                /* Honour the ignore flags set before this rebuild */
121                 if (decoder->video && _ignore_video) {
122                         decoder->video->set_ignore ();
123                 }
124
125                 if (decoder->audio && _ignore_audio) {
126                         decoder->audio->set_ignore ();
127                 }
128
                /* If we are previewing referenced content we must actually decode it */
129                 shared_ptr<DCPDecoder> dcp = dynamic_pointer_cast<DCPDecoder> (decoder);
130                 if (dcp && _play_referenced) {
131                         dcp->set_decode_referenced ();
132                 }
133
134                 shared_ptr<Piece> piece (new Piece (i, decoder, frc));
135                 _pieces.push_back (piece);
136
                /* The decoder callbacks hold only weak_ptrs to the Piece so that a
                   rebuild of _pieces does not keep stale Pieces alive.
                */
137                 if (decoder->video) {
138                         decoder->video->Data.connect (bind (&Player::video, this, weak_ptr<Piece> (piece), _1));
139                 }
140
141                 if (decoder->audio) {
142                         decoder->audio->Data.connect (bind (&Player::audio, this, weak_ptr<Piece> (piece), _1, _2));
143                 }
144
145                 if (decoder->subtitle) {
146                         decoder->subtitle->ImageStart.connect (bind (&Player::image_subtitle_start, this, weak_ptr<Piece> (piece), _1));
147                         decoder->subtitle->TextStart.connect (bind (&Player::text_subtitle_start, this, weak_ptr<Piece> (piece), _1));
148                         decoder->subtitle->Stop.connect (bind (&Player::subtitle_stop, this, weak_ptr<Piece> (piece), _1));
149                 }
150         }
151
        /* Initialise audio-merger bookkeeping: one StreamState per audio stream */
152         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
153                 if (i->content->audio) {
154                         BOOST_FOREACH (AudioStreamPtr j, i->content->audio->streams()) {
155                                 _stream_states[j] = StreamState (i, i->content->position ());
156                         }
157                 }
158         }
159
        /* Record periods covered by referenced DCP video/audio; we will not emit
           our own output for these periods.
        */
160         if (!_play_referenced) {
161                 BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
162                         shared_ptr<DCPContent> dc = dynamic_pointer_cast<DCPContent> (i->content);
163                         if (dc) {
164                                 if (dc->reference_video()) {
165                                         _no_video.push_back (DCPTimePeriod (dc->position(), dc->end()));
166                                 }
167                                 if (dc->reference_audio()) {
168                                         _no_audio.push_back (DCPTimePeriod (dc->position(), dc->end()));
169                                 }
170                         }
171                 }
172         }
173
        /* Nothing has been emitted from the new pieces yet */
174         _last_video_time = optional<DCPTime> ();
175         _last_audio_time = optional<DCPTime> ();
176         _have_valid_pieces = true;
177 }
178
179 void
180 Player::playlist_content_changed (weak_ptr<Content> w, int property, bool frequent)
181 {
182         shared_ptr<Content> c = w.lock ();
183         if (!c) {
184                 return;
185         }
186
187         if (
188                 property == ContentProperty::POSITION ||
189                 property == ContentProperty::LENGTH ||
190                 property == ContentProperty::TRIM_START ||
191                 property == ContentProperty::TRIM_END ||
192                 property == ContentProperty::PATH ||
193                 property == VideoContentProperty::FRAME_TYPE ||
194                 property == DCPContentProperty::NEEDS_ASSETS ||
195                 property == DCPContentProperty::NEEDS_KDM ||
196                 property == SubtitleContentProperty::COLOUR ||
197                 property == SubtitleContentProperty::OUTLINE ||
198                 property == SubtitleContentProperty::SHADOW ||
199                 property == SubtitleContentProperty::EFFECT_COLOUR ||
200                 property == FFmpegContentProperty::SUBTITLE_STREAM ||
201                 property == VideoContentProperty::COLOUR_CONVERSION
202                 ) {
203
204                 _have_valid_pieces = false;
205                 Changed (frequent);
206
207         } else if (
208                 property == SubtitleContentProperty::LINE_SPACING ||
209                 property == SubtitleContentProperty::OUTLINE_WIDTH ||
210                 property == SubtitleContentProperty::Y_SCALE ||
211                 property == SubtitleContentProperty::FADE_IN ||
212                 property == SubtitleContentProperty::FADE_OUT ||
213                 property == ContentProperty::VIDEO_FRAME_RATE ||
214                 property == SubtitleContentProperty::USE ||
215                 property == SubtitleContentProperty::X_OFFSET ||
216                 property == SubtitleContentProperty::Y_OFFSET ||
217                 property == SubtitleContentProperty::X_SCALE ||
218                 property == SubtitleContentProperty::FONTS ||
219                 property == VideoContentProperty::CROP ||
220                 property == VideoContentProperty::SCALE ||
221                 property == VideoContentProperty::FADE_IN ||
222                 property == VideoContentProperty::FADE_OUT
223                 ) {
224
225                 Changed (frequent);
226         }
227 }
228
229 void
230 Player::set_video_container_size (dcp::Size s)
231 {
232         if (s == _video_container_size) {
233                 return;
234         }
235
236         _video_container_size = s;
237
238         _black_image.reset (new Image (AV_PIX_FMT_RGB24, _video_container_size, true));
239         _black_image->make_black ();
240
241         Changed (false);
242 }
243
/** Handle a wholesale change to the playlist: our Pieces no longer match it,
 *  so mark them invalid (they will be rebuilt lazily) and tell listeners.
 */
244 void
245 Player::playlist_changed ()
246 {
247         _have_valid_pieces = false;
248         Changed (false);
249 }
250
251 void
252 Player::film_changed (Film::Property p)
253 {
254         /* Here we should notice Film properties that affect our output, and
255            alert listeners that our output now would be different to how it was
256            last time we were run.
257         */
258
259         if (p == Film::CONTAINER) {
260                 Changed (false);
261         } else if (p == Film::VIDEO_FRAME_RATE) {
262                 /* Pieces contain a FrameRateChange which contains the DCP frame rate,
263                    so we need new pieces here.
264                 */
265                 _have_valid_pieces = false;
266                 Changed (false);
267         } else if (p == Film::AUDIO_PROCESSOR) {
268                 if (_film->audio_processor ()) {
269                         _audio_processor = _film->audio_processor()->clone (_film->audio_frame_rate ());
270                 }
271         }
272 }
273
274 list<PositionImage>
275 Player::transform_image_subtitles (list<ImageSubtitle> subs) const
276 {
277         list<PositionImage> all;
278
279         for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
280                 if (!i->image) {
281                         continue;
282                 }
283
284                 /* We will scale the subtitle up to fit _video_container_size */
285                 dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);
286
287                 /* Then we need a corrective translation, consisting of two parts:
288                  *
289                  * 1.  that which is the result of the scaling of the subtitle by _video_container_size; this will be
290                  *     rect.x * _video_container_size.width and rect.y * _video_container_size.height.
291                  *
292                  * 2.  that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
293                  *     (width_before_subtitle_scale * (1 - subtitle_x_scale) / 2) and
294                  *     (height_before_subtitle_scale * (1 - subtitle_y_scale) / 2).
295                  *
296                  * Combining these two translations gives these expressions.
297                  */
298
299                 all.push_back (
300                         PositionImage (
301                                 i->image->scale (
302                                         scaled_size,
303                                         dcp::YUV_TO_RGB_REC601,
304                                         i->image->pixel_format (),
305                                         true,
306                                         _fast
307                                         ),
308                                 Position<int> (
309                                         lrint (_video_container_size.width * i->rectangle.x),
310                                         lrint (_video_container_size.height * i->rectangle.y)
311                                         )
312                                 )
313                         );
314         }
315
316         return all;
317 }
318
319 shared_ptr<PlayerVideo>
320 Player::black_player_video_frame () const
321 {
322         return shared_ptr<PlayerVideo> (
323                 new PlayerVideo (
324                         shared_ptr<const ImageProxy> (new RawImageProxy (_black_image)),
325                         Crop (),
326                         optional<double> (),
327                         _video_container_size,
328                         _video_container_size,
329                         EYES_BOTH,
330                         PART_WHOLE,
331                         PresetColourConversion::all().front().conversion
332                 )
333         );
334 }
335
/** Convert a DCP time to a frame index within @param piece's video content,
 *  accounting for the piece's position, trim and frame-rate change.
 */
336 Frame
337 Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
338 {
339         DCPTime s = t - piece->content->position ();
340         s = min (piece->content->length_after_trim(), s);
341         s = max (DCPTime(), s + DCPTime (piece->content->trim_start(), piece->frc));
342
343         /* It might seem more logical here to convert s to a ContentTime (using the FrameRateChange)
344            then convert that ContentTime to frames at the content's rate.  However this fails for
345            situations like content at 29.9978733fps, DCP at 30fps.  The accuracy of the Time type is not
346            enough to distinguish between the two with low values of time (e.g. 3200 in Time units).
347
348            Instead we convert the DCPTime using the DCP video rate then account for any skip/repeat.
349         */
350         return s.frames_floor (piece->frc.dcp) / piece->frc.factor ();
351 }
352
353 DCPTime
354 Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
355 {
356         /* See comment in dcp_to_content_video */
357         DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime (piece->content->trim_start (), piece->frc);
358         return max (DCPTime (), d + piece->content->position ());
359 }
360
361 Frame
362 Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
363 {
364         DCPTime s = t - piece->content->position ();
365         s = min (piece->content->length_after_trim(), s);
366         /* See notes in dcp_to_content_video */
367         return max (DCPTime (), DCPTime (piece->content->trim_start (), piece->frc) + s).frames_floor (_film->audio_frame_rate ());
368 }
369
370 DCPTime
371 Player::resampled_audio_to_dcp (shared_ptr<const Piece> piece, Frame f) const
372 {
373         /* See comment in dcp_to_content_video */
374         DCPTime const d = DCPTime::from_frames (f, _film->audio_frame_rate()) - DCPTime (piece->content->trim_start(), piece->frc);
375         return max (DCPTime (), d + piece->content->position ());
376 }
377
378 ContentTime
379 Player::dcp_to_content_time (shared_ptr<const Piece> piece, DCPTime t) const
380 {
381         DCPTime s = t - piece->content->position ();
382         s = min (piece->content->length_after_trim(), s);
383         return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
384 }
385
386 DCPTime
387 Player::content_time_to_dcp (shared_ptr<const Piece> piece, ContentTime t) const
388 {
389         return max (DCPTime (), DCPTime (t - piece->content->trim_start(), piece->frc) + piece->content->position());
390 }
391
392 list<shared_ptr<Font> >
393 Player::get_subtitle_fonts ()
394 {
395         if (!_have_valid_pieces) {
396                 setup_pieces ();
397         }
398
399         list<shared_ptr<Font> > fonts;
400         BOOST_FOREACH (shared_ptr<Piece>& p, _pieces) {
401                 if (p->content->subtitle) {
402                         /* XXX: things may go wrong if there are duplicate font IDs
403                            with different font files.
404                         */
405                         list<shared_ptr<Font> > f = p->content->subtitle->fonts ();
406                         copy (f.begin(), f.end(), back_inserter (fonts));
407                 }
408         }
409
410         return fonts;
411 }
412
413 /** Set this player never to produce any video data */
/* NOTE: this only takes effect when the Pieces are (re)built in setup_pieces(),
   which reads _ignore_video when creating decoders — call it before decoding starts.
*/
414 void
415 Player::set_ignore_video ()
416 {
417         _ignore_video = true;
418 }
419
420 /** Set whether or not this player should always burn text subtitles into the image,
421  *  regardless of the content settings.
422  *  @param burn true to always burn subtitles, false to obey content settings.
423  */
/* Read by Player::video() when deciding whether to composite active subtitles,
   so a change takes effect on the next emitted frame.
*/
424 void
425 Player::set_always_burn_subtitles (bool burn)
426 {
427         _always_burn_subtitles = burn;
428 }
429
/** Put the player into "fast" mode; _fast is passed to image scaling in
 *  transform_image_subtitles() (presumably trading quality for speed —
 *  TODO confirm against Image::scale).  Invalidates the current Pieces.
 */
430 void
431 Player::set_fast ()
432 {
433         _fast = true;
434         _have_valid_pieces = false;
435 }
436
/** Ask the player to emit content that would normally be referenced from
 *  existing DCPs (rather than skipping it); setup_pieces() reads this flag
 *  to enable decoding of referenced material.  Invalidates the current Pieces.
 */
437 void
438 Player::set_play_referenced ()
439 {
440         _play_referenced = true;
441         _have_valid_pieces = false;
442 }
443
/** @return details of the reel assets (picture/sound/subtitle) within the
 *  playlist's DCP content which are marked to be referenced rather than
 *  re-encoded, together with the DCP time period each one covers.
 *
 *  NOTE(review): if a DCPDecoder cannot be created (e.g. missing assets) the
 *  catch below silently returns whatever has been gathered so far — confirm
 *  this best-effort behaviour is intended rather than an error report.
 */
444 list<ReferencedReelAsset>
445 Player::get_reel_assets ()
446 {
447         list<ReferencedReelAsset> a;
448
449         BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
450                 shared_ptr<DCPContent> j = dynamic_pointer_cast<DCPContent> (i);
451                 if (!j) {
452                         continue;
453                 }
454
455                 scoped_ptr<DCPDecoder> decoder;
456                 try {
457                         decoder.reset (new DCPDecoder (j, _film->log()));
458                 } catch (...) {
459                         return a;
460                 }
461
                /* Offset, in frames, of the current reel from the start of this content */
462                 int64_t offset = 0;
463                 BOOST_FOREACH (shared_ptr<dcp::Reel> k, decoder->reels()) {
464
465                         DCPOMATIC_ASSERT (j->video_frame_rate ());
466                         double const cfr = j->video_frame_rate().get();
467                         Frame const trim_start = j->trim_start().frames_round (cfr);
468                         Frame const trim_end = j->trim_end().frames_round (cfr);
469                         int const ffr = _film->video_frame_rate ();
470
471                         DCPTime const from = i->position() + DCPTime::from_frames (offset, _film->video_frame_rate());
                        /* For each referenced asset type: apply the content's trims by
                           adjusting the asset's entry point and duration, then record it
                           with the period it occupies in the final DCP.
                           NOTE(review): this mutates the asset objects owned by the decoder's
                           reels — presumably safe because the decoder is local to this call.
                        */
472                         if (j->reference_video ()) {
473                                 shared_ptr<dcp::ReelAsset> ra = k->main_picture ();
474                                 DCPOMATIC_ASSERT (ra);
475                                 ra->set_entry_point (ra->entry_point() + trim_start);
476                                 ra->set_duration (ra->duration() - trim_start - trim_end);
477                                 a.push_back (
478                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
479                                         );
480                         }
481
482                         if (j->reference_audio ()) {
483                                 shared_ptr<dcp::ReelAsset> ra = k->main_sound ();
484                                 DCPOMATIC_ASSERT (ra);
485                                 ra->set_entry_point (ra->entry_point() + trim_start);
486                                 ra->set_duration (ra->duration() - trim_start - trim_end);
487                                 a.push_back (
488                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
489                                         );
490                         }
491
492                         if (j->reference_subtitle ()) {
493                                 shared_ptr<dcp::ReelAsset> ra = k->main_subtitle ();
494                                 DCPOMATIC_ASSERT (ra);
495                                 ra->set_entry_point (ra->entry_point() + trim_start);
496                                 ra->set_duration (ra->duration() - trim_start - trim_end);
497                                 a.push_back (
498                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
499                                         );
500                         }
501
502                         /* Assume that main picture duration is the length of the reel */
503                         offset += k->main_picture()->duration ();
504                 }
505         }
506
507         return a;
508 }
509
510 list<shared_ptr<Piece> >
511 Player::overlaps (DCPTime from, DCPTime to, boost::function<bool (Content *)> valid)
512 {
513         if (!_have_valid_pieces) {
514                 setup_pieces ();
515         }
516
517         list<shared_ptr<Piece> > overlaps;
518         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
519                 if (valid (i->content.get ()) && i->content->position() < to && i->content->end() > from) {
520                         overlaps.push_back (i);
521                 }
522         }
523
524         return overlaps;
525 }
526
/** Run one step of decoding: pass the piece whose decoder is earliest in DCP
 *  time, then emit any audio which is now complete from the merger.
 *  @return true if there is no more content to decode (after filling the
 *  remainder of the playlist with silent black), false otherwise.
 */
527 bool
528 Player::pass ()
529 {
530         if (!_have_valid_pieces) {
531                 setup_pieces ();
532         }
533
        /* Find the not-yet-finished piece whose decoder position is earliest in DCP time */
534         shared_ptr<Piece> earliest;
535         DCPTime earliest_content;
536
537         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
538                 if (!i->done) {
539                         DCPTime const t = i->content->position() + DCPTime (i->decoder->position(), i->frc);
540                         if (!earliest || t < earliest_content) {
541                                 earliest_content = t;
542                                 earliest = i;
543                         }
544                 }
545         }
546
547         if (!earliest) {
548                 /* No more content; fill up with silent black */
549                 DCPTimePeriod remaining_video (DCPTime(), _playlist->length());
550                 if (_last_video_time) {
551                         remaining_video.from = _last_video_time.get();
552                 }
553                 fill_video (remaining_video);
554                 DCPTimePeriod remaining_audio (DCPTime(), _playlist->length());
555                 if (_last_audio_time) {
556                         remaining_audio.from = _last_audio_time.get();
557                 }
558                 fill_audio (remaining_audio);
559                 return true;
560         }
561
562         earliest->done = earliest->decoder->pass ();
563         if (earliest->done && earliest->content->audio) {
564                 /* Flush the Player audio system for this piece */
565                 BOOST_FOREACH (AudioStreamPtr i, earliest->content->audio->streams()) {
566                         audio_flush (earliest, i);
567                 }
568         }
569
570         /* Emit any audio that is ready */
571
        /* We can only pull audio up to the earliest point that any live stream
           has been pushed to, otherwise later pushes could land before it.
        */
572         DCPTime pull_from = _playlist->length ();
573         for (map<AudioStreamPtr, StreamState>::const_iterator i = _stream_states.begin(); i != _stream_states.end(); ++i) {
574                 if (!i->second.piece->done && i->second.last_push_end < pull_from) {
575                         pull_from = i->second.last_push_end;
576                 }
577         }
578
579         list<pair<shared_ptr<AudioBuffers>, DCPTime> > audio = _audio_merger.pull (pull_from);
580         for (list<pair<shared_ptr<AudioBuffers>, DCPTime> >::iterator i = audio.begin(); i != audio.end(); ++i) {
581                 if (_last_audio_time && i->second < _last_audio_time.get()) {
582                         /* There has been an accurate seek and we have received some audio before the seek time;
583                            discard it.
584                         */
585                         pair<shared_ptr<AudioBuffers>, DCPTime> cut = discard_audio (i->first, i->second, *_last_audio_time);
586                         if (!cut.first) {
587                                 continue;
588                         }
589                         *i = cut;
590                 }
591
                /* Fill any silent gap between the last emitted audio and this block */
592                 if (_last_audio_time) {
593                         fill_audio (DCPTimePeriod (_last_audio_time.get(), i->second));
594                 }
595
596                 Audio (i->first, i->second);
597                 _last_audio_time = i->second + DCPTime::from_frames(i->first->frames(), _film->audio_frame_rate());
598         }
599
600         return false;
601 }
602
603 void
604 Player::video (weak_ptr<Piece> wp, ContentVideo video)
605 {
606         shared_ptr<Piece> piece = wp.lock ();
607         if (!piece) {
608                 return;
609         }
610
611         FrameRateChange frc(piece->content->active_video_frame_rate(), _film->video_frame_rate());
612         if (frc.skip && (video.frame % 2) == 1) {
613                 return;
614         }
615
616         /* Time and period of the frame we will emit */
617         DCPTime const time = content_video_to_dcp (piece, video.frame);
618         DCPTimePeriod const period (time, time + one_video_frame());
619
620         /* Discard if it's outside the content's period or if it's before the last accurate seek */
621         if (time < piece->content->position() || time >= piece->content->end() || (_last_video_time && time < _last_video_time)) {
622                 return;
623         }
624
625         /* Get any subtitles */
626
627         optional<PositionImage> subtitles;
628
629         for (ActiveSubtitlesMap::const_iterator i = _active_subtitles.begin(); i != _active_subtitles.end(); ++i) {
630
631                 shared_ptr<Piece> sub_piece = i->first.lock ();
632                 if (!sub_piece) {
633                         continue;
634                 }
635
636                 if (!sub_piece->content->subtitle->use() || (!_always_burn_subtitles && !piece->content->subtitle->burn())) {
637                         continue;
638                 }
639
640                 ActiveSubtitles sub = i->second;
641
642                 if (sub.from > time || (sub.to && sub.to.get() <= time)) {
643                         continue;
644                 }
645
646                 list<PositionImage> sub_images;
647
648                 /* Image subtitles */
649                 list<PositionImage> c = transform_image_subtitles (sub.subs.image);
650                 copy (c.begin(), c.end(), back_inserter (sub_images));
651
652                 /* Text subtitles (rendered to an image) */
653                 if (!sub.subs.text.empty ()) {
654                         list<PositionImage> s = render_subtitles (sub.subs.text, sub.subs.fonts, _video_container_size, time);
655                         copy (s.begin (), s.end (), back_inserter (sub_images));
656                 }
657
658                 if (!sub_images.empty ()) {
659                         subtitles = merge (sub_images);
660                 }
661         }
662
663         /* Fill gaps */
664
665         if (_last_video_time) {
666                 fill_video (DCPTimePeriod (_last_video_time.get(), time));
667         }
668
669         _last_video.reset (
670                 new PlayerVideo (
671                         video.image,
672                         piece->content->video->crop (),
673                         piece->content->video->fade (video.frame),
674                         piece->content->video->scale().size (
675                                 piece->content->video, _video_container_size, _film->frame_size ()
676                                 ),
677                         _video_container_size,
678                         video.eyes,
679                         video.part,
680                         piece->content->video->colour_conversion ()
681                         )
682                 );
683
684         if (subtitles) {
685                 _last_video->set_subtitle (subtitles.get ());
686         }
687
688         Video (_last_video, time);
689
690         _last_video_time = time + one_video_frame ();
691
692         /* Clear any finished _active_subtitles */
693         ActiveSubtitlesMap updated;
694         for (ActiveSubtitlesMap::const_iterator i = _active_subtitles.begin(); i != _active_subtitles.end(); ++i) {
695                 if (!i->second.to || i->second.to.get() >= time) {
696                         updated[i->first] = i->second;
697                 }
698         }
699         _active_subtitles = updated;
700 }
701
/** Flush any audio which is still buffered in the resampler for
 *  @param stream of @param piece, passing it through the normal
 *  audio_transform() path.  No-op if there is no resampler for the stream.
 */
702 void
703 Player::audio_flush (shared_ptr<Piece> piece, AudioStreamPtr stream)
704 {
705         shared_ptr<AudioContent> content = piece->content->audio;
706         DCPOMATIC_ASSERT (content);
707
        /* Do not create a resampler just to flush it (second argument false) */
708         shared_ptr<Resampler> r = resampler (content, stream, false);
709         if (!r) {
710                 return;
711         }
712
713         pair<shared_ptr<const AudioBuffers>, Frame> ro = r->flush ();
714         if (ro.first->frames() == 0) {
715                 return;
716         }
717
718         ContentAudio content_audio;
719         content_audio.audio = ro.first;
720         content_audio.frame = ro.second;
721
722         /* Compute time in the DCP */
723         DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame) + DCPTime::from_seconds (content->delay() / 1000.0);
724
725         audio_transform (content, stream, content_audio, time);
726 }
727
728 /** Do our common processing on some audio */
/* Applies the content's gain, remaps the stream's channels onto the DCP's
   channel layout, runs the optional audio processor and pushes the result
   into the merger at the given DCP time, updating the stream's push state.
*/
729 void
730 Player::audio_transform (shared_ptr<AudioContent> content, AudioStreamPtr stream, ContentAudio content_audio, DCPTime time)
731 {
732         DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);
733
734         /* Gain */
735
736         if (content->gain() != 0) {
                /* Copy before applying gain so the decoder's buffers are not modified */
737                 shared_ptr<AudioBuffers> gain (new AudioBuffers (content_audio.audio));
738                 gain->apply_gain (content->gain ());
739                 content_audio.audio = gain;
740         }
741
742         /* Remap */
743
744         shared_ptr<AudioBuffers> dcp_mapped (new AudioBuffers (_film->audio_channels(), content_audio.audio->frames()));
745         dcp_mapped->make_silent ();
746
        /* Mix each input channel into each DCP channel with its mapping gain */
747         AudioMapping map = stream->mapping ();
748         for (int i = 0; i < map.input_channels(); ++i) {
749                 for (int j = 0; j < dcp_mapped->channels(); ++j) {
750                         if (map.get (i, static_cast<dcp::Channel> (j)) > 0) {
751                                 dcp_mapped->accumulate_channel (
752                                         content_audio.audio.get(),
753                                         i,
754                                         static_cast<dcp::Channel> (j),
755                                         map.get (i, static_cast<dcp::Channel> (j))
756                                         );
757                         }
758                 }
759         }
760
761         content_audio.audio = dcp_mapped;
762
763         /* Process */
764
765         if (_audio_processor) {
766                 content_audio.audio = _audio_processor->run (content_audio.audio, _film->audio_channels ());
767         }
768
769         /* Push */
770
771         _audio_merger.push (content_audio.audio, time);
772         DCPOMATIC_ASSERT (_stream_states.find (stream) != _stream_states.end ());
        /* Record how far this stream has been pushed; pass() uses this to decide
           how much merged audio can safely be pulled.
        */
773         _stream_states[stream].last_push_end = time + DCPTime::from_frames (content_audio.audio->frames(), _film->audio_frame_rate());
774 }
775
/** Handle some audio arriving from a decoder: resample it if necessary,
 *  work out its DCP time, trim anything outside the content's period and
 *  hand the rest to audio_transform().
 */
void
Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_audio)
{
        DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);

        shared_ptr<Piece> piece = wp.lock ();
        if (!piece) {
                /* The piece has gone; drop this audio */
                return;
        }

        shared_ptr<AudioContent> content = piece->content->audio;
        DCPOMATIC_ASSERT (content);

        /* Resample */
        if (stream->frame_rate() != content->resampled_frame_rate()) {
                shared_ptr<Resampler> r = resampler (content, stream, true);
                pair<shared_ptr<const AudioBuffers>, Frame> ro = r->run (content_audio.audio, content_audio.frame);
                if (ro.first->frames() == 0) {
                        /* No output from the resampler yet; wait for more input */
                        return;
                }
                content_audio.audio = ro.first;
                content_audio.frame = ro.second;
        }

        /* Compute time in the DCP, allowing for the content's audio delay (in ms) */
        DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame) + DCPTime::from_seconds (content->delay() / 1000.0);
        /* And the end of this block in the DCP */
        DCPTime end = time + DCPTime::from_frames(content_audio.audio->frames(), content->resampled_frame_rate());

        /* Remove anything that comes before the start or after the end of the content */
        if (time < piece->content->position()) {
                /* Drop the part of this block which is before the content's start */
                pair<shared_ptr<AudioBuffers>, DCPTime> cut = discard_audio (content_audio.audio, time, piece->content->position());
                if (!cut.first) {
                        /* This audio is entirely discarded */
                        return;
                }
                content_audio.audio = cut.first;
                time = cut.second;
        } else if (time > piece->content->end()) {
                /* Discard it all */
                return;
        } else if (end > piece->content->end()) {
                /* Trim the part of this block which extends past the content's end */
                Frame const remaining_frames = DCPTime(piece->content->end() - time).frames_round(_film->audio_frame_rate());
                DCPOMATIC_ASSERT (remaining_frames > 0);
                shared_ptr<AudioBuffers> cut (new AudioBuffers (content_audio.audio->channels(), remaining_frames));
                cut->copy_from (content_audio.audio.get(), remaining_frames, 0, 0);
                content_audio.audio = cut;
        }

        /* Gain, remap, process and push */
        audio_transform (content, stream, content_audio, time);
}
827
/** Handle the start of an image subtitle from a decoder: apply the content's
 *  offset and scale settings and store it as an active subtitle.
 */
void
Player::image_subtitle_start (weak_ptr<Piece> wp, ContentImageSubtitle subtitle)
{
        shared_ptr<Piece> piece = wp.lock ();
        if (!piece) {
                return;
        }

        /* Apply content's subtitle offsets */
        subtitle.sub.rectangle.x += piece->content->subtitle->x_offset ();
        subtitle.sub.rectangle.y += piece->content->subtitle->y_offset ();

        /* Apply content's subtitle scale */
        subtitle.sub.rectangle.width *= piece->content->subtitle->x_scale ();
        subtitle.sub.rectangle.height *= piece->content->subtitle->y_scale ();

        /* Apply a corrective translation to keep the subtitle centred after that scale */
        /* NOTE(review): width/height have already been scaled at this point, and a
           centring correction would usually be half the change in size; check whether
           this should be width * (x_scale() - 1) / (2 * x_scale()) (and similarly for y).
        */
        subtitle.sub.rectangle.x -= subtitle.sub.rectangle.width * (piece->content->subtitle->x_scale() - 1);
        subtitle.sub.rectangle.y -= subtitle.sub.rectangle.height * (piece->content->subtitle->y_scale() - 1);

        PlayerSubtitles ps;
        ps.image.push_back (subtitle.sub);
        /* Convert the subtitle's start time to DCP time */
        DCPTime from (content_time_to_dcp (piece, subtitle.from()));

        /* Keep it active until a corresponding stop arrives (see subtitle_stop) */
        _active_subtitles[wp] = ActiveSubtitles (ps, from);
}
854
/** Handle the start of a text subtitle from a decoder: apply the content's
 *  offset, scale, size and outline settings and store it as an active subtitle.
 */
void
Player::text_subtitle_start (weak_ptr<Piece> wp, ContentTextSubtitle subtitle)
{
        shared_ptr<Piece> piece = wp.lock ();
        if (!piece) {
                return;
        }

        PlayerSubtitles ps;
        DCPTime const from (content_time_to_dcp (piece, subtitle.from()));

        BOOST_FOREACH (dcp::SubtitleString s, subtitle.subs) {
                /* Apply content's subtitle offsets */
                s.set_h_position (s.h_position() + piece->content->subtitle->x_offset ());
                s.set_v_position (s.v_position() + piece->content->subtitle->y_offset ());
                float const xs = piece->content->subtitle->x_scale();
                float const ys = piece->content->subtitle->y_scale();
                float size = s.size();

                /* Adjust size to express the common part of the scaling:
                   1 / min (1 / xs, 1 / ys) is just max (xs, ys), so e.g. if
                   xs = ys = 0.5 we scale size by 0.5; the remainder of the
                   scaling is expressed as an aspect-ratio adjustment below.
                */
                if (xs > 1e-5 && ys > 1e-5) {
                        size *= 1 / min (1 / xs, 1 / ys);
                }
                s.set_size (size);

                /* Then express aspect ratio changes */
                if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) {
                        s.set_aspect_adjust (xs / ys);
                }

                /* dcp::Time is (seconds, editable-rate); use millisecond resolution */
                s.set_in (dcp::Time(from.seconds(), 1000));
                ps.text.push_back (SubtitleString (s, piece->content->subtitle->outline_width()));
                ps.add_fonts (piece->content->subtitle->fonts ());
        }

        /* Keep it active until a corresponding stop arrives (see subtitle_stop) */
        _active_subtitles[wp] = ActiveSubtitles (ps, from);
}
893
/** Handle the end of a subtitle from a decoder: emit the active subtitle for
 *  this piece (if it is not being burnt in) and record its stop time.
 */
void
Player::subtitle_stop (weak_ptr<Piece> wp, ContentTime to)
{
        if (_active_subtitles.find (wp) == _active_subtitles.end ()) {
                /* We don't know of any active subtitle from this piece */
                return;
        }

        shared_ptr<Piece> piece = wp.lock ();
        if (!piece) {
                return;
        }

        DCPTime const dcp_to = content_time_to_dcp (piece, to);

        /* Emit the subtitle via the Subtitle signal unless it is going to be burnt into the image */
        if (piece->content->subtitle->use() && !_always_burn_subtitles && !piece->content->subtitle->burn()) {
                Subtitle (_active_subtitles[wp].subs, DCPTimePeriod (_active_subtitles[wp].from, dcp_to));
        }

        /* Record the stop time but keep the entry in _active_subtitles; it is
           removed elsewhere once the video it is on has been emitted.
        */
        _active_subtitles[wp].to = dcp_to;
}
914
915 void
916 Player::seek (DCPTime time, bool accurate)
917 {
918         if (_audio_processor) {
919                 _audio_processor->flush ();
920         }
921
922         for (ResamplerMap::iterator i = _resamplers.begin(); i != _resamplers.end(); ++i) {
923                 i->second->flush ();
924                 i->second->reset ();
925         }
926
927         _audio_merger.clear ();
928         _active_subtitles.clear ();
929
930         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
931                 if (time < i->content->position()) {
932                         /* Before; seek to 0 */
933                         i->decoder->seek (ContentTime(), accurate);
934                         i->done = false;
935                 } else if (i->content->position() <= time && time < i->content->end()) {
936                         /* During; seek to position */
937                         i->decoder->seek (dcp_to_content_time (i, time), accurate);
938                         i->done = false;
939                 } else {
940                         /* After; this piece is done */
941                         i->done = true;
942                 }
943         }
944
945         if (accurate) {
946                 _last_video_time = time;
947                 _last_audio_time = time;
948         } else {
949                 _last_video_time = optional<DCPTime> ();
950                 _last_audio_time = optional<DCPTime> ();
951         }
952 }
953
954 shared_ptr<Resampler>
955 Player::resampler (shared_ptr<const AudioContent> content, AudioStreamPtr stream, bool create)
956 {
957         ResamplerMap::const_iterator i = _resamplers.find (make_pair (content, stream));
958         if (i != _resamplers.end ()) {
959                 return i->second;
960         }
961
962         if (!create) {
963                 return shared_ptr<Resampler> ();
964         }
965
966         LOG_GENERAL (
967                 "Creating new resampler from %1 to %2 with %3 channels",
968                 stream->frame_rate(),
969                 content->resampled_frame_rate(),
970                 stream->channels()
971                 );
972
973         shared_ptr<Resampler> r (
974                 new Resampler (stream->frame_rate(), content->resampled_frame_rate(), stream->channels())
975                 );
976
977         _resamplers[make_pair(content, stream)] = r;
978         return r;
979 }
980
981 void
982 Player::fill_video (DCPTimePeriod period)
983 {
984         /* XXX: this may not work for 3D */
985         BOOST_FOREACH (DCPTimePeriod i, subtract(period, _no_video)) {
986                 for (DCPTime j = i.from; j < i.to; j += one_video_frame()) {
987                         if (_playlist->video_content_at(j) && _last_video) {
988                                 Video (shared_ptr<PlayerVideo> (new PlayerVideo (*_last_video)), j);
989                         } else {
990                                 Video (black_player_video_frame(), j);
991                         }
992                 }
993         }
994 }
995
996 void
997 Player::fill_audio (DCPTimePeriod period)
998 {
999         BOOST_FOREACH (DCPTimePeriod i, subtract(period, _no_audio)) {
1000                 DCPTime t = i.from;
1001                 while (t < i.to) {
1002                         DCPTime block = min (DCPTime::from_seconds (0.5), i.to - t);
1003                         Frame const samples = block.frames_round(_film->audio_frame_rate());
1004                         if (samples) {
1005                                 shared_ptr<AudioBuffers> silence (new AudioBuffers (_film->audio_channels(), samples));
1006                                 silence->make_silent ();
1007                                 Audio (silence, t);
1008                         }
1009                         t += block;
1010                 }
1011         }
1012 }
1013
/** @return The duration of one video frame at the film's video frame rate */
DCPTime
Player::one_video_frame () const
{
        return DCPTime::from_frames (1, _film->video_frame_rate ());
}
1019
1020 pair<shared_ptr<AudioBuffers>, DCPTime>
1021 Player::discard_audio (shared_ptr<const AudioBuffers> audio, DCPTime time, DCPTime discard_to) const
1022 {
1023         DCPTime const discard_time = discard_to - time;
1024         Frame const discard_frames = discard_time.frames_round(_film->audio_frame_rate());
1025         Frame remaining_frames = audio->frames() - discard_frames;
1026         if (remaining_frames <= 0) {
1027                 return make_pair(shared_ptr<AudioBuffers>(), DCPTime());
1028         }
1029         shared_ptr<AudioBuffers> cut (new AudioBuffers (audio->channels(), remaining_frames));
1030         cut->copy_from (audio.get(), remaining_frames, discard_frames, 0);
1031         return make_pair(cut, time + discard_time);
1032 }