Don't seek a piece if the seek is outside its boundary.
[dcpomatic.git] / src / lib / player.cc
/*
    Copyright (C) 2013-2017 Carl Hetherington <cth@carlh.net>

    This file is part of DCP-o-matic.

    DCP-o-matic is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    DCP-o-matic is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.

*/

#include "player.h"
#include "film.h"
#include "audio_buffers.h"
#include "content_audio.h"
#include "dcp_content.h"
#include "job.h"
#include "image.h"
#include "raw_image_proxy.h"
#include "ratio.h"
#include "log.h"
#include "render_subtitles.h"
#include "config.h"
#include "content_video.h"
#include "player_video.h"
#include "frame_rate_change.h"
#include "audio_processor.h"
#include "playlist.h"
#include "referenced_reel_asset.h"
#include "decoder_factory.h"
#include "decoder.h"
#include "video_decoder.h"
#include "audio_decoder.h"
#include "subtitle_content.h"
#include "subtitle_decoder.h"
#include "ffmpeg_content.h"
#include "audio_content.h"
#include "content_subtitle.h"
#include "dcp_decoder.h"
#include "image_decoder.h"
#include "resampler.h"
#include "compose.hpp"
#include <dcp/reel.h>
#include <dcp/reel_sound_asset.h>
#include <dcp/reel_subtitle_asset.h>
#include <dcp/reel_picture_asset.h>
#include <boost/foreach.hpp>
#include <stdint.h>
#include <algorithm>
#include <iostream>

#include "i18n.h"

#define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);

using std::list;
using std::cout;
using std::min;
using std::max;
using std::vector;
using std::pair;
using std::map;
using std::make_pair;
using std::copy;
using boost::shared_ptr;
using boost::weak_ptr;
using boost::dynamic_pointer_cast;
using boost::optional;
using boost::scoped_ptr;

Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
        : _film (film)
        , _playlist (playlist)
        , _have_valid_pieces (false)
        , _ignore_video (false)
        , _ignore_audio (false)
        , _always_burn_subtitles (false)
        , _fast (false)
        , _play_referenced (false)
        , _audio_merger (_film->audio_frame_rate())
{
        _film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
        _playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
        _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::playlist_content_changed, this, _1, _2, _3));
        set_video_container_size (_film->frame_size ());

        film_changed (Film::AUDIO_PROCESSOR);

        seek (DCPTime (), true);
}

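/** Set up our Pieces to represent the current playlist content: one Piece per bit of
 *  content that we can decode, with its decoder's outputs connected to our handlers,
 *  plus a note of any periods which are covered by referenced DCPs and so need no
 *  video or audio from us.
 */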
void
Player::setup_pieces ()
{
        _pieces.clear ();

        BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {

                if (!i->paths_valid ()) {
                        continue;
                }

                shared_ptr<Decoder> decoder = decoder_factory (i, _film->log());
                FrameRateChange frc (i->active_video_frame_rate(), _film->video_frame_rate());

                if (!decoder) {
                        /* Not something that we can decode; e.g. Atmos content */
                        continue;
                }

                if (decoder->video && _ignore_video) {
                        decoder->video->set_ignore ();
                }

                if (decoder->audio && _ignore_audio) {
                        decoder->audio->set_ignore ();
                }

                shared_ptr<DCPDecoder> dcp = dynamic_pointer_cast<DCPDecoder> (decoder);
                if (dcp && _play_referenced) {
                        dcp->set_decode_referenced ();
                }

                shared_ptr<Piece> piece (new Piece (i, decoder, frc));
                _pieces.push_back (piece);

                if (decoder->video) {
                        decoder->video->Data.connect (bind (&Player::video, this, weak_ptr<Piece> (piece), _1));
                }

                if (decoder->audio) {
                        decoder->audio->Data.connect (bind (&Player::audio, this, weak_ptr<Piece> (piece), _1, _2));
                }

                if (decoder->subtitle) {
                        decoder->subtitle->ImageStart.connect (bind (&Player::image_subtitle_start, this, weak_ptr<Piece> (piece), _1));
                        decoder->subtitle->TextStart.connect (bind (&Player::text_subtitle_start, this, weak_ptr<Piece> (piece), _1));
                        decoder->subtitle->Stop.connect (bind (&Player::subtitle_stop, this, weak_ptr<Piece> (piece), _1));
                }
        }

        BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
                if (i->content->audio) {
                        BOOST_FOREACH (AudioStreamPtr j, i->content->audio->streams()) {
                                _stream_states[j] = StreamState (i, i->content->position ());
                        }
                }
        }

        if (!_play_referenced) {
                BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
                        shared_ptr<DCPContent> dc = dynamic_pointer_cast<DCPContent> (i->content);
                        if (dc) {
                                if (dc->reference_video()) {
                                        _no_video.push_back (DCPTimePeriod (dc->position(), dc->end()));
                                }
                                if (dc->reference_audio()) {
                                        _no_audio.push_back (DCPTimePeriod (dc->position(), dc->end()));
                                }
                        }
                }
        }

        _last_video_time = optional<DCPTime> ();
        _last_audio_time = optional<DCPTime> ();
        _have_valid_pieces = true;
}

void
Player::playlist_content_changed (weak_ptr<Content> w, int property, bool frequent)
{
        shared_ptr<Content> c = w.lock ();
        if (!c) {
                return;
        }

        if (
                property == ContentProperty::POSITION ||
                property == ContentProperty::LENGTH ||
                property == ContentProperty::TRIM_START ||
                property == ContentProperty::TRIM_END ||
                property == ContentProperty::PATH ||
                property == VideoContentProperty::FRAME_TYPE ||
                property == DCPContentProperty::NEEDS_ASSETS ||
                property == DCPContentProperty::NEEDS_KDM ||
                property == SubtitleContentProperty::COLOUR ||
                property == SubtitleContentProperty::OUTLINE ||
                property == SubtitleContentProperty::SHADOW ||
                property == SubtitleContentProperty::EFFECT_COLOUR ||
                property == FFmpegContentProperty::SUBTITLE_STREAM ||
                property == VideoContentProperty::COLOUR_CONVERSION
                ) {

                _have_valid_pieces = false;
                Changed (frequent);

        } else if (
                property == SubtitleContentProperty::LINE_SPACING ||
                property == SubtitleContentProperty::OUTLINE_WIDTH ||
                property == SubtitleContentProperty::Y_SCALE ||
                property == SubtitleContentProperty::FADE_IN ||
                property == SubtitleContentProperty::FADE_OUT ||
                property == ContentProperty::VIDEO_FRAME_RATE ||
                property == SubtitleContentProperty::USE ||
                property == SubtitleContentProperty::X_OFFSET ||
                property == SubtitleContentProperty::Y_OFFSET ||
                property == SubtitleContentProperty::X_SCALE ||
                property == SubtitleContentProperty::FONTS ||
                property == VideoContentProperty::CROP ||
                property == VideoContentProperty::SCALE ||
                property == VideoContentProperty::FADE_IN ||
                property == VideoContentProperty::FADE_OUT
                ) {

                Changed (frequent);
        }
}

void
Player::set_video_container_size (dcp::Size s)
{
        if (s == _video_container_size) {
                return;
        }

        _video_container_size = s;

        _black_image.reset (new Image (AV_PIX_FMT_RGB24, _video_container_size, true));
        _black_image->make_black ();

        Changed (false);
}

void
Player::playlist_changed ()
{
        _have_valid_pieces = false;
        Changed (false);
}

void
Player::film_changed (Film::Property p)
{
        /* Here we should notice Film properties that affect our output, and
           alert listeners that our output now would be different to how it was
           last time we were run.
        */

        if (p == Film::CONTAINER) {
                Changed (false);
        } else if (p == Film::VIDEO_FRAME_RATE) {
                /* Pieces contain a FrameRateChange which contains the DCP frame rate,
                   so we need new pieces here.
                */
                _have_valid_pieces = false;
                Changed (false);
        } else if (p == Film::AUDIO_PROCESSOR) {
                if (_film->audio_processor ()) {
                        _audio_processor = _film->audio_processor()->clone (_film->audio_frame_rate ());
                }
        }
}

list<PositionImage>
Player::transform_image_subtitles (list<ImageSubtitle> subs) const
{
        list<PositionImage> all;

        for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
                if (!i->image) {
                        continue;
                }

                /* We will scale the subtitle up to fit _video_container_size */
                dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);

                /* Then we need a corrective translation, consisting of two parts:
                 *
                 * 1.  that which is the result of the scaling of the subtitle by _video_container_size; this will be
                 *     rect.x * _video_container_size.width and rect.y * _video_container_size.height.
                 *
                 * 2.  that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
                 *     (width_before_subtitle_scale * (1 - subtitle_x_scale) / 2) and
                 *     (height_before_subtitle_scale * (1 - subtitle_y_scale) / 2).
                 *
                 * Combining these two translations gives these expressions.
                 */
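                /* As an illustration (figures assumed, not from any real content): with a
                   1998x1080 container and a rectangle of x = 0.2, y = 0.1, width = 0.5,
                   height = 0.25, the subtitle image is scaled to 999x270 and placed at
                   (400, 108) in the container.
                */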

                all.push_back (
                        PositionImage (
                                i->image->scale (
                                        scaled_size,
                                        dcp::YUV_TO_RGB_REC601,
                                        i->image->pixel_format (),
                                        true,
                                        _fast
                                        ),
                                Position<int> (
                                        lrint (_video_container_size.width * i->rectangle.x),
                                        lrint (_video_container_size.height * i->rectangle.y)
                                        )
                                )
                        );
        }

        return all;
}

shared_ptr<PlayerVideo>
Player::black_player_video_frame () const
{
        return shared_ptr<PlayerVideo> (
                new PlayerVideo (
                        shared_ptr<const ImageProxy> (new RawImageProxy (_black_image)),
                        Crop (),
                        optional<double> (),
                        _video_container_size,
                        _video_container_size,
                        EYES_BOTH,
                        PART_WHOLE,
                        PresetColourConversion::all().front().conversion
                )
        );
}

Frame
Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
{
        DCPTime s = t - piece->content->position ();
        s = min (piece->content->length_after_trim(), s);
        s = max (DCPTime(), s + DCPTime (piece->content->trim_start(), piece->frc));

        /* It might seem more logical here to convert s to a ContentTime (using the FrameRateChange)
           then convert that ContentTime to frames at the content's rate.  However this fails for
           situations like content at 29.9978733fps, DCP at 30fps.  The accuracy of the Time type is not
           enough to distinguish between the two with low values of time (e.g. 3200 in Time units).

           Instead we convert the DCPTime using the DCP video rate then account for any skip/repeat.
        */
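        /* A worked example with assumed rates: 25fps content in a 50fps DCP gives
           frc.factor() == 2 (each content frame is repeated), so a DCPTime of one second
           is 50 DCP frames and 50 / 2 = 25 content frames.
        */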
        return s.frames_floor (piece->frc.dcp) / piece->frc.factor ();
}

DCPTime
Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
{
        /* See comment in dcp_to_content_video */
        DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime (piece->content->trim_start (), piece->frc);
        return max (DCPTime (), d + piece->content->position ());
}

Frame
Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
{
        DCPTime s = t - piece->content->position ();
        s = min (piece->content->length_after_trim(), s);
        /* See notes in dcp_to_content_video */
        return max (DCPTime (), DCPTime (piece->content->trim_start (), piece->frc) + s).frames_floor (_film->audio_frame_rate ());
}

DCPTime
Player::resampled_audio_to_dcp (shared_ptr<const Piece> piece, Frame f) const
{
        /* See comment in dcp_to_content_video */
        DCPTime const d = DCPTime::from_frames (f, _film->audio_frame_rate()) - DCPTime (piece->content->trim_start(), piece->frc);
        return max (DCPTime (), d + piece->content->position ());
}

ContentTime
Player::dcp_to_content_time (shared_ptr<const Piece> piece, DCPTime t) const
{
        DCPTime s = t - piece->content->position ();
        s = min (piece->content->length_after_trim(), s);
        return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
}

DCPTime
Player::content_time_to_dcp (shared_ptr<const Piece> piece, ContentTime t) const
{
        return max (DCPTime (), DCPTime (t - piece->content->trim_start(), piece->frc) + piece->content->position());
}

list<shared_ptr<Font> >
Player::get_subtitle_fonts ()
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        list<shared_ptr<Font> > fonts;
        BOOST_FOREACH (shared_ptr<Piece>& p, _pieces) {
                if (p->content->subtitle) {
                        /* XXX: things may go wrong if there are duplicate font IDs
                           with different font files.
                        */
                        list<shared_ptr<Font> > f = p->content->subtitle->fonts ();
                        copy (f.begin(), f.end(), back_inserter (fonts));
                }
        }

        return fonts;
}

/** Set this player never to produce any video data */
void
Player::set_ignore_video ()
{
        _ignore_video = true;
}

/** Set whether or not this player should always burn text subtitles into the image,
 *  regardless of the content settings.
 *  @param burn true to always burn subtitles, false to obey content settings.
 */
void
Player::set_always_burn_subtitles (bool burn)
{
        _always_burn_subtitles = burn;
}

void
Player::set_fast ()
{
        _fast = true;
        _have_valid_pieces = false;
}

void
Player::set_play_referenced ()
{
        _play_referenced = true;
        _have_valid_pieces = false;
}

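/** @return the reel assets of any DCP content which is referenced from this film
 *  (rather than being re-encoded), along with the period of the DCP that each
 *  asset covers.
 */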
list<ReferencedReelAsset>
Player::get_reel_assets ()
{
        list<ReferencedReelAsset> a;

        BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
                shared_ptr<DCPContent> j = dynamic_pointer_cast<DCPContent> (i);
                if (!j) {
                        continue;
                }

                scoped_ptr<DCPDecoder> decoder;
                try {
                        decoder.reset (new DCPDecoder (j, _film->log()));
                } catch (...) {
                        return a;
                }

                int64_t offset = 0;
                BOOST_FOREACH (shared_ptr<dcp::Reel> k, decoder->reels()) {

                        DCPOMATIC_ASSERT (j->video_frame_rate ());
                        double const cfr = j->video_frame_rate().get();
                        Frame const trim_start = j->trim_start().frames_round (cfr);
                        Frame const trim_end = j->trim_end().frames_round (cfr);
                        int const ffr = _film->video_frame_rate ();

                        DCPTime const from = i->position() + DCPTime::from_frames (offset, _film->video_frame_rate());
                        if (j->reference_video ()) {
                                shared_ptr<dcp::ReelAsset> ra = k->main_picture ();
                                DCPOMATIC_ASSERT (ra);
                                ra->set_entry_point (ra->entry_point() + trim_start);
                                ra->set_duration (ra->duration() - trim_start - trim_end);
                                a.push_back (
                                        ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
                                        );
                        }

                        if (j->reference_audio ()) {
                                shared_ptr<dcp::ReelAsset> ra = k->main_sound ();
                                DCPOMATIC_ASSERT (ra);
                                ra->set_entry_point (ra->entry_point() + trim_start);
                                ra->set_duration (ra->duration() - trim_start - trim_end);
                                a.push_back (
                                        ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
                                        );
                        }

                        if (j->reference_subtitle ()) {
                                shared_ptr<dcp::ReelAsset> ra = k->main_subtitle ();
                                DCPOMATIC_ASSERT (ra);
                                ra->set_entry_point (ra->entry_point() + trim_start);
                                ra->set_duration (ra->duration() - trim_start - trim_end);
                                a.push_back (
                                        ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
                                        );
                        }

                        /* Assume that main picture duration is the length of the reel */
                        offset += k->main_picture()->duration ();
                }
        }

        return a;
}

list<shared_ptr<Piece> >
Player::overlaps (DCPTime from, DCPTime to, boost::function<bool (Content *)> valid)
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        list<shared_ptr<Piece> > overlaps;
        BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
                if (valid (i->content.get ()) && i->content->position() < to && i->content->end() > from) {
                        overlaps.push_back (i);
                }
        }

        return overlaps;
}

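/** Run one pass of the player: ask the piece with the earliest pending data to decode
 *  a bit more, then emit any audio which is now guaranteed to be complete.
 *  @return true if there is no more content to come, false otherwise.
 */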
bool
Player::pass ()
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        shared_ptr<Piece> earliest;
        DCPTime earliest_content;

        BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
                if (!i->done) {
                        DCPTime const t = i->content->position() + DCPTime (i->decoder->position(), i->frc);
                        if (!earliest || t < earliest_content) {
                                earliest_content = t;
                                earliest = i;
                        }
                }
        }

        if (!earliest) {
                /* No more content; fill up with silent black */
                DCPTimePeriod remaining_video (DCPTime(), _playlist->length());
                if (_last_video_time) {
                        remaining_video.from = _last_video_time.get();
                }
                fill_video (remaining_video);
                DCPTimePeriod remaining_audio (DCPTime(), _playlist->length());
                if (_last_audio_time) {
                        remaining_audio.from = _last_audio_time.get();
                }
                fill_audio (remaining_audio);
                return true;
        }

        earliest->done = earliest->decoder->pass ();
        if (earliest->done && earliest->content->audio) {
                /* Flush the Player audio system for this piece */
                BOOST_FOREACH (AudioStreamPtr i, earliest->content->audio->streams()) {
                        audio_flush (earliest, i);
                }
        }

        /* Emit any audio that is ready */
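        /* We can only emit audio up to the earliest point that every unfinished stream
           has pushed to; before that point no further audio can arrive.
        */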

        DCPTime pull_from = _playlist->length ();
        for (map<AudioStreamPtr, StreamState>::const_iterator i = _stream_states.begin(); i != _stream_states.end(); ++i) {
                if (!i->second.piece->done && i->second.last_push_end < pull_from) {
                        pull_from = i->second.last_push_end;
                }
        }

        list<pair<shared_ptr<AudioBuffers>, DCPTime> > audio = _audio_merger.pull (pull_from);
        for (list<pair<shared_ptr<AudioBuffers>, DCPTime> >::iterator i = audio.begin(); i != audio.end(); ++i) {
                if (_last_audio_time && i->second < _last_audio_time.get()) {
                        /* There has been an accurate seek and we have received some audio before the seek time;
                           discard it.
                        */
                        pair<shared_ptr<AudioBuffers>, DCPTime> cut = discard_audio (i->first, i->second, *_last_audio_time);
                        if (!cut.first) {
                                continue;
                        }
                        *i = cut;
                }

                if (_last_audio_time) {
                        fill_audio (DCPTimePeriod (_last_audio_time.get(), i->second));
                }

                Audio (i->first, i->second);
                _last_audio_time = i->second + DCPTime::from_frames(i->first->frames(), _film->audio_frame_rate());
        }

        return false;
}

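/** Handler for video data arriving from a piece's decoder.  Work out the DCP time of
 *  the frame, discard it if it falls outside the content or before the last accurate
 *  seek, burn in any active subtitles, fill any gap since the last emitted frame and
 *  then emit the result.
 */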
void
Player::video (weak_ptr<Piece> wp, ContentVideo video)
{
        shared_ptr<Piece> piece = wp.lock ();
        if (!piece) {
                return;
        }

        FrameRateChange frc(piece->content->active_video_frame_rate(), _film->video_frame_rate());
        if (frc.skip && (video.frame % 2) == 1) {
                return;
        }

        /* Time and period of the frame we will emit */
        DCPTime const time = content_video_to_dcp (piece, video.frame);
        DCPTimePeriod const period (time, time + one_video_frame());

        /* Discard if it's outside the content's period or if it's before the last accurate seek */
        if (time < piece->content->position() || time >= piece->content->end() || (_last_video_time && time < _last_video_time)) {
                return;
        }

        /* Get any subtitles */

        optional<PositionImage> subtitles;

        for (ActiveSubtitles::const_iterator i = _active_subtitles.begin(); i != _active_subtitles.end(); ++i) {

                shared_ptr<Piece> sub_piece = i->first.lock ();
                if (!sub_piece) {
                        continue;
                }

                if (!sub_piece->content->subtitle->use() || (!_always_burn_subtitles && !sub_piece->content->subtitle->burn())) {
                        continue;
                }

                pair<PlayerSubtitles, DCPTime> sub = i->second;

                list<PositionImage> sub_images;

                /* Image subtitles */
                list<PositionImage> c = transform_image_subtitles (sub.first.image);
                copy (c.begin(), c.end(), back_inserter (sub_images));

                /* Text subtitles (rendered to an image) */
                if (!sub.first.text.empty ()) {
                        list<PositionImage> s = render_subtitles (sub.first.text, sub.first.fonts, _video_container_size, time);
                        copy (s.begin (), s.end (), back_inserter (sub_images));
                }

                if (!sub_images.empty ()) {
                        subtitles = merge (sub_images);
                }
        }

        /* Fill gaps */

        if (_last_video_time) {
                fill_video (DCPTimePeriod (_last_video_time.get(), time));
        }

        _last_video.reset (
                new PlayerVideo (
                        video.image,
                        piece->content->video->crop (),
                        piece->content->video->fade (video.frame),
                        piece->content->video->scale().size (
                                piece->content->video, _video_container_size, _film->frame_size ()
                                ),
                        _video_container_size,
                        video.eyes,
                        video.part,
                        piece->content->video->colour_conversion ()
                        )
                );

        if (subtitles) {
                _last_video->set_subtitle (subtitles.get ());
        }

        Video (_last_video, time);

        _last_video_time = time + one_video_frame ();
}

void
Player::audio_flush (shared_ptr<Piece> piece, AudioStreamPtr stream)
{
        shared_ptr<AudioContent> content = piece->content->audio;
        DCPOMATIC_ASSERT (content);

        shared_ptr<Resampler> r = resampler (content, stream, false);
        if (!r) {
                return;
        }

        pair<shared_ptr<const AudioBuffers>, Frame> ro = r->flush ();
        if (ro.first->frames() == 0) {
                return;
        }

        ContentAudio content_audio;
        content_audio.audio = ro.first;
        content_audio.frame = ro.second;

        /* Compute time in the DCP */
        DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame) + DCPTime::from_seconds (content->delay() / 1000.0);

        audio_transform (content, stream, content_audio, time);
}

/** Do our common processing on some audio */
void
Player::audio_transform (shared_ptr<AudioContent> content, AudioStreamPtr stream, ContentAudio content_audio, DCPTime time)
{
        DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);

        /* Gain */

        if (content->gain() != 0) {
                shared_ptr<AudioBuffers> gain (new AudioBuffers (content_audio.audio));
                gain->apply_gain (content->gain ());
                content_audio.audio = gain;
        }

        /* Remap */
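        /* Mix the stream's channels into the DCP channel layout using its AudioMapping:
           every non-zero entry (i, j) accumulates input channel i into DCP channel j
           with the given gain.  For example (an assumed mapping, purely illustrative) a
           stereo stream mapped 1:1 to L and R leaves the remaining DCP channels silent.
        */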

        shared_ptr<AudioBuffers> dcp_mapped (new AudioBuffers (_film->audio_channels(), content_audio.audio->frames()));
        dcp_mapped->make_silent ();

        AudioMapping map = stream->mapping ();
        for (int i = 0; i < map.input_channels(); ++i) {
                for (int j = 0; j < dcp_mapped->channels(); ++j) {
                        if (map.get (i, static_cast<dcp::Channel> (j)) > 0) {
                                dcp_mapped->accumulate_channel (
                                        content_audio.audio.get(),
                                        i,
                                        static_cast<dcp::Channel> (j),
                                        map.get (i, static_cast<dcp::Channel> (j))
                                        );
                        }
                }
        }

        content_audio.audio = dcp_mapped;

        /* Process */

        if (_audio_processor) {
                content_audio.audio = _audio_processor->run (content_audio.audio, _film->audio_channels ());
        }

        /* Push */

        _audio_merger.push (content_audio.audio, time);
        DCPOMATIC_ASSERT (_stream_states.find (stream) != _stream_states.end ());
        _stream_states[stream].last_push_end = time + DCPTime::from_frames (content_audio.audio->frames(), _film->audio_frame_rate());
}

void
Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_audio)
{
        DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);

        shared_ptr<Piece> piece = wp.lock ();
        if (!piece) {
                return;
        }

        shared_ptr<AudioContent> content = piece->content->audio;
        DCPOMATIC_ASSERT (content);

        /* Resample */
        if (stream->frame_rate() != content->resampled_frame_rate()) {
                shared_ptr<Resampler> r = resampler (content, stream, true);
                pair<shared_ptr<const AudioBuffers>, Frame> ro = r->run (content_audio.audio, content_audio.frame);
                if (ro.first->frames() == 0) {
                        return;
                }
                content_audio.audio = ro.first;
                content_audio.frame = ro.second;
        }

        /* Compute time in the DCP */
        DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame) + DCPTime::from_seconds (content->delay() / 1000.0);
        /* And the end of this block in the DCP */
        DCPTime end = time + DCPTime::from_frames(content_audio.audio->frames(), content->resampled_frame_rate());

        /* Remove anything that comes before the start or after the end of the content */
        if (time < piece->content->position()) {
                pair<shared_ptr<AudioBuffers>, DCPTime> cut = discard_audio (content_audio.audio, time, piece->content->position());
                if (!cut.first) {
                        /* This audio is entirely discarded */
                        return;
                }
                content_audio.audio = cut.first;
                time = cut.second;
        } else if (time > piece->content->end()) {
                /* Discard it all */
                return;
        } else if (end > piece->content->end()) {
                Frame const remaining_frames = DCPTime(piece->content->end() - time).frames_round(_film->audio_frame_rate());
                DCPOMATIC_ASSERT (remaining_frames > 0);
                shared_ptr<AudioBuffers> cut (new AudioBuffers (content_audio.audio->channels(), remaining_frames));
                cut->copy_from (content_audio.audio.get(), remaining_frames, 0, 0);
                content_audio.audio = cut;
        }

        audio_transform (content, stream, content_audio, time);
}

void
Player::image_subtitle_start (weak_ptr<Piece> wp, ContentImageSubtitle subtitle)
{
        shared_ptr<Piece> piece = wp.lock ();
        if (!piece) {
                return;
        }

        /* Apply content's subtitle offsets */
        subtitle.sub.rectangle.x += piece->content->subtitle->x_offset ();
        subtitle.sub.rectangle.y += piece->content->subtitle->y_offset ();

        /* Apply content's subtitle scale */
        subtitle.sub.rectangle.width *= piece->content->subtitle->x_scale ();
        subtitle.sub.rectangle.height *= piece->content->subtitle->y_scale ();

        /* Apply a corrective translation to keep the subtitle centred after that scale */
        subtitle.sub.rectangle.x -= subtitle.sub.rectangle.width * (piece->content->subtitle->x_scale() - 1);
        subtitle.sub.rectangle.y -= subtitle.sub.rectangle.height * (piece->content->subtitle->y_scale() - 1);

        PlayerSubtitles ps;
        ps.image.push_back (subtitle.sub);
        DCPTime from (content_time_to_dcp (piece, subtitle.from()));

        _active_subtitles[wp] = make_pair (ps, from);
}

void
Player::text_subtitle_start (weak_ptr<Piece> wp, ContentTextSubtitle subtitle)
{
        shared_ptr<Piece> piece = wp.lock ();
        if (!piece) {
                return;
        }

        PlayerSubtitles ps;
        DCPTime const from (content_time_to_dcp (piece, subtitle.from()));

        BOOST_FOREACH (dcp::SubtitleString s, subtitle.subs) {
                s.set_h_position (s.h_position() + piece->content->subtitle->x_offset ());
                s.set_v_position (s.v_position() + piece->content->subtitle->y_offset ());
                float const xs = piece->content->subtitle->x_scale();
                float const ys = piece->content->subtitle->y_scale();
                float size = s.size();

                /* Adjust size to express the common part of the scaling; the rest is
                   expressed as an aspect adjustment below.  e.g. if xs = ys = 0.5 we
                   scale size by 0.5.
                */
                if (xs > 1e-5 && ys > 1e-5) {
                        size *= 1 / min (1 / xs, 1 / ys);
                }
                s.set_size (size);

                /* Then express aspect ratio changes */
                if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) {
                        s.set_aspect_adjust (xs / ys);
                }

                s.set_in (dcp::Time(from.seconds(), 1000));
                ps.text.push_back (SubtitleString (s, piece->content->subtitle->outline_width()));
                ps.add_fonts (piece->content->subtitle->fonts ());
        }

        _active_subtitles[wp] = make_pair (ps, from);
}

void
Player::subtitle_stop (weak_ptr<Piece> wp, ContentTime to)
{
        if (_active_subtitles.find (wp) == _active_subtitles.end ()) {
                return;
        }

        shared_ptr<Piece> piece = wp.lock ();
        if (!piece) {
                return;
        }

        if (piece->content->subtitle->use() && !_always_burn_subtitles && !piece->content->subtitle->burn()) {
                Subtitle (_active_subtitles[wp].first, DCPTimePeriod (_active_subtitles[wp].second, content_time_to_dcp (piece, to)));
        }

        _active_subtitles.erase (wp);
}

void
Player::seek (DCPTime time, bool accurate)
{
        if (_audio_processor) {
                _audio_processor->flush ();
        }

        for (ResamplerMap::iterator i = _resamplers.begin(); i != _resamplers.end(); ++i) {
                i->second->flush ();
                i->second->reset ();
        }

        _audio_merger.clear ();
        _active_subtitles.clear ();

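        /* Only seek a piece if the seek time falls inside it: pieces which start after
           the seek point are rewound to their beginning, and pieces which finish before
           it are just marked done.
        */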
        BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
                if (time < i->content->position()) {
                        /* Before; seek to 0 */
                        i->decoder->seek (ContentTime(), accurate);
                        i->done = false;
                } else if (i->content->position() <= time && time < i->content->end()) {
                        /* During; seek to position */
                        i->decoder->seek (dcp_to_content_time (i, time), accurate);
                        i->done = false;
                } else {
                        /* After; this piece is done */
                        i->done = true;
                }
        }

        if (accurate) {
                _last_video_time = time;
                _last_audio_time = time;
        } else {
                _last_video_time = optional<DCPTime> ();
                _last_audio_time = optional<DCPTime> ();
        }
}

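/** Get the resampler for a content/stream pair.
 *  @param create true to create a new resampler if one does not already exist.
 *  @return the resampler, or a null pointer if there is none and create is false.
 */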
shared_ptr<Resampler>
Player::resampler (shared_ptr<const AudioContent> content, AudioStreamPtr stream, bool create)
{
        ResamplerMap::const_iterator i = _resamplers.find (make_pair (content, stream));
        if (i != _resamplers.end ()) {
                return i->second;
        }

        if (!create) {
                return shared_ptr<Resampler> ();
        }

        LOG_GENERAL (
                "Creating new resampler from %1 to %2 with %3 channels",
                stream->frame_rate(),
                content->resampled_frame_rate(),
                stream->channels()
                );

        shared_ptr<Resampler> r (
                new Resampler (stream->frame_rate(), content->resampled_frame_rate(), stream->channels())
                );

        _resamplers[make_pair(content, stream)] = r;
        return r;
}

void
Player::fill_video (DCPTimePeriod period)
{
        /* XXX: this may not work for 3D */
        BOOST_FOREACH (DCPTimePeriod i, subtract(period, _no_video)) {
                for (DCPTime j = i.from; j < i.to; j += one_video_frame()) {
                        if (_playlist->video_content_at(j) && _last_video) {
                                Video (shared_ptr<PlayerVideo> (new PlayerVideo (*_last_video)), j);
                        } else {
                                Video (black_player_video_frame(), j);
                        }
                }
        }
}

void
Player::fill_audio (DCPTimePeriod period)
{
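        /* Emit silence over the period, skipping any part of it which is covered by
           referenced DCP audio, in blocks of at most half a second.
        */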
        BOOST_FOREACH (DCPTimePeriod i, subtract(period, _no_audio)) {
                DCPTime t = i.from;
                while (t < i.to) {
                        DCPTime block = min (DCPTime::from_seconds (0.5), i.to - t);
                        Frame const samples = block.frames_round(_film->audio_frame_rate());
                        if (samples) {
                                shared_ptr<AudioBuffers> silence (new AudioBuffers (_film->audio_channels(), samples));
                                silence->make_silent ();
                                Audio (silence, t);
                        }
                        t += block;
                }
        }
}

DCPTime
Player::one_video_frame () const
{
        return DCPTime::from_frames (1, _film->video_frame_rate ());
}

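/** Discard the part of some audio which falls before a given time.
 *  @param audio audio buffers.
 *  @param time time of the first frame of audio.
 *  @param discard_to time to discard up to.
 *  @return the remaining audio and the time at which it now starts; the first part of
 *  the pair is a null pointer if nothing remains.
 */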
pair<shared_ptr<AudioBuffers>, DCPTime>
Player::discard_audio (shared_ptr<const AudioBuffers> audio, DCPTime time, DCPTime discard_to) const
{
        DCPTime const discard_time = discard_to - time;
        Frame const discard_frames = discard_time.frames_round(_film->audio_frame_rate());
        Frame remaining_frames = audio->frames() - discard_frames;
        if (remaining_frames <= 0) {
                return make_pair(shared_ptr<AudioBuffers>(), DCPTime());
        }
        shared_ptr<AudioBuffers> cut (new AudioBuffers (audio->channels(), remaining_frames));
        cut->copy_from (audio.get(), remaining_frames, discard_frames, 0);
        return make_pair(cut, time + discard_time);
}