Fix fill of timeline periods where there is no video.
src/lib/player.cc
1 /*
2     Copyright (C) 2013-2017 Carl Hetherington <cth@carlh.net>
3
4     This file is part of DCP-o-matic.
5
6     DCP-o-matic is free software; you can redistribute it and/or modify
7     it under the terms of the GNU General Public License as published by
8     the Free Software Foundation; either version 2 of the License, or
9     (at your option) any later version.
10
11     DCP-o-matic is distributed in the hope that it will be useful,
12     but WITHOUT ANY WARRANTY; without even the implied warranty of
13     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14     GNU General Public License for more details.
15
16     You should have received a copy of the GNU General Public License
17     along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.
18
19 */
20
21 #include "player.h"
22 #include "film.h"
23 #include "audio_buffers.h"
24 #include "content_audio.h"
25 #include "dcp_content.h"
26 #include "job.h"
27 #include "image.h"
28 #include "raw_image_proxy.h"
29 #include "ratio.h"
30 #include "log.h"
31 #include "render_subtitles.h"
32 #include "config.h"
33 #include "content_video.h"
34 #include "player_video.h"
35 #include "frame_rate_change.h"
36 #include "audio_processor.h"
37 #include "playlist.h"
38 #include "referenced_reel_asset.h"
39 #include "decoder_factory.h"
40 #include "decoder.h"
41 #include "video_decoder.h"
42 #include "audio_decoder.h"
43 #include "subtitle_content.h"
44 #include "subtitle_decoder.h"
45 #include "ffmpeg_content.h"
46 #include "audio_content.h"
47 #include "content_subtitle.h"
48 #include "dcp_decoder.h"
49 #include "image_decoder.h"
50 #include "resampler.h"
51 #include "compose.hpp"
52 #include <dcp/reel.h>
53 #include <dcp/reel_sound_asset.h>
54 #include <dcp/reel_subtitle_asset.h>
55 #include <dcp/reel_picture_asset.h>
56 #include <boost/foreach.hpp>
57 #include <stdint.h>
58 #include <algorithm>
59 #include <iostream>
60
61 #include "i18n.h"
62
63 #define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);
64
65 using std::list;
66 using std::cout;
67 using std::min;
68 using std::max;
70 using std::vector;
71 using std::pair;
72 using std::map;
73 using std::make_pair;
74 using std::copy;
75 using boost::shared_ptr;
76 using boost::weak_ptr;
77 using boost::dynamic_pointer_cast;
78 using boost::optional;
79 using boost::scoped_ptr;
80
81 Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
82         : _film (film)
83         , _playlist (playlist)
84         , _have_valid_pieces (false)
85         , _ignore_video (false)
86         , _ignore_audio (false)
87         , _always_burn_subtitles (false)
88         , _fast (false)
89         , _play_referenced (false)
90         , _audio_merger (_film->audio_frame_rate())
91 {
92         _film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
93         _playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
94         _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::playlist_content_changed, this, _1, _2, _3));
95         set_video_container_size (_film->frame_size ());
96
97         film_changed (Film::AUDIO_PROCESSOR);
98
99         seek (DCPTime (), true);
100 }
101
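/** Rebuild _pieces from the playlist: create a decoder for each piece of content,
 *  connect its video, audio and subtitle outputs to our handlers, set up _stream_states
 *  for each audio stream, and note the periods which will be provided by referenced
 *  DCP video/audio in _no_video and _no_audio.
 */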
102 void
103 Player::setup_pieces ()
104 {
105         _pieces.clear ();
106
107         BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
108
109                 if (!i->paths_valid ()) {
110                         continue;
111                 }
112
113                 shared_ptr<Decoder> decoder = decoder_factory (i, _film->log());
114                 FrameRateChange frc (i->active_video_frame_rate(), _film->video_frame_rate());
115
116                 if (!decoder) {
117                         /* Not something that we can decode; e.g. Atmos content */
118                         continue;
119                 }
120
121                 if (decoder->video && _ignore_video) {
122                         decoder->video->set_ignore ();
123                 }
124
125                 if (decoder->audio && _ignore_audio) {
126                         decoder->audio->set_ignore ();
127                 }
128
129                 shared_ptr<DCPDecoder> dcp = dynamic_pointer_cast<DCPDecoder> (decoder);
130                 if (dcp && _play_referenced) {
131                         dcp->set_decode_referenced ();
132                 }
133
134                 shared_ptr<Piece> piece (new Piece (i, decoder, frc));
135                 _pieces.push_back (piece);
136
137                 if (decoder->video) {
138                         decoder->video->Data.connect (bind (&Player::video, this, weak_ptr<Piece> (piece), _1));
139                 }
140
141                 if (decoder->audio) {
142                         decoder->audio->Data.connect (bind (&Player::audio, this, weak_ptr<Piece> (piece), _1, _2));
143                 }
144
145                 if (decoder->subtitle) {
146                         decoder->subtitle->ImageStart.connect (bind (&Player::image_subtitle_start, this, weak_ptr<Piece> (piece), _1));
147                         decoder->subtitle->TextStart.connect (bind (&Player::text_subtitle_start, this, weak_ptr<Piece> (piece), _1));
148                         decoder->subtitle->Stop.connect (bind (&Player::subtitle_stop, this, weak_ptr<Piece> (piece), _1));
149                 }
150         }
151
152         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
153                 if (i->content->audio) {
154                         BOOST_FOREACH (AudioStreamPtr j, i->content->audio->streams()) {
155                                 _stream_states[j] = StreamState (i, i->content->position ());
156                         }
157                 }
158         }
159
160         if (!_play_referenced) {
161                 BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
162                         shared_ptr<DCPContent> dc = dynamic_pointer_cast<DCPContent> (i->content);
163                         if (dc) {
164                                 if (dc->reference_video()) {
165                                         _no_video.push_back (DCPTimePeriod (dc->position(), dc->end()));
166                                 }
167                                 if (dc->reference_audio()) {
168                                         _no_audio.push_back (DCPTimePeriod (dc->position(), dc->end()));
169                                 }
170                         }
171                 }
172         }
173
174         _last_video_time = optional<DCPTime> ();
175         _last_audio_time = optional<DCPTime> ();
176         _have_valid_pieces = true;
177 }
178
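/** Called when a property of some content in the playlist has changed.  Properties in the
 *  first group below affect how our pieces are set up, so they must be rebuilt; those in
 *  the second group only require us to tell listeners that our output has changed.
 */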
179 void
180 Player::playlist_content_changed (weak_ptr<Content> w, int property, bool frequent)
181 {
182         shared_ptr<Content> c = w.lock ();
183         if (!c) {
184                 return;
185         }
186
187         if (
188                 property == ContentProperty::POSITION ||
189                 property == ContentProperty::LENGTH ||
190                 property == ContentProperty::TRIM_START ||
191                 property == ContentProperty::TRIM_END ||
192                 property == ContentProperty::PATH ||
193                 property == VideoContentProperty::FRAME_TYPE ||
194                 property == DCPContentProperty::NEEDS_ASSETS ||
195                 property == DCPContentProperty::NEEDS_KDM ||
196                 property == SubtitleContentProperty::COLOUR ||
197                 property == SubtitleContentProperty::OUTLINE ||
198                 property == SubtitleContentProperty::SHADOW ||
199                 property == SubtitleContentProperty::EFFECT_COLOUR ||
200                 property == FFmpegContentProperty::SUBTITLE_STREAM ||
201                 property == VideoContentProperty::COLOUR_CONVERSION
202                 ) {
203
204                 _have_valid_pieces = false;
205                 Changed (frequent);
206
207         } else if (
208                 property == SubtitleContentProperty::LINE_SPACING ||
209                 property == SubtitleContentProperty::OUTLINE_WIDTH ||
210                 property == SubtitleContentProperty::Y_SCALE ||
211                 property == SubtitleContentProperty::FADE_IN ||
212                 property == SubtitleContentProperty::FADE_OUT ||
213                 property == ContentProperty::VIDEO_FRAME_RATE ||
214                 property == SubtitleContentProperty::USE ||
215                 property == SubtitleContentProperty::X_OFFSET ||
216                 property == SubtitleContentProperty::Y_OFFSET ||
217                 property == SubtitleContentProperty::X_SCALE ||
218                 property == SubtitleContentProperty::FONTS ||
219                 property == VideoContentProperty::CROP ||
220                 property == VideoContentProperty::SCALE ||
221                 property == VideoContentProperty::FADE_IN ||
222                 property == VideoContentProperty::FADE_OUT
223                 ) {
224
225                 Changed (frequent);
226         }
227 }
228
229 void
230 Player::set_video_container_size (dcp::Size s)
231 {
232         if (s == _video_container_size) {
233                 return;
234         }
235
236         _video_container_size = s;
237
238         _black_image.reset (new Image (AV_PIX_FMT_RGB24, _video_container_size, true));
239         _black_image->make_black ();
240
241         Changed (false);
242 }
243
244 void
245 Player::playlist_changed ()
246 {
247         _have_valid_pieces = false;
248         Changed (false);
249 }
250
251 void
252 Player::film_changed (Film::Property p)
253 {
254         /* Here we should notice Film properties that affect our output, and
255            alert listeners that our output now would be different to how it was
256            last time we were run.
257         */
258
259         if (p == Film::CONTAINER) {
260                 Changed (false);
261         } else if (p == Film::VIDEO_FRAME_RATE) {
262                 /* Pieces contain a FrameRateChange which contains the DCP frame rate,
263                    so we need new pieces here.
264                 */
265                 _have_valid_pieces = false;
266                 Changed (false);
267         } else if (p == Film::AUDIO_PROCESSOR) {
268                 if (_film->audio_processor ()) {
269                         _audio_processor = _film->audio_processor()->clone (_film->audio_frame_rate ());
270                 }
271         }
272 }
273
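/** Scale some image subtitles up to fit _video_container_size and work out their positions
 *  within it.
 *  @return the subtitles as PositionImages, ready to be burnt into (or displayed over) a frame.
 */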
274 list<PositionImage>
275 Player::transform_image_subtitles (list<ImageSubtitle> subs) const
276 {
277         list<PositionImage> all;
278
279         for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
280                 if (!i->image) {
281                         continue;
282                 }
283
284                 /* We will scale the subtitle up to fit _video_container_size */
285                 dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);
286
287                 /* Then we need a corrective translation to position the scaled subtitle:
288                  *
289                  * this is the result of the scaling of the subtitle by _video_container_size, i.e.
290                  *     rect.x * _video_container_size.width and rect.y * _video_container_size.height.
291                  *
292                  * (The shift that keeps the subtitle centred when the content's subtitle scale is
293                  *     applied has already been made, along with the content's offset and scale
294                  *     themselves, in image_subtitle_start(), so it does not appear here.)
295                  *
296                  * This gives the expressions below.
297                  */
298
299                 all.push_back (
300                         PositionImage (
301                                 i->image->scale (
302                                         scaled_size,
303                                         dcp::YUV_TO_RGB_REC601,
304                                         i->image->pixel_format (),
305                                         true,
306                                         _fast
307                                         ),
308                                 Position<int> (
309                                         lrint (_video_container_size.width * i->rectangle.x),
310                                         lrint (_video_container_size.height * i->rectangle.y)
311                                         )
312                                 )
313                         );
314         }
315
316         return all;
317 }
318
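/** @return a black frame at the size of the video container, wrapped up as a PlayerVideo */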
319 shared_ptr<PlayerVideo>
320 Player::black_player_video_frame () const
321 {
322         return shared_ptr<PlayerVideo> (
323                 new PlayerVideo (
324                         shared_ptr<const ImageProxy> (new RawImageProxy (_black_image)),
325                         Crop (),
326                         optional<double> (),
327                         _video_container_size,
328                         _video_container_size,
329                         EYES_BOTH,
330                         PART_WHOLE,
331                         PresetColourConversion::all().front().conversion
332                 )
333         );
334 }
335
336 Frame
337 Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
338 {
339         DCPTime s = t - piece->content->position ();
340         s = min (piece->content->length_after_trim(), s);
341         s = max (DCPTime(), s + DCPTime (piece->content->trim_start(), piece->frc));
342
343         /* It might seem more logical here to convert s to a ContentTime (using the FrameRateChange)
344            then convert that ContentTime to frames at the content's rate.  However this fails for
345            situations like content at 29.9978733fps, DCP at 30fps.  The accuracy of the Time type is not
346            enough to distinguish between the two with low values of time (e.g. 3200 in Time units).
347
348            Instead we convert the DCPTime using the DCP video rate then account for any skip/repeat.
349         */
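        /* Illustrative example (this assumes FrameRateChange behaviour which is not shown in
           this file): 25fps content in a 50fps DCP has each frame repeated, so frc.factor() is 2;
           two seconds is 100 DCP frames and 100 / 2 = 50 content frames, i.e. two seconds at 25fps.
        */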
350         return s.frames_floor (piece->frc.dcp) / piece->frc.factor ();
351 }
352
353 DCPTime
354 Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
355 {
356         /* See comment in dcp_to_content_video */
357         DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime (piece->content->trim_start (), piece->frc);
358         return max (DCPTime (), d + piece->content->position ());
359 }
360
361 Frame
362 Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
363 {
364         DCPTime s = t - piece->content->position ();
365         s = min (piece->content->length_after_trim(), s);
366         /* See notes in dcp_to_content_video */
367         return max (DCPTime (), DCPTime (piece->content->trim_start (), piece->frc) + s).frames_floor (_film->audio_frame_rate ());
368 }
369
370 DCPTime
371 Player::resampled_audio_to_dcp (shared_ptr<const Piece> piece, Frame f) const
372 {
373         /* See comment in dcp_to_content_video */
374         DCPTime const d = DCPTime::from_frames (f, _film->audio_frame_rate()) - DCPTime (piece->content->trim_start(), piece->frc);
375         return max (DCPTime (), d + piece->content->position ());
376 }
377
378 ContentTime
379 Player::dcp_to_content_time (shared_ptr<const Piece> piece, DCPTime t) const
380 {
381         DCPTime s = t - piece->content->position ();
382         s = min (piece->content->length_after_trim(), s);
383         return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
384 }
385
386 DCPTime
387 Player::content_time_to_dcp (shared_ptr<const Piece> piece, ContentTime t) const
388 {
389         return max (DCPTime (), DCPTime (t - piece->content->trim_start(), piece->frc) + piece->content->position());
390 }
391
392 list<shared_ptr<Font> >
393 Player::get_subtitle_fonts ()
394 {
395         if (!_have_valid_pieces) {
396                 setup_pieces ();
397         }
398
399         list<shared_ptr<Font> > fonts;
400         BOOST_FOREACH (shared_ptr<Piece>& p, _pieces) {
401                 if (p->content->subtitle) {
402                         /* XXX: things may go wrong if there are duplicate font IDs
403                            with different font files.
404                         */
405                         list<shared_ptr<Font> > f = p->content->subtitle->fonts ();
406                         copy (f.begin(), f.end(), back_inserter (fonts));
407                 }
408         }
409
410         return fonts;
411 }
412
413 /** Set this player never to produce any video data */
414 void
415 Player::set_ignore_video ()
416 {
417         _ignore_video = true;
418 }
419
420 /** Set whether or not this player should always burn text subtitles into the image,
421  *  regardless of the content settings.
422  *  @param burn true to always burn subtitles, false to obey content settings.
423  */
424 void
425 Player::set_always_burn_subtitles (bool burn)
426 {
427         _always_burn_subtitles = burn;
428 }
429
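/** Tell this player to prefer speed over quality where possible; the _fast flag is passed
 *  to image scaling operations.
 */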
430 void
431 Player::set_fast ()
432 {
433         _fast = true;
434         _have_valid_pieces = false;
435 }
436
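/** Tell this player to emit data even for DCP content which is marked to be referenced
 *  from an existing DCP rather than re-encoded.
 */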
437 void
438 Player::set_play_referenced ()
439 {
440         _play_referenced = true;
441         _have_valid_pieces = false;
442 }
443
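/** @return details of the reel assets (picture, sound, subtitle) of any DCP content which is
 *  marked to be referenced rather than re-encoded, with entry points and durations adjusted
 *  to account for the content's trim.
 */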
444 list<ReferencedReelAsset>
445 Player::get_reel_assets ()
446 {
447         list<ReferencedReelAsset> a;
448
449         BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
450                 shared_ptr<DCPContent> j = dynamic_pointer_cast<DCPContent> (i);
451                 if (!j) {
452                         continue;
453                 }
454
455                 scoped_ptr<DCPDecoder> decoder;
456                 try {
457                         decoder.reset (new DCPDecoder (j, _film->log()));
458                 } catch (...) {
459                         return a;
460                 }
461
462                 int64_t offset = 0;
463                 BOOST_FOREACH (shared_ptr<dcp::Reel> k, decoder->reels()) {
464
465                         DCPOMATIC_ASSERT (j->video_frame_rate ());
466                         double const cfr = j->video_frame_rate().get();
467                         Frame const trim_start = j->trim_start().frames_round (cfr);
468                         Frame const trim_end = j->trim_end().frames_round (cfr);
469                         int const ffr = _film->video_frame_rate ();
470
471                         DCPTime const from = i->position() + DCPTime::from_frames (offset, _film->video_frame_rate());
472                         if (j->reference_video ()) {
473                                 shared_ptr<dcp::ReelAsset> ra = k->main_picture ();
474                                 DCPOMATIC_ASSERT (ra);
475                                 ra->set_entry_point (ra->entry_point() + trim_start);
476                                 ra->set_duration (ra->duration() - trim_start - trim_end);
477                                 a.push_back (
478                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
479                                         );
480                         }
481
482                         if (j->reference_audio ()) {
483                                 shared_ptr<dcp::ReelAsset> ra = k->main_sound ();
484                                 DCPOMATIC_ASSERT (ra);
485                                 ra->set_entry_point (ra->entry_point() + trim_start);
486                                 ra->set_duration (ra->duration() - trim_start - trim_end);
487                                 a.push_back (
488                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
489                                         );
490                         }
491
492                         if (j->reference_subtitle ()) {
493                                 shared_ptr<dcp::ReelAsset> ra = k->main_subtitle ();
494                                 DCPOMATIC_ASSERT (ra);
495                                 ra->set_entry_point (ra->entry_point() + trim_start);
496                                 ra->set_duration (ra->duration() - trim_start - trim_end);
497                                 a.push_back (
498                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
499                                         );
500                         }
501
502                         /* Assume that main picture duration is the length of the reel */
503                         offset += k->main_picture()->duration ();
504                 }
505         }
506
507         return a;
508 }
509
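/** Do one unit of work: ask the piece which is earliest in DCP time to decode some data,
 *  fill any gap in our video output up to that point (or to the end of the playlist if
 *  everything has finished), then emit any audio which every still-active stream has pushed.
 *  @return true if there is nothing left to do.
 */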
510 bool
511 Player::pass ()
512 {
513         if (!_have_valid_pieces) {
514                 setup_pieces ();
515         }
516
517         shared_ptr<Piece> earliest;
518         DCPTime earliest_content;
519
520         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
521                 if (!i->done) {
522                         DCPTime const t = i->content->position() + DCPTime (i->decoder->position(), i->frc);
523                         if (!earliest || t < earliest_content) {
524                                 earliest_content = t;
525                                 earliest = i;
526                         }
527                 }
528         }
529
530         if (earliest) {
531                 earliest->done = earliest->decoder->pass ();
532                 if (earliest->done && earliest->content->audio) {
533                         /* Flush the Player audio system for this piece */
534                         BOOST_FOREACH (AudioStreamPtr i, earliest->content->audio->streams()) {
535                                 audio_flush (earliest, i);
536                         }
537                 }
538         }
539
540         if (_last_video_time) {
541                 fill_video (DCPTimePeriod (_last_video_time.get(), earliest ? earliest_content : _playlist->length()));
542         } else if (_last_seek_time) {
543                 fill_video (DCPTimePeriod (_last_seek_time.get(), _last_seek_time.get() + one_video_frame ()));
544         }
545
546         /* XXX: fill audio */
547
548         if (!earliest) {
549                 return true;
550         }
551
552         /* Emit any audio that is ready */
553
554         DCPTime pull_from = _playlist->length ();
555         for (map<AudioStreamPtr, StreamState>::const_iterator i = _stream_states.begin(); i != _stream_states.end(); ++i) {
556                 if (!i->second.piece->done && i->second.last_push_end < pull_from) {
557                         pull_from = i->second.last_push_end;
558                 }
559         }
560
561         list<pair<shared_ptr<AudioBuffers>, DCPTime> > audio = _audio_merger.pull (pull_from);
562         for (list<pair<shared_ptr<AudioBuffers>, DCPTime> >::iterator i = audio.begin(); i != audio.end(); ++i) {
563                 if (_last_audio_time && i->second < _last_audio_time.get()) {
564                         /* There has been an accurate seek and we have received some audio before the seek time;
565                            discard it.
566                         */
567                         pair<shared_ptr<AudioBuffers>, DCPTime> cut = discard_audio (i->first, i->second, *_last_audio_time);
568                         if (!cut.first) {
569                                 continue;
570                         }
571                         *i = cut;
572                 }
573
574                 if (_last_audio_time) {
575                         /* XXX: does this remain necessary? */
576                         fill_audio (DCPTimePeriod (_last_audio_time.get(), i->second));
577                 }
578
579                 Audio (i->first, i->second);
580                 _last_audio_time = i->second + DCPTime::from_frames(i->first->frames(), _film->audio_frame_rate());
581         }
582
583         return false;
584 }
585
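/** @return an image of the subtitles which should be burnt into the frame at @p time
 *  (those marked for burn-in, or all of them if _always_burn_subtitles is set), if there
 *  are any.
 */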
586 optional<PositionImage>
587 Player::subtitles_for_frame (DCPTime time) const
588 {
589         /* Get any subtitles */
590
591         optional<PositionImage> subtitles;
592
593         for (ActiveSubtitlesMap::const_iterator i = _active_subtitles.begin(); i != _active_subtitles.end(); ++i) {
594
595                 shared_ptr<Piece> sub_piece = i->first.lock ();
596                 if (!sub_piece) {
597                         continue;
598                 }
599
600                 if (!sub_piece->content->subtitle->use() || (!_always_burn_subtitles && !sub_piece->content->subtitle->burn())) {
601                         continue;
602                 }
603
604                 BOOST_FOREACH (ActiveSubtitles j, i->second) {
605
606                         if (j.from > time || (j.to && j.to.get() <= time)) {
607                                 continue;
608                         }
609
610                         list<PositionImage> sub_images;
611
612                         /* Image subtitles */
613                         list<PositionImage> c = transform_image_subtitles (j.subs.image);
614                         copy (c.begin(), c.end(), back_inserter (sub_images));
615
616                         /* Text subtitles (rendered to an image) */
617                         if (!j.subs.text.empty ()) {
618                                 list<PositionImage> s = render_subtitles (j.subs.text, j.subs.fonts, _video_container_size, time);
619                                 copy (s.begin (), s.end (), back_inserter (sub_images));
620                         }
621
622                         if (!sub_images.empty ()) {
623                                 subtitles = merge (sub_images);
624                         }
625                 }
626         }
627
628         return subtitles;
629 }
630
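/** Handler for video arriving from a piece's decoder: work out the frame's DCP time,
 *  discard it if it falls outside the content's period or before the last accurate seek,
 *  fill any gap since the last video we emitted, emit it with any subtitles burnt in and
 *  then drop active subtitles which have finished.
 */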
631 void
632 Player::video (weak_ptr<Piece> wp, ContentVideo video)
633 {
634         shared_ptr<Piece> piece = wp.lock ();
635         if (!piece) {
636                 return;
637         }
638
639         FrameRateChange frc(piece->content->active_video_frame_rate(), _film->video_frame_rate());
640         if (frc.skip && (video.frame % 2) == 1) {
641                 return;
642         }
643
644         /* Time and period of the frame we will emit */
645         DCPTime const time = content_video_to_dcp (piece, video.frame);
646         DCPTimePeriod const period (time, time + one_video_frame());
647
648         /* Discard if it's outside the content's period or if it's before the last accurate seek */
649         if (time < piece->content->position() || time >= piece->content->end() || (_last_video_time && time < _last_video_time)) {
650                 return;
651         }
652
653         /* Fill gaps caused by (the hopefully rare event of) a decoder not emitting contiguous video */
654
655         /* XXX: is this necessary? can it be done by the fill in pass? */
656         if (_last_video_time) {
657                 fill_video (DCPTimePeriod (_last_video_time.get(), time));
658         }
659
660         _last_video.reset (
661                 new PlayerVideo (
662                         video.image,
663                         piece->content->video->crop (),
664                         piece->content->video->fade (video.frame),
665                         piece->content->video->scale().size (
666                                 piece->content->video, _video_container_size, _film->frame_size ()
667                                 ),
668                         _video_container_size,
669                         video.eyes,
670                         video.part,
671                         piece->content->video->colour_conversion ()
672                         )
673                 );
674
675         optional<PositionImage> subtitles = subtitles_for_frame (time);
676         if (subtitles) {
677                 _last_video->set_subtitle (subtitles.get ());
678         }
679
680         Video (_last_video, time);
681
682         _last_video_time = time + one_video_frame ();
683
684         /* Clear any finished _active_subtitles */
685         ActiveSubtitlesMap updated;
686         for (ActiveSubtitlesMap::const_iterator i = _active_subtitles.begin(); i != _active_subtitles.end(); ++i) {
687                 list<ActiveSubtitles> as;
688                 BOOST_FOREACH (ActiveSubtitles j, i->second) {
689                         if (!j.to || j.to.get() >= time) {
690                                 as.push_back (j);
691                         }
692                 }
693                 if (!as.empty ()) {
694                         updated[i->first] = as;
695                 }
696         }
697         _active_subtitles = updated;
698 }
699
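/** Called when a piece with audio has finished: flush any frames remaining in the stream's
 *  resampler and send them on through audio_transform().
 */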
700 void
701 Player::audio_flush (shared_ptr<Piece> piece, AudioStreamPtr stream)
702 {
703         shared_ptr<AudioContent> content = piece->content->audio;
704         DCPOMATIC_ASSERT (content);
705
706         shared_ptr<Resampler> r = resampler (content, stream, false);
707         if (!r) {
708                 return;
709         }
710
711         pair<shared_ptr<const AudioBuffers>, Frame> ro = r->flush ();
712         if (ro.first->frames() == 0) {
713                 return;
714         }
715
716         ContentAudio content_audio;
717         content_audio.audio = ro.first;
718         content_audio.frame = ro.second;
719
720         /* Compute time in the DCP */
721         DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame) + DCPTime::from_seconds (content->delay() / 1000.0);
722
723         audio_transform (content, stream, content_audio, time);
724 }
725
726 /** Do our common processing on some audio */
727 void
728 Player::audio_transform (shared_ptr<AudioContent> content, AudioStreamPtr stream, ContentAudio content_audio, DCPTime time)
729 {
730         DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);
731
732         /* Gain */
733
734         if (content->gain() != 0) {
735                 shared_ptr<AudioBuffers> gain (new AudioBuffers (content_audio.audio));
736                 gain->apply_gain (content->gain ());
737                 content_audio.audio = gain;
738         }
739
740         /* Remap */
741
742         shared_ptr<AudioBuffers> dcp_mapped (new AudioBuffers (_film->audio_channels(), content_audio.audio->frames()));
743         dcp_mapped->make_silent ();
744
745         AudioMapping map = stream->mapping ();
746         for (int i = 0; i < map.input_channels(); ++i) {
747                 for (int j = 0; j < dcp_mapped->channels(); ++j) {
748                         if (map.get (i, static_cast<dcp::Channel> (j)) > 0) {
749                                 dcp_mapped->accumulate_channel (
750                                         content_audio.audio.get(),
751                                         i,
752                                         static_cast<dcp::Channel> (j),
753                                         map.get (i, static_cast<dcp::Channel> (j))
754                                         );
755                         }
756                 }
757         }
758
759         content_audio.audio = dcp_mapped;
760
761         /* Process */
762
763         if (_audio_processor) {
764                 content_audio.audio = _audio_processor->run (content_audio.audio, _film->audio_channels ());
765         }
766
767         /* Push */
768
769         _audio_merger.push (content_audio.audio, time);
770         DCPOMATIC_ASSERT (_stream_states.find (stream) != _stream_states.end ());
771         _stream_states[stream].last_push_end = time + DCPTime::from_frames (content_audio.audio->frames(), _film->audio_frame_rate());
772 }
773
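/** Handler for audio arriving from a piece's decoder: resample it if the stream's rate
 *  differs from the content's resampled rate, work out its DCP time, trim anything outside
 *  the content's period and pass the rest to audio_transform().
 */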
774 void
775 Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_audio)
776 {
777         DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);
778
779         shared_ptr<Piece> piece = wp.lock ();
780         if (!piece) {
781                 return;
782         }
783
784         shared_ptr<AudioContent> content = piece->content->audio;
785         DCPOMATIC_ASSERT (content);
786
787         /* Resample */
788         if (stream->frame_rate() != content->resampled_frame_rate()) {
789                 shared_ptr<Resampler> r = resampler (content, stream, true);
790                 pair<shared_ptr<const AudioBuffers>, Frame> ro = r->run (content_audio.audio, content_audio.frame);
791                 if (ro.first->frames() == 0) {
792                         return;
793                 }
794                 content_audio.audio = ro.first;
795                 content_audio.frame = ro.second;
796         }
797
798         /* Compute time in the DCP */
799         DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame) + DCPTime::from_seconds (content->delay() / 1000.0);
800         /* And the end of this block in the DCP */
801         DCPTime end = time + DCPTime::from_frames(content_audio.audio->frames(), content->resampled_frame_rate());
802
803         /* Remove anything that comes before the start or after the end of the content */
804         if (time < piece->content->position()) {
805                 pair<shared_ptr<AudioBuffers>, DCPTime> cut = discard_audio (content_audio.audio, time, piece->content->position());
806                 if (!cut.first) {
807                         /* This audio is entirely discarded */
808                         return;
809                 }
810                 content_audio.audio = cut.first;
811                 time = cut.second;
812         } else if (time > piece->content->end()) {
813                 /* Discard it all */
814                 return;
815         } else if (end > piece->content->end()) {
816                 Frame const remaining_frames = DCPTime(piece->content->end() - time).frames_round(_film->audio_frame_rate());
817                 DCPOMATIC_ASSERT (remaining_frames > 0);
818                 shared_ptr<AudioBuffers> cut (new AudioBuffers (content_audio.audio->channels(), remaining_frames));
819                 cut->copy_from (content_audio.audio.get(), remaining_frames, 0, 0);
820                 content_audio.audio = cut;
821         }
822
823         audio_transform (content, stream, content_audio, time);
824 }
825
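/** Handler for the start of an image subtitle: apply the content's subtitle offset and
 *  scale settings (plus a translation to keep the image centred after scaling) and add
 *  the result to _active_subtitles.
 */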
826 void
827 Player::image_subtitle_start (weak_ptr<Piece> wp, ContentImageSubtitle subtitle)
828 {
829         shared_ptr<Piece> piece = wp.lock ();
830         if (!piece) {
831                 return;
832         }
833
834         /* Apply content's subtitle offsets */
835         subtitle.sub.rectangle.x += piece->content->subtitle->x_offset ();
836         subtitle.sub.rectangle.y += piece->content->subtitle->y_offset ();
837
838         /* Apply content's subtitle scale */
839         subtitle.sub.rectangle.width *= piece->content->subtitle->x_scale ();
840         subtitle.sub.rectangle.height *= piece->content->subtitle->y_scale ();
841
842         /* Apply a corrective translation to keep the subtitle centred after that scale */
843         subtitle.sub.rectangle.x -= subtitle.sub.rectangle.width * (piece->content->subtitle->x_scale() - 1);
844         subtitle.sub.rectangle.y -= subtitle.sub.rectangle.height * (piece->content->subtitle->y_scale() - 1);
845
846         PlayerSubtitles ps;
847         ps.image.push_back (subtitle.sub);
848         DCPTime from (content_time_to_dcp (piece, subtitle.from()));
849
850         if (_active_subtitles.find(wp) == _active_subtitles.end()) {
851                 _active_subtitles[wp] = list<ActiveSubtitles>();
852         }
853         _active_subtitles[wp].push_back (ActiveSubtitles (ps, from));
854 }
855
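/** Handler for the start of a text subtitle: apply the content's offset and scale settings
 *  to each SubtitleString, expressing the common part of the scale through the font size
 *  and the rest through an aspect adjustment, and add the result to _active_subtitles.
 */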
856 void
857 Player::text_subtitle_start (weak_ptr<Piece> wp, ContentTextSubtitle subtitle)
858 {
859         shared_ptr<Piece> piece = wp.lock ();
860         if (!piece) {
861                 return;
862         }
863
864         PlayerSubtitles ps;
865         DCPTime const from (content_time_to_dcp (piece, subtitle.from()));
866
867         BOOST_FOREACH (dcp::SubtitleString s, subtitle.subs) {
868                 s.set_h_position (s.h_position() + piece->content->subtitle->x_offset ());
869                 s.set_v_position (s.v_position() + piece->content->subtitle->y_offset ());
870                 float const xs = piece->content->subtitle->x_scale();
871                 float const ys = piece->content->subtitle->y_scale();
872                 float size = s.size();
873
874                 /* Adjust size to express the common part of the scaling;
875                    e.g. if xs = ys = 0.5 we multiply the size by 0.5.
876                 */
877                 if (xs > 1e-5 && ys > 1e-5) {
878                         size *= 1 / min (1 / xs, 1 / ys);
879                 }
880                 s.set_size (size);
881
882                 /* Then express aspect ratio changes */
883                 if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) {
884                         s.set_aspect_adjust (xs / ys);
885                 }
886
887                 s.set_in (dcp::Time(from.seconds(), 1000));
888                 ps.text.push_back (SubtitleString (s, piece->content->subtitle->outline_width()));
889                 ps.add_fonts (piece->content->subtitle->fonts ());
890         }
891
892         if (_active_subtitles.find(wp) == _active_subtitles.end()) {
893                 _active_subtitles[wp] = list<ActiveSubtitles> ();
894         }
895         _active_subtitles[wp].push_back (ActiveSubtitles (ps, from));
896 }
897
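/** Handler for the end of a subtitle: record its `to' time and, if it is in use but not
 *  being burnt into the image, emit it via the Subtitle signal.
 */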
898 void
899 Player::subtitle_stop (weak_ptr<Piece> wp, ContentTime to)
900 {
901         if (_active_subtitles.find (wp) == _active_subtitles.end ()) {
902                 return;
903         }
904
905         shared_ptr<Piece> piece = wp.lock ();
906         if (!piece) {
907                 return;
908         }
909
910         DCPTime const dcp_to = content_time_to_dcp (piece, to);
911
912         if (piece->content->subtitle->use() && !_always_burn_subtitles && !piece->content->subtitle->burn()) {
913                 Subtitle (_active_subtitles[wp].back().subs, DCPTimePeriod (_active_subtitles[wp].back().from, dcp_to));
914         }
915
916         _active_subtitles[wp].back().to = dcp_to;
917
918         BOOST_FOREACH (SubtitleString& i, _active_subtitles[wp].back().subs.text) {
919                 i.set_out (dcp::Time(dcp_to.seconds(), 1000));
920         }
921 }
922
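/** Seek so that the next pass() will emit data from @p time.
 *  @param accurate true to seek frame-accurately (data before @p time will be decoded and
 *  discarded as necessary), false to accept a less precise seek (e.g. to a nearby keyframe).
 */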
923 void
924 Player::seek (DCPTime time, bool accurate)
925 {
926         if (_audio_processor) {
927                 _audio_processor->flush ();
928         }
929
930         for (ResamplerMap::iterator i = _resamplers.begin(); i != _resamplers.end(); ++i) {
931                 i->second->flush ();
932                 i->second->reset ();
933         }
934
935         _audio_merger.clear ();
936         _active_subtitles.clear ();
937
938         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
939                 if (time < i->content->position()) {
940                         /* Before; seek to 0 */
941                         i->decoder->seek (ContentTime(), accurate);
942                         i->done = false;
943                 } else if (i->content->position() <= time && time < i->content->end()) {
944                         /* During; seek to position */
945                         i->decoder->seek (dcp_to_content_time (i, time), accurate);
946                         i->done = false;
947                 } else {
948                         /* After; this piece is done */
949                         i->done = true;
950                 }
951         }
952
953         if (accurate) {
954                 _last_video_time = time;
955                 _last_audio_time = time;
956         } else {
957                 _last_video_time = optional<DCPTime> ();
958                 _last_audio_time = optional<DCPTime> ();
959         }
960
961         _last_seek_time = time;
962 }
963
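/** Get the Resampler for a given (content, stream) pair.
 *  @param create true to create a new Resampler if one does not already exist; if it is
 *  false and there is no existing Resampler, an empty pointer is returned.
 */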
964 shared_ptr<Resampler>
965 Player::resampler (shared_ptr<const AudioContent> content, AudioStreamPtr stream, bool create)
966 {
967         ResamplerMap::const_iterator i = _resamplers.find (make_pair (content, stream));
968         if (i != _resamplers.end ()) {
969                 return i->second;
970         }
971
972         if (!create) {
973                 return shared_ptr<Resampler> ();
974         }
975
976         LOG_GENERAL (
977                 "Creating new resampler from %1 to %2 with %3 channels",
978                 stream->frame_rate(),
979                 content->resampled_frame_rate(),
980                 stream->channels()
981                 );
982
983         shared_ptr<Resampler> r (
984                 new Resampler (stream->frame_rate(), content->resampled_frame_rate(), stream->channels())
985                 );
986
987         _resamplers[make_pair(content, stream)] = r;
988         return r;
989 }
990
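/** Emit video to fill @p period, skipping any part of it which will be provided by
 *  referenced DCP video (_no_video).  Where the playlist has video content we repeat the
 *  last frame that was emitted; elsewhere we emit black, with subtitles burnt in if necessary.
 */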
991 void
992 Player::fill_video (DCPTimePeriod period)
993 {
994         /* XXX: this may not work for 3D */
995         BOOST_FOREACH (DCPTimePeriod i, subtract(period, _no_video)) {
996                 for (DCPTime j = i.from; j < i.to; j += one_video_frame()) {
997                         if (_playlist->video_content_at(j) && _last_video) {
998                                 Video (shared_ptr<PlayerVideo> (new PlayerVideo (*_last_video)), j);
999                         } else {
1000                                 shared_ptr<PlayerVideo> black = black_player_video_frame ();
1001                                 optional<PositionImage> subtitles = subtitles_for_frame (j);
1002                                 if (subtitles) {
1003                                         black->set_subtitle (subtitles.get ());
1004                                 }
1005                                 Video (black, j);
1006                         }
1007                         _last_video_time = j;
1008                 }
1009         }
1010 }
1011
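/** Emit silence to fill @p period, skipping any part of it which will be provided by
 *  referenced DCP audio (_no_audio).  Silence is emitted in blocks of at most half a second;
 *  e.g. a 1.3s gap at 48kHz becomes blocks of 24000, 24000 and 14400 samples.
 */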
1012 void
1013 Player::fill_audio (DCPTimePeriod period)
1014 {
1015         BOOST_FOREACH (DCPTimePeriod i, subtract(period, _no_audio)) {
1016                 DCPTime t = i.from;
1017                 while (t < i.to) {
1018                         DCPTime block = min (DCPTime::from_seconds (0.5), i.to - t);
1019                         Frame const samples = block.frames_round(_film->audio_frame_rate());
1020                         if (samples) {
1021                                 shared_ptr<AudioBuffers> silence (new AudioBuffers (_film->audio_channels(), samples));
1022                                 silence->make_silent ();
1023                                 Audio (silence, t);
1024                         }
1025                         t += block;
1026                 }
1027         }
1028 }
1029
1030 DCPTime
1031 Player::one_video_frame () const
1032 {
1033         return DCPTime::from_frames (1, _film->video_frame_rate ());
1034 }
1035
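/** Discard the part of @p audio (which starts at @p time) that falls before @p discard_to.
 *  @return the remaining audio and the DCP time at which it starts, or an empty pointer if
 *  nothing remains.
 */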
1036 pair<shared_ptr<AudioBuffers>, DCPTime>
1037 Player::discard_audio (shared_ptr<const AudioBuffers> audio, DCPTime time, DCPTime discard_to) const
1038 {
1039         DCPTime const discard_time = discard_to - time;
1040         Frame const discard_frames = discard_time.frames_round(_film->audio_frame_rate());
1041         Frame remaining_frames = audio->frames() - discard_frames;
1042         if (remaining_frames <= 0) {
1043                 return make_pair(shared_ptr<AudioBuffers>(), DCPTime());
1044         }
1045         shared_ptr<AudioBuffers> cut (new AudioBuffers (audio->channels(), remaining_frames));
1046         cut->copy_from (audio.get(), remaining_frames, discard_frames, 0);
1047         return make_pair(cut, time + discard_time);
1048 }