Discard audio received before the time of the last accurate seek.
src/lib/player.cc
1 /*
2     Copyright (C) 2013-2017 Carl Hetherington <cth@carlh.net>
3
4     This file is part of DCP-o-matic.
5
6     DCP-o-matic is free software; you can redistribute it and/or modify
7     it under the terms of the GNU General Public License as published by
8     the Free Software Foundation; either version 2 of the License, or
9     (at your option) any later version.
10
11     DCP-o-matic is distributed in the hope that it will be useful,
12     but WITHOUT ANY WARRANTY; without even the implied warranty of
13     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14     GNU General Public License for more details.
15
16     You should have received a copy of the GNU General Public License
17     along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.
18
19 */
20
21 #include "player.h"
22 #include "film.h"
23 #include "audio_buffers.h"
24 #include "content_audio.h"
25 #include "dcp_content.h"
26 #include "job.h"
27 #include "image.h"
28 #include "raw_image_proxy.h"
29 #include "ratio.h"
30 #include "log.h"
31 #include "render_subtitles.h"
32 #include "config.h"
33 #include "content_video.h"
34 #include "player_video.h"
35 #include "frame_rate_change.h"
36 #include "audio_processor.h"
37 #include "playlist.h"
38 #include "referenced_reel_asset.h"
39 #include "decoder_factory.h"
40 #include "decoder.h"
41 #include "video_decoder.h"
42 #include "audio_decoder.h"
43 #include "subtitle_content.h"
44 #include "subtitle_decoder.h"
45 #include "ffmpeg_content.h"
46 #include "audio_content.h"
47 #include "content_subtitle.h"
48 #include "dcp_decoder.h"
49 #include "image_decoder.h"
50 #include "resampler.h"
51 #include "compose.hpp"
52 #include <dcp/reel.h>
53 #include <dcp/reel_sound_asset.h>
54 #include <dcp/reel_subtitle_asset.h>
55 #include <dcp/reel_picture_asset.h>
56 #include <boost/foreach.hpp>
57 #include <stdint.h>
58 #include <algorithm>
59 #include <iostream>
60
61 #include "i18n.h"
62
63 #define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);
64
65 using std::list;
66 using std::cout;
67 using std::min;
68 using std::max;
70 using std::vector;
71 using std::pair;
72 using std::map;
73 using std::make_pair;
74 using std::copy;
75 using boost::shared_ptr;
76 using boost::weak_ptr;
77 using boost::dynamic_pointer_cast;
78 using boost::optional;
79 using boost::scoped_ptr;
80
81 Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
82         : _film (film)
83         , _playlist (playlist)
84         , _have_valid_pieces (false)
85         , _ignore_video (false)
86         , _ignore_audio (false)
87         , _always_burn_subtitles (false)
88         , _fast (false)
89         , _play_referenced (false)
90         , _audio_merger (_film->audio_frame_rate())
91 {
92         _film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
93         _playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
94         _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::playlist_content_changed, this, _1, _2, _3));
95         set_video_container_size (_film->frame_size ());
96
97         film_changed (Film::AUDIO_PROCESSOR);
98
99         seek (DCPTime (), true);
100 }
101
102 void
103 Player::setup_pieces ()
104 {
105         _pieces.clear ();
106
107         BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
108
109                 if (!i->paths_valid ()) {
110                         continue;
111                 }
112
113                 shared_ptr<Decoder> decoder = decoder_factory (i, _film->log());
114                 FrameRateChange frc (i->active_video_frame_rate(), _film->video_frame_rate());
115
116                 if (!decoder) {
117                         /* Not something that we can decode; e.g. Atmos content */
118                         continue;
119                 }
120
121                 if (decoder->video && _ignore_video) {
122                         decoder->video->set_ignore ();
123                 }
124
125                 if (decoder->audio && _ignore_audio) {
126                         decoder->audio->set_ignore ();
127                 }
128
129                 shared_ptr<DCPDecoder> dcp = dynamic_pointer_cast<DCPDecoder> (decoder);
130                 if (dcp && _play_referenced) {
131                         dcp->set_decode_referenced ();
132                 }
133
134                 shared_ptr<Piece> piece (new Piece (i, decoder, frc));
135                 _pieces.push_back (piece);
136
137                 if (decoder->video) {
138                         decoder->video->Data.connect (bind (&Player::video, this, weak_ptr<Piece> (piece), _1));
139                 }
140
141                 if (decoder->audio) {
142                         decoder->audio->Data.connect (bind (&Player::audio, this, weak_ptr<Piece> (piece), _1, _2));
143                 }
144
145                 if (decoder->subtitle) {
146                         decoder->subtitle->ImageData.connect (bind (&Player::image_subtitle, this, weak_ptr<Piece> (piece), _1));
147                         decoder->subtitle->TextData.connect (bind (&Player::text_subtitle, this, weak_ptr<Piece> (piece), _1));
148                 }
149         }
150
151         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
152                 if (i->content->audio) {
153                         BOOST_FOREACH (AudioStreamPtr j, i->content->audio->streams()) {
154                                 _stream_states[j] = StreamState (i, i->content->position ());
155                         }
156                 }
157         }
158
159         if (!_play_referenced) {
160                 BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
161                         shared_ptr<DCPContent> dc = dynamic_pointer_cast<DCPContent> (i->content);
162                         if (dc) {
163                                 if (dc->reference_video()) {
164                                         _no_video.push_back (DCPTimePeriod (dc->position(), dc->end()));
165                                 }
166                                 if (dc->reference_audio()) {
167                                         _no_audio.push_back (DCPTimePeriod (dc->position(), dc->end()));
168                                 }
169                         }
170                 }
171         }
172
173         _last_video_time = optional<DCPTime> ();
174         _last_audio_time = optional<DCPTime> ();
175         _have_valid_pieces = true;
176 }
177
178 void
179 Player::playlist_content_changed (weak_ptr<Content> w, int property, bool frequent)
180 {
181         shared_ptr<Content> c = w.lock ();
182         if (!c) {
183                 return;
184         }
185
186         if (
187                 property == ContentProperty::POSITION ||
188                 property == ContentProperty::LENGTH ||
189                 property == ContentProperty::TRIM_START ||
190                 property == ContentProperty::TRIM_END ||
191                 property == ContentProperty::PATH ||
192                 property == VideoContentProperty::FRAME_TYPE ||
193                 property == DCPContentProperty::NEEDS_ASSETS ||
194                 property == DCPContentProperty::NEEDS_KDM ||
195                 property == SubtitleContentProperty::COLOUR ||
196                 property == SubtitleContentProperty::OUTLINE ||
197                 property == SubtitleContentProperty::SHADOW ||
198                 property == SubtitleContentProperty::EFFECT_COLOUR ||
199                 property == FFmpegContentProperty::SUBTITLE_STREAM ||
200                 property == VideoContentProperty::COLOUR_CONVERSION
201                 ) {
202
203                 _have_valid_pieces = false;
204                 Changed (frequent);
205
206         } else if (
207                 property == SubtitleContentProperty::LINE_SPACING ||
208                 property == SubtitleContentProperty::OUTLINE_WIDTH ||
209                 property == SubtitleContentProperty::Y_SCALE ||
210                 property == SubtitleContentProperty::FADE_IN ||
211                 property == SubtitleContentProperty::FADE_OUT ||
212                 property == ContentProperty::VIDEO_FRAME_RATE ||
213                 property == SubtitleContentProperty::USE ||
214                 property == SubtitleContentProperty::X_OFFSET ||
215                 property == SubtitleContentProperty::Y_OFFSET ||
216                 property == SubtitleContentProperty::X_SCALE ||
217                 property == SubtitleContentProperty::FONTS ||
218                 property == VideoContentProperty::CROP ||
219                 property == VideoContentProperty::SCALE ||
220                 property == VideoContentProperty::FADE_IN ||
221                 property == VideoContentProperty::FADE_OUT
222                 ) {
223
224                 Changed (frequent);
225         }
226 }
227
228 void
229 Player::set_video_container_size (dcp::Size s)
230 {
231         if (s == _video_container_size) {
232                 return;
233         }
234
235         _video_container_size = s;
236
237         _black_image.reset (new Image (AV_PIX_FMT_RGB24, _video_container_size, true));
238         _black_image->make_black ();
239
240         Changed (false);
241 }
242
243 void
244 Player::playlist_changed ()
245 {
246         _have_valid_pieces = false;
247         Changed (false);
248 }
249
250 void
251 Player::film_changed (Film::Property p)
252 {
253         /* Here we should notice Film properties that affect our output, and
254            alert listeners that our output now would be different to how it was
255            last time we were run.
256         */
257
258         if (p == Film::CONTAINER) {
259                 Changed (false);
260         } else if (p == Film::VIDEO_FRAME_RATE) {
261                 /* Pieces contain a FrameRateChange which contains the DCP frame rate,
262                    so we need new pieces here.
263                 */
264                 _have_valid_pieces = false;
265                 Changed (false);
266         } else if (p == Film::AUDIO_PROCESSOR) {
267                 if (_film->audio_processor ()) {
268                         _audio_processor = _film->audio_processor()->clone (_film->audio_frame_rate ());
269                 }
270         }
271 }
272
273 list<PositionImage>
274 Player::transform_image_subtitles (list<ImageSubtitle> subs) const
275 {
276         list<PositionImage> all;
277
278         for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
279                 if (!i->image) {
280                         continue;
281                 }
282
283                 /* We will scale the subtitle up to fit _video_container_size */
284                 dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);
285
286                 /* Then we need to position the scaled subtitle within the container.  The rectangle's
287                  * x and y are expressed as proportions of the container, so the position is simply
288                  *
289                  * rect.x * _video_container_size.width and rect.y * _video_container_size.height.
290                  *
291                  * The content's subtitle x/y scale, and the corrective translation which keeps the
292                  * subtitle centred after that scale, have already been applied to the rectangle in
293                  * image_subtitle(), so no further correction is needed here.
294                  *
295                  * These give the expressions below.
296                  */
297
298                 all.push_back (
299                         PositionImage (
300                                 i->image->scale (
301                                         scaled_size,
302                                         dcp::YUV_TO_RGB_REC601,
303                                         i->image->pixel_format (),
304                                         true,
305                                         _fast
306                                         ),
307                                 Position<int> (
308                                         lrint (_video_container_size.width * i->rectangle.x),
309                                         lrint (_video_container_size.height * i->rectangle.y)
310                                         )
311                                 )
312                         );
313         }
314
315         return all;
316 }
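
/* Worked example for transform_image_subtitles() (illustrative figures, not from any real content):
   with _video_container_size = 1998x1080 and a subtitle rectangle of x=0.1, y=0.8, width=0.5, height=0.1,
   the image is scaled to 999x108 and positioned at (lrint(1998 * 0.1), lrint(1080 * 0.8)) = (200, 864).
*/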
317
318 shared_ptr<PlayerVideo>
319 Player::black_player_video_frame () const
320 {
321         return shared_ptr<PlayerVideo> (
322                 new PlayerVideo (
323                         shared_ptr<const ImageProxy> (new RawImageProxy (_black_image)),
324                         Crop (),
325                         optional<double> (),
326                         _video_container_size,
327                         _video_container_size,
328                         EYES_BOTH,
329                         PART_WHOLE,
330                         PresetColourConversion::all().front().conversion
331                 )
332         );
333 }
334
335 Frame
336 Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
337 {
338         DCPTime s = t - piece->content->position ();
339         s = min (piece->content->length_after_trim(), s);
340         s = max (DCPTime(), s + DCPTime (piece->content->trim_start(), piece->frc));
341
342         /* It might seem more logical here to convert s to a ContentTime (using the FrameRateChange)
343            then convert that ContentTime to frames at the content's rate.  However this fails for
344            situations like content at 29.9978733fps, DCP at 30fps.  The accuracy of the Time type is not
345            enough to distinguish between the two with low values of time (e.g. 3200 in Time units).
346
347            Instead we convert the DCPTime using the DCP video rate then account for any skip/repeat.
348         */
349         return s.frames_floor (piece->frc.dcp) / piece->frc.factor ();
350 }
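
/* Worked example (illustrative, and assuming FrameRateChange::factor() returns 0.5 when skip is true):
   content at 48fps in a 24fps DCP gives frc.skip == true.  For s = 1 second,
   s.frames_floor (piece->frc.dcp) = 24, and dividing by factor() = 0.5 yields content frame 48,
   i.e. one second into the 48fps source.
*/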
351
352 DCPTime
353 Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
354 {
355         /* See comment in dcp_to_content_video */
356         DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime (piece->content->trim_start (), piece->frc);
357         return max (DCPTime (), d + piece->content->position ());
358 }
359
360 Frame
361 Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
362 {
363         DCPTime s = t - piece->content->position ();
364         s = min (piece->content->length_after_trim(), s);
365         /* See notes in dcp_to_content_video */
366         return max (DCPTime (), DCPTime (piece->content->trim_start (), piece->frc) + s).frames_floor (_film->audio_frame_rate ());
367 }
368
369 DCPTime
370 Player::resampled_audio_to_dcp (shared_ptr<const Piece> piece, Frame f) const
371 {
372         /* See comment in dcp_to_content_video */
373         DCPTime const d = DCPTime::from_frames (f, _film->audio_frame_rate()) - DCPTime (piece->content->trim_start(), piece->frc);
374         return max (DCPTime (), d + piece->content->position ());
375 }
376
377 ContentTime
378 Player::dcp_to_content_time (shared_ptr<const Piece> piece, DCPTime t) const
379 {
380         DCPTime s = t - piece->content->position ();
381         s = min (piece->content->length_after_trim(), s);
382         return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
383 }
384
385 DCPTime
386 Player::content_time_to_dcp (shared_ptr<const Piece> piece, ContentTime t) const
387 {
388         return max (DCPTime (), DCPTime (t - piece->content->trim_start(), piece->frc) + piece->content->position());
389 }
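
/* Worked example for the two conversions above (illustrative, assuming a 1:1 frame rate change):
   for content positioned at 10s with trim_start = 2s, DCP time 10s maps to content time 2s
   (dcp_to_content_time), and content time 2s maps back to DCP time 10s (content_time_to_dcp);
   the max() calls clamp any negative intermediate result to zero.
*/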
390
391 list<shared_ptr<Font> >
392 Player::get_subtitle_fonts ()
393 {
394         if (!_have_valid_pieces) {
395                 setup_pieces ();
396         }
397
398         list<shared_ptr<Font> > fonts;
399         BOOST_FOREACH (shared_ptr<Piece>& p, _pieces) {
400                 if (p->content->subtitle) {
401                         /* XXX: things may go wrong if there are duplicate font IDs
402                            with different font files.
403                         */
404                         list<shared_ptr<Font> > f = p->content->subtitle->fonts ();
405                         copy (f.begin(), f.end(), back_inserter (fonts));
406                 }
407         }
408
409         return fonts;
410 }
411
412 /** Set this player never to produce any video data */
413 void
414 Player::set_ignore_video ()
415 {
416         _ignore_video = true;
417 }
418
419 /** Set this player never to produce any audio data */
420 void
421 Player::set_ignore_audio ()
422 {
423         _ignore_audio = true;
424 }
425
426 /** Set whether or not this player should always burn text subtitles into the image,
427  *  regardless of the content settings.
428  *  @param burn true to always burn subtitles, false to obey content settings.
429  */
430 void
431 Player::set_always_burn_subtitles (bool burn)
432 {
433         _always_burn_subtitles = burn;
434 }
435
436 void
437 Player::set_fast ()
438 {
439         _fast = true;
440         _have_valid_pieces = false;
441 }
442
443 void
444 Player::set_play_referenced ()
445 {
446         _play_referenced = true;
447         _have_valid_pieces = false;
448 }
449
450 list<ReferencedReelAsset>
451 Player::get_reel_assets ()
452 {
453         list<ReferencedReelAsset> a;
454
455         BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
456                 shared_ptr<DCPContent> j = dynamic_pointer_cast<DCPContent> (i);
457                 if (!j) {
458                         continue;
459                 }
460
461                 scoped_ptr<DCPDecoder> decoder;
462                 try {
463                         decoder.reset (new DCPDecoder (j, _film->log()));
464                 } catch (...) {
465                         return a;
466                 }
467
468                 int64_t offset = 0;
469                 BOOST_FOREACH (shared_ptr<dcp::Reel> k, decoder->reels()) {
470
471                         DCPOMATIC_ASSERT (j->video_frame_rate ());
472                         double const cfr = j->video_frame_rate().get();
473                         Frame const trim_start = j->trim_start().frames_round (cfr);
474                         Frame const trim_end = j->trim_end().frames_round (cfr);
475                         int const ffr = _film->video_frame_rate ();
476
477                         DCPTime const from = i->position() + DCPTime::from_frames (offset, _film->video_frame_rate());
478                         if (j->reference_video ()) {
479                                 shared_ptr<dcp::ReelAsset> ra = k->main_picture ();
480                                 DCPOMATIC_ASSERT (ra);
481                                 ra->set_entry_point (ra->entry_point() + trim_start);
482                                 ra->set_duration (ra->duration() - trim_start - trim_end);
483                                 a.push_back (
484                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
485                                         );
486                         }
487
488                         if (j->reference_audio ()) {
489                                 shared_ptr<dcp::ReelAsset> ra = k->main_sound ();
490                                 DCPOMATIC_ASSERT (ra);
491                                 ra->set_entry_point (ra->entry_point() + trim_start);
492                                 ra->set_duration (ra->duration() - trim_start - trim_end);
493                                 a.push_back (
494                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
495                                         );
496                         }
497
498                         if (j->reference_subtitle ()) {
499                                 shared_ptr<dcp::ReelAsset> ra = k->main_subtitle ();
500                                 DCPOMATIC_ASSERT (ra);
501                                 ra->set_entry_point (ra->entry_point() + trim_start);
502                                 ra->set_duration (ra->duration() - trim_start - trim_end);
503                                 a.push_back (
504                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
505                                         );
506                         }
507
508                         /* Assume that main picture duration is the length of the reel */
509                         offset += k->main_picture()->duration ();
510                 }
511         }
512
513         return a;
514 }
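
/* Illustrative example: a referenced DCP positioned at 0 with a single 240-frame reel at 24fps and
   trim_start = 1s (24 frames): the reel asset's entry point is advanced by 24 frames, its duration
   becomes 216 frames, and the ReferencedReelAsset covers the period from the content's position to
   position + 9s in the film.
*/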
515
516 list<shared_ptr<Piece> >
517 Player::overlaps (DCPTime from, DCPTime to, boost::function<bool (Content *)> valid)
518 {
519         if (!_have_valid_pieces) {
520                 setup_pieces ();
521         }
522
523         list<shared_ptr<Piece> > overlaps;
524         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
525                 if (valid (i->content.get ()) && i->content->position() < to && i->content->end() > from) {
526                         overlaps.push_back (i);
527                 }
528         }
529
530         return overlaps;
531 }
532
533 bool
534 Player::pass ()
535 {
536         if (!_have_valid_pieces) {
537                 setup_pieces ();
538         }
539
540         shared_ptr<Piece> earliest;
541         DCPTime earliest_content;
542
543         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
544                 if (!i->done) {
545                         DCPTime const t = i->content->position() + DCPTime (i->decoder->position(), i->frc);
546                         if (!earliest || t < earliest_content) {
547                                 earliest_content = t;
548                                 earliest = i;
549                         }
550                 }
551         }
552
553         if (!earliest) {
554                 /* No more content; fill up with silent black */
555                 DCPTimePeriod remaining_video (DCPTime(), _playlist->length());
556                 if (_last_video_time) {
557                         remaining_video.from = _last_video_time.get() + one_video_frame();
558                 }
559                 fill_video (remaining_video);
560                 DCPTimePeriod remaining_audio (DCPTime(), _playlist->length());
561                 if (_last_audio_time) {
562                         remaining_audio.from = _last_audio_time.get();
563                 }
564                 fill_audio (remaining_audio);
565                 return true;
566         }
567
568         earliest->done = earliest->decoder->pass ();
569         if (earliest->done && earliest->content->audio) {
570                 /* Flush the Player audio system for this piece */
571                 BOOST_FOREACH (AudioStreamPtr i, earliest->content->audio->streams()) {
572                         audio_flush (earliest, i);
573                 }
574         }
575
576         /* Emit any audio that is ready */
577
578         DCPTime pull_from = _playlist->length ();
579         for (map<AudioStreamPtr, StreamState>::const_iterator i = _stream_states.begin(); i != _stream_states.end(); ++i) {
580                 if (!i->second.piece->done && i->second.last_push_end < pull_from) {
581                         pull_from = i->second.last_push_end;
582                 }
583         }
584
585         list<pair<shared_ptr<AudioBuffers>, DCPTime> > audio = _audio_merger.pull (pull_from);
586         for (list<pair<shared_ptr<AudioBuffers>, DCPTime> >::iterator i = audio.begin(); i != audio.end(); ++i) {
587                 if (_last_audio_time && i->second < _last_audio_time.get()) {
588                         /* There has been an accurate seek and we have received some audio before the seek time;
589                            discard it.
590                         */
591                         pair<shared_ptr<AudioBuffers>, DCPTime> cut = discard_audio (i->first, i->second, *_last_audio_time);
592                         if (!cut.first) {
593                                 continue;
594                         }
595                         *i = cut;
596                 }
597
598                 if (_last_audio_time) {
599                         fill_audio (DCPTimePeriod (_last_audio_time.get(), i->second));
600                 }
601
602                 Audio (i->first, i->second);
603                 _last_audio_time = i->second + DCPTime::from_frames(i->first->frames(), _film->audio_frame_rate());
604         }
605
606         return false;
607 }
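
/* Audio emission in pass(), by example (illustrative): after an accurate seek to 10s, _last_audio_time
   is 10s.  If a decoder then emits a block whose merged output starts at 9.98s, discard_audio() trims
   the first 0.02s so that the Audio signal starts exactly at 10s; if instead a block starts after
   _last_audio_time, fill_audio() first emits silence to cover the gap.
*/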
608
609 void
610 Player::video (weak_ptr<Piece> wp, ContentVideo video)
611 {
612         shared_ptr<Piece> piece = wp.lock ();
613         if (!piece) {
614                 return;
615         }
616
617         FrameRateChange frc(piece->content->active_video_frame_rate(), _film->video_frame_rate());
618         if (frc.skip && (video.frame % 2) == 1) {
619                 return;
620         }
621
622         /* Time and period of the frame we will emit */
623         DCPTime const time = content_video_to_dcp (piece, video.frame);
624         DCPTimePeriod const period (time, time + one_video_frame());
625
626         /* Discard if it's outside the content's period */
627         if (time < piece->content->position() || time >= piece->content->end()) {
628                 return;
629         }
630
631         /* Get any subtitles */
632
633         optional<PositionImage> subtitles;
634
635         for (list<pair<PlayerSubtitles, DCPTimePeriod> >::const_iterator i = _subtitles.begin(); i != _subtitles.end(); ++i) {
636
637                 if (!i->second.overlap (period)) {
638                         continue;
639                 }
640
641                 list<PositionImage> sub_images;
642
643                 /* Image subtitles */
644                 list<PositionImage> c = transform_image_subtitles (i->first.image);
645                 copy (c.begin(), c.end(), back_inserter (sub_images));
646
647                 /* Text subtitles (rendered to an image) */
648                 if (!i->first.text.empty ()) {
649                         list<PositionImage> s = render_subtitles (i->first.text, i->first.fonts, _video_container_size, time);
650                         copy (s.begin (), s.end (), back_inserter (sub_images));
651                 }
652
653                 if (!sub_images.empty ()) {
654                         subtitles = merge (sub_images);
655                 }
656         }
657
658         /* Fill gaps */
659
660         if (_last_video_time) {
661                 fill_video (DCPTimePeriod (_last_video_time.get() + one_video_frame(), time));
662         }
663
664         _last_video.reset (
665                 new PlayerVideo (
666                         video.image,
667                         piece->content->video->crop (),
668                         piece->content->video->fade (video.frame),
669                         piece->content->video->scale().size (
670                                 piece->content->video, _video_container_size, _film->frame_size ()
671                                 ),
672                         _video_container_size,
673                         video.eyes,
674                         video.part,
675                         piece->content->video->colour_conversion ()
676                         )
677                 );
678
679         if (subtitles) {
680                 _last_video->set_subtitle (subtitles.get ());
681         }
682
683         _last_video_time = time;
684
685         Video (_last_video, *_last_video_time);
686
687         /* Discard any subtitles we no longer need */
688
689         for (list<pair<PlayerSubtitles, DCPTimePeriod> >::iterator i = _subtitles.begin (); i != _subtitles.end(); ) {
690                 list<pair<PlayerSubtitles, DCPTimePeriod> >::iterator tmp = i;
691                 ++tmp;
692
693                 if (i->second.to < time) {
694                         _subtitles.erase (i);
695                 }
696
697                 i = tmp;
698         }
699 }
700
701 void
702 Player::audio_flush (shared_ptr<Piece> piece, AudioStreamPtr stream)
703 {
704         shared_ptr<AudioContent> content = piece->content->audio;
705         DCPOMATIC_ASSERT (content);
706
707         shared_ptr<Resampler> r = resampler (content, stream, false);
708         if (!r) {
709                 return;
710         }
711
712         pair<shared_ptr<const AudioBuffers>, Frame> ro = r->flush ();
713         if (ro.first->frames() == 0) {
714                 return;
715         }
716
717         ContentAudio content_audio;
718         content_audio.audio = ro.first;
719         content_audio.frame = ro.second;
720
721         /* Compute time in the DCP */
722         DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame) + DCPTime::from_seconds (content->delay() / 1000.0);
723
724         audio_transform (content, stream, content_audio, time);
725 }
726
727 /** Do our common processing on some audio */
728 void
729 Player::audio_transform (shared_ptr<AudioContent> content, AudioStreamPtr stream, ContentAudio content_audio, DCPTime time)
730 {
731         DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);
732
733         /* Gain */
734
735         if (content->gain() != 0) {
736                 shared_ptr<AudioBuffers> gain (new AudioBuffers (content_audio.audio));
737                 gain->apply_gain (content->gain ());
738                 content_audio.audio = gain;
739         }
740
741         /* Remap */
742
743         shared_ptr<AudioBuffers> dcp_mapped (new AudioBuffers (_film->audio_channels(), content_audio.audio->frames()));
744         dcp_mapped->make_silent ();
745
746         AudioMapping map = stream->mapping ();
747         for (int i = 0; i < map.input_channels(); ++i) {
748                 for (int j = 0; j < dcp_mapped->channels(); ++j) {
749                         if (map.get (i, static_cast<dcp::Channel> (j)) > 0) {
750                                 dcp_mapped->accumulate_channel (
751                                         content_audio.audio.get(),
752                                         i,
753                                         static_cast<dcp::Channel> (j),
754                                         map.get (i, static_cast<dcp::Channel> (j))
755                                         );
756                         }
757                 }
758         }
759
760         content_audio.audio = dcp_mapped;
761
762         /* Process */
763
764         if (_audio_processor) {
765                 content_audio.audio = _audio_processor->run (content_audio.audio, _film->audio_channels ());
766         }
767
768         /* Push */
769
770         _audio_merger.push (content_audio.audio, time);
771         DCPOMATIC_ASSERT (_stream_states.find (stream) != _stream_states.end ());
772         _stream_states[stream].last_push_end = time + DCPTime::from_frames (content_audio.audio->frames(), _film->audio_frame_rate());
773 }
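
/* Remapping example (illustrative; channel indices only, no assumption about named dcp::Channel values):
   for a stereo stream in a 6-channel film where stream->mapping() has get(0, 0) == 1 and get(1, 1) == 1
   and every other entry 0, input channels 0 and 1 are accumulated into DCP channels 0 and 1 at unity
   gain and the remaining four output channels stay silent.
*/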
774
775 void
776 Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_audio)
777 {
778         DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);
779
780         shared_ptr<Piece> piece = wp.lock ();
781         if (!piece) {
782                 return;
783         }
784
785         shared_ptr<AudioContent> content = piece->content->audio;
786         DCPOMATIC_ASSERT (content);
787
788         /* Resample */
789         if (stream->frame_rate() != content->resampled_frame_rate()) {
790                 shared_ptr<Resampler> r = resampler (content, stream, true);
791                 pair<shared_ptr<const AudioBuffers>, Frame> ro = r->run (content_audio.audio, content_audio.frame);
792                 if (ro.first->frames() == 0) {
793                         return;
794                 }
795                 content_audio.audio = ro.first;
796                 content_audio.frame = ro.second;
797         }
798
799         /* Compute time in the DCP */
800         DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame) + DCPTime::from_seconds (content->delay() / 1000.0);
801         /* And the end of this block in the DCP */
802         DCPTime end = time + DCPTime::from_frames(content_audio.audio->frames(), content->resampled_frame_rate());
803
804         /* Remove anything that comes before the start or after the end of the content */
805         if (time < piece->content->position()) {
806                 pair<shared_ptr<AudioBuffers>, DCPTime> cut = discard_audio (content_audio.audio, time, piece->content->position());
807                 if (!cut.first) {
808                         /* This audio is entirely discarded */
809                         return;
810                 }
811                 content_audio.audio = cut.first;
812                 time = cut.second;
813         } else if (time > piece->content->end()) {
814                 /* Discard it all */
815                 return;
816         } else if (end > piece->content->end()) {
817                 Frame const remaining_frames = DCPTime(piece->content->end() - time).frames_round(_film->audio_frame_rate());
818                 DCPOMATIC_ASSERT (remaining_frames > 0);
819                 shared_ptr<AudioBuffers> cut (new AudioBuffers (content_audio.audio->channels(), remaining_frames));
820                 cut->copy_from (content_audio.audio.get(), remaining_frames, 0, 0);
821                 content_audio.audio = cut;
822         }
823
824         audio_transform (content, stream, content_audio, time);
825 }
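
/* Trimming example for audio() above (illustrative, 48kHz DCP audio): if a block of 48000 frames is
   computed to start 0.25s before its content's position, discard_audio() drops the first 12000 frames
   and moves `time` forward to the content's position; a block which runs past the content's end is cut
   down to just the frames that fit before the end.
*/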
826
827 void
828 Player::image_subtitle (weak_ptr<Piece> wp, ContentImageSubtitle subtitle)
829 {
830         shared_ptr<Piece> piece = wp.lock ();
831         if (!piece) {
832                 return;
833         }
834
835         /* Apply content's subtitle offsets */
836         subtitle.sub.rectangle.x += piece->content->subtitle->x_offset ();
837         subtitle.sub.rectangle.y += piece->content->subtitle->y_offset ();
838
839         /* Apply content's subtitle scale */
840         subtitle.sub.rectangle.width *= piece->content->subtitle->x_scale ();
841         subtitle.sub.rectangle.height *= piece->content->subtitle->y_scale ();
842
843         /* Apply a corrective translation to keep the subtitle centred after that scale */
844         subtitle.sub.rectangle.x -= subtitle.sub.rectangle.width * (piece->content->subtitle->x_scale() - 1);
845         subtitle.sub.rectangle.y -= subtitle.sub.rectangle.height * (piece->content->subtitle->y_scale() - 1);
846
847         PlayerSubtitles ps;
848         ps.image.push_back (subtitle.sub);
849         DCPTimePeriod period (content_time_to_dcp (piece, subtitle.period().from), content_time_to_dcp (piece, subtitle.period().to));
850
851         if (piece->content->subtitle->use() && (piece->content->subtitle->burn() || _always_burn_subtitles)) {
852                 _subtitles.push_back (make_pair (ps, period));
853         } else {
854                 Subtitle (ps, period);
855         }
856 }
857
858 void
859 Player::text_subtitle (weak_ptr<Piece> wp, ContentTextSubtitle subtitle)
860 {
861         shared_ptr<Piece> piece = wp.lock ();
862         if (!piece) {
863                 return;
864         }
865
866         PlayerSubtitles ps;
867         DCPTimePeriod const period (content_time_to_dcp (piece, subtitle.period().from), content_time_to_dcp (piece, subtitle.period().to));
868
869         BOOST_FOREACH (dcp::SubtitleString s, subtitle.subs) {
870                 s.set_h_position (s.h_position() + piece->content->subtitle->x_offset ());
871                 s.set_v_position (s.v_position() + piece->content->subtitle->y_offset ());
872                 float const xs = piece->content->subtitle->x_scale();
873                 float const ys = piece->content->subtitle->y_scale();
874                 float size = s.size();
875
876                 /* Adjust size to express the common part of the scaling;
877                    e.g. if xs = ys = 0.5 we scale the size by 0.5.
878                 */
879                 if (xs > 1e-5 && ys > 1e-5) {
880                         size *= 1 / min (1 / xs, 1 / ys);
881                 }
882                 s.set_size (size);
883
884                 /* Then express aspect ratio changes */
885                 if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) {
886                         s.set_aspect_adjust (xs / ys);
887                 }
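                /* Example (illustrative): with xs = ys = 0.5 the size above is halved and no aspect
                   adjustment is needed; with xs = 0.5, ys = 1.0 the size is unchanged (the factor is
                   max(xs, ys) = 1.0) and set_aspect_adjust (0.5) narrows the text.
                */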
888
889                 s.set_in (dcp::Time(period.from.seconds(), 1000));
890                 s.set_out (dcp::Time(period.to.seconds(), 1000));
891                 ps.text.push_back (SubtitleString (s, piece->content->subtitle->outline_width()));
892                 ps.add_fonts (piece->content->subtitle->fonts ());
893         }
894
895         if (piece->content->subtitle->use() && (piece->content->subtitle->burn() || _always_burn_subtitles)) {
896                 _subtitles.push_back (make_pair (ps, period));
897         } else {
898                 Subtitle (ps, period);
899         }
900 }
901
902 void
903 Player::seek (DCPTime time, bool accurate)
904 {
905         if (_audio_processor) {
906                 _audio_processor->flush ();
907         }
908
909         for (ResamplerMap::iterator i = _resamplers.begin(); i != _resamplers.end(); ++i) {
910                 i->second->flush ();
911                 i->second->reset ();
912         }
913
914         _audio_merger.clear ();
915
916         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
917                 i->done = false;
918                 if (i->content->position() <= time && time < i->content->end()) {
919                         i->decoder->seek (dcp_to_content_time (i, time), accurate);
920                 }
921         }
922
923         if (accurate) {
924                 _last_video_time = time - one_video_frame ();
925                 _last_audio_time = time;
926         } else {
927                 _last_video_time = optional<DCPTime> ();
928                 _last_audio_time = optional<DCPTime> ();
929         }
930 }
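
/* Seek example (illustrative): after an accurate seek to 60s, _last_video_time is 60s minus one video
   frame and _last_audio_time is 60s, so the first video frame emitted at 60s needs no gap-filling and
   any audio decoded for times before 60s is discarded in pass().  An inaccurate seek clears both, so
   whatever the decoders produce next is emitted as-is.
*/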
931
932 shared_ptr<Resampler>
933 Player::resampler (shared_ptr<const AudioContent> content, AudioStreamPtr stream, bool create)
934 {
935         ResamplerMap::const_iterator i = _resamplers.find (make_pair (content, stream));
936         if (i != _resamplers.end ()) {
937                 return i->second;
938         }
939
940         if (!create) {
941                 return shared_ptr<Resampler> ();
942         }
943
944         LOG_GENERAL (
945                 "Creating new resampler from %1 to %2 with %3 channels",
946                 stream->frame_rate(),
947                 content->resampled_frame_rate(),
948                 stream->channels()
949                 );
950
951         shared_ptr<Resampler> r (
952                 new Resampler (stream->frame_rate(), content->resampled_frame_rate(), stream->channels())
953                 );
954
955         _resamplers[make_pair(content, stream)] = r;
956         return r;
957 }
958
959 void
960 Player::fill_video (DCPTimePeriod period)
961 {
962         /* XXX: this may not work for 3D */
963         BOOST_FOREACH (DCPTimePeriod i, subtract(period, _no_video)) {
964                 for (DCPTime j = i.from; j < i.to; j += one_video_frame()) {
965                         if (_playlist->video_content_at(j) && _last_video) {
966                                 Video (shared_ptr<PlayerVideo> (new PlayerVideo (*_last_video)), j);
967                         } else {
968                                 Video (black_player_video_frame(), j);
969                         }
970                 }
971         }
972 }
973
974 void
975 Player::fill_audio (DCPTimePeriod period)
976 {
977         BOOST_FOREACH (DCPTimePeriod i, subtract(period, _no_audio)) {
978                 DCPTime t = i.from;
979                 while (t < i.to) {
980                         DCPTime block = min (DCPTime::from_seconds (0.5), i.to - t);
981                         Frame const samples = block.frames_round(_film->audio_frame_rate());
982                         if (samples) {
983                                 shared_ptr<AudioBuffers> silence (new AudioBuffers (_film->audio_channels(), samples));
984                                 silence->make_silent ();
985                                 Audio (silence, t);
986                         }
987                         t += block;
988                 }
989         }
990 }
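
/* fill_audio() example (illustrative, 48kHz): a 1.3s gap is covered by silent blocks of 0.5s, 0.5s and
   0.3s, i.e. 24000, 24000 and 14400 frames emitted at the start of each block.
*/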
991
992 DCPTime
993 Player::one_video_frame () const
994 {
995         return DCPTime::from_frames (1, _film->video_frame_rate ());
996 }
997
998 pair<shared_ptr<AudioBuffers>, DCPTime>
999 Player::discard_audio (shared_ptr<const AudioBuffers> audio, DCPTime time, DCPTime discard_to) const
1000 {
1001         DCPTime const discard_time = discard_to - time;
1002         Frame const discard_frames = discard_time.frames_round(_film->audio_frame_rate());
1003         Frame remaining_frames = audio->frames() - discard_frames;
1004         if (remaining_frames <= 0) {
1005                 return make_pair(shared_ptr<AudioBuffers>(), DCPTime());
1006         }
1007         shared_ptr<AudioBuffers> cut (new AudioBuffers (audio->channels(), remaining_frames));
1008         cut->copy_from (audio.get(), remaining_frames, discard_frames, 0);
1009         return make_pair(cut, time + discard_time);
1010 }
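
/* discard_audio() example (illustrative, 48kHz): with audio of 48000 frames starting at time = 9.9s and
   discard_to = 10s (the time of the last accurate seek), 4800 frames are dropped and the remaining 43200
   are returned with a new start time of 10s; if the block ends before discard_to, an empty pair is
   returned and the caller skips it.
*/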