Several fixes to audio.
[dcpomatic.git] / src / lib / player.cc
1 /*
2     Copyright (C) 2013-2017 Carl Hetherington <cth@carlh.net>
3
4     This file is part of DCP-o-matic.
5
6     DCP-o-matic is free software; you can redistribute it and/or modify
7     it under the terms of the GNU General Public License as published by
8     the Free Software Foundation; either version 2 of the License, or
9     (at your option) any later version.
10
11     DCP-o-matic is distributed in the hope that it will be useful,
12     but WITHOUT ANY WARRANTY; without even the implied warranty of
13     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14     GNU General Public License for more details.
15
16     You should have received a copy of the GNU General Public License
17     along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.
18
19 */
20
21 #include "player.h"
22 #include "film.h"
23 #include "audio_buffers.h"
24 #include "content_audio.h"
25 #include "dcp_content.h"
26 #include "job.h"
27 #include "image.h"
28 #include "raw_image_proxy.h"
29 #include "ratio.h"
30 #include "log.h"
31 #include "render_subtitles.h"
32 #include "config.h"
33 #include "content_video.h"
34 #include "player_video.h"
35 #include "frame_rate_change.h"
36 #include "audio_processor.h"
37 #include "playlist.h"
38 #include "referenced_reel_asset.h"
39 #include "decoder_factory.h"
40 #include "decoder.h"
41 #include "video_decoder.h"
42 #include "audio_decoder.h"
43 #include "subtitle_content.h"
44 #include "subtitle_decoder.h"
45 #include "ffmpeg_content.h"
46 #include "audio_content.h"
47 #include "content_subtitle.h"
48 #include "dcp_decoder.h"
49 #include "image_decoder.h"
50 #include "resampler.h"
51 #include "compose.hpp"
52 #include <dcp/reel.h>
53 #include <dcp/reel_sound_asset.h>
54 #include <dcp/reel_subtitle_asset.h>
55 #include <dcp/reel_picture_asset.h>
56 #include <boost/foreach.hpp>
57 #include <stdint.h>
58 #include <algorithm>
59 #include <iostream>
60
61 #include "i18n.h"
62
63 #define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);
64
65 using std::list;
66 using std::cout;
67 using std::min;
68 using std::max;
70 using std::vector;
71 using std::pair;
72 using std::map;
73 using std::make_pair;
74 using std::copy;
75 using boost::shared_ptr;
76 using boost::weak_ptr;
77 using boost::dynamic_pointer_cast;
78 using boost::optional;
79 using boost::scoped_ptr;
80
81 Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
82         : _film (film)
83         , _playlist (playlist)
84         , _have_valid_pieces (false)
85         , _ignore_video (false)
86         , _ignore_audio (false)
87         , _always_burn_subtitles (false)
88         , _fast (false)
89         , _play_referenced (false)
90         , _audio_merger (_film->audio_frame_rate())
91 {
92         _film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
93         _playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
94         _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::playlist_content_changed, this, _1, _2, _3));
95         set_video_container_size (_film->frame_size ());
96
97         film_changed (Film::AUDIO_PROCESSOR);
98
99         seek (DCPTime (), true);
100 }
101
102 void
103 Player::setup_pieces ()
104 {
105         _pieces.clear ();
106
107         BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
108
109                 if (!i->paths_valid ()) {
110                         continue;
111                 }
112
113                 shared_ptr<Decoder> decoder = decoder_factory (i, _film->log());
114                 FrameRateChange frc (i->active_video_frame_rate(), _film->video_frame_rate());
115
116                 if (!decoder) {
117                         /* Not something that we can decode; e.g. Atmos content */
118                         continue;
119                 }
120
121                 if (decoder->video && _ignore_video) {
122                         decoder->video->set_ignore ();
123                 }
124
125                 if (decoder->audio && _ignore_audio) {
126                         decoder->audio->set_ignore ();
127                 }
128
129                 shared_ptr<DCPDecoder> dcp = dynamic_pointer_cast<DCPDecoder> (decoder);
130                 if (dcp && _play_referenced) {
131                         dcp->set_decode_referenced ();
132                 }
133
134                 shared_ptr<Piece> piece (new Piece (i, decoder, frc));
135                 _pieces.push_back (piece);
136
137                 if (decoder->video) {
138                         decoder->video->Data.connect (bind (&Player::video, this, weak_ptr<Piece> (piece), _1));
139                 }
140
141                 if (decoder->audio) {
142                         decoder->audio->Data.connect (bind (&Player::audio, this, weak_ptr<Piece> (piece), _1, _2));
143                 }
144
145                 if (decoder->subtitle) {
146                         decoder->subtitle->ImageData.connect (bind (&Player::image_subtitle, this, weak_ptr<Piece> (piece), _1));
147                         decoder->subtitle->TextData.connect (bind (&Player::text_subtitle, this, weak_ptr<Piece> (piece), _1));
148                 }
149         }
150
151         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
152                 if (i->content->audio) {
153                         BOOST_FOREACH (AudioStreamPtr j, i->content->audio->streams()) {
154                                 _stream_states[j] = StreamState (i, i->content->position ());
155                         }
156                 }
157         }
158
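        /* Note the periods which will be covered by video/audio referenced directly from a DCP, so that
           fill_video() and fill_audio() do not generate black/silence over them.
        */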
159         if (!_play_referenced) {
160                 BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
161                         shared_ptr<DCPContent> dc = dynamic_pointer_cast<DCPContent> (i->content);
162                         if (dc) {
163                                 if (dc->reference_video()) {
164                                         _no_video.push_back (DCPTimePeriod (dc->position(), dc->end()));
165                                 }
166                                 if (dc->reference_audio()) {
167                                         _no_audio.push_back (DCPTimePeriod (dc->position(), dc->end()));
168                                 }
169                         }
170                 }
171         }
172
173         _have_valid_pieces = true;
174 }
175
176 void
177 Player::playlist_content_changed (weak_ptr<Content> w, int property, bool frequent)
178 {
179         shared_ptr<Content> c = w.lock ();
180         if (!c) {
181                 return;
182         }
183
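        /* Roughly speaking: a change to any property in the first group below invalidates our Pieces
           (so they are rebuilt on the next pass()), while one in the second group only requires us to
           tell listeners that the output has changed.
        */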
184         if (
185                 property == ContentProperty::POSITION ||
186                 property == ContentProperty::LENGTH ||
187                 property == ContentProperty::TRIM_START ||
188                 property == ContentProperty::TRIM_END ||
189                 property == ContentProperty::PATH ||
190                 property == VideoContentProperty::FRAME_TYPE ||
191                 property == DCPContentProperty::NEEDS_ASSETS ||
192                 property == DCPContentProperty::NEEDS_KDM ||
193                 property == SubtitleContentProperty::COLOUR ||
194                 property == SubtitleContentProperty::OUTLINE ||
195                 property == SubtitleContentProperty::SHADOW ||
196                 property == SubtitleContentProperty::EFFECT_COLOUR ||
197                 property == FFmpegContentProperty::SUBTITLE_STREAM ||
198                 property == VideoContentProperty::COLOUR_CONVERSION
199                 ) {
200
201                 _have_valid_pieces = false;
202                 Changed (frequent);
203
204         } else if (
205                 property == SubtitleContentProperty::LINE_SPACING ||
206                 property == SubtitleContentProperty::OUTLINE_WIDTH ||
207                 property == SubtitleContentProperty::Y_SCALE ||
208                 property == SubtitleContentProperty::FADE_IN ||
209                 property == SubtitleContentProperty::FADE_OUT ||
210                 property == ContentProperty::VIDEO_FRAME_RATE ||
211                 property == SubtitleContentProperty::USE ||
212                 property == SubtitleContentProperty::X_OFFSET ||
213                 property == SubtitleContentProperty::Y_OFFSET ||
214                 property == SubtitleContentProperty::X_SCALE ||
215                 property == SubtitleContentProperty::FONTS ||
216                 property == VideoContentProperty::CROP ||
217                 property == VideoContentProperty::SCALE ||
218                 property == VideoContentProperty::FADE_IN ||
219                 property == VideoContentProperty::FADE_OUT
220                 ) {
221
222                 Changed (frequent);
223         }
224 }
225
226 void
227 Player::set_video_container_size (dcp::Size s)
228 {
229         if (s == _video_container_size) {
230                 return;
231         }
232
233         _video_container_size = s;
234
235         _black_image.reset (new Image (AV_PIX_FMT_RGB24, _video_container_size, true));
236         _black_image->make_black ();
237
238         Changed (false);
239 }
240
241 void
242 Player::playlist_changed ()
243 {
244         _have_valid_pieces = false;
245         Changed (false);
246 }
247
248 void
249 Player::film_changed (Film::Property p)
250 {
251         /* Here we should notice Film properties that affect our output, and
252            alert listeners that our output would now be different from how it was
253            the last time we were run.
254         */
255
256         if (p == Film::CONTAINER) {
257                 Changed (false);
258         } else if (p == Film::VIDEO_FRAME_RATE) {
259                 /* Pieces contain a FrameRateChange which contains the DCP frame rate,
260                    so we need new pieces here.
261                 */
262                 _have_valid_pieces = false;
263                 Changed (false);
264         } else if (p == Film::AUDIO_PROCESSOR) {
265                 if (_film->audio_processor ()) {
266                         _audio_processor = _film->audio_processor()->clone (_film->audio_frame_rate ());
267                 }
268         }
269 }
270
271 list<PositionImage>
272 Player::transform_image_subtitles (list<ImageSubtitle> subs) const
273 {
274         list<PositionImage> all;
275
276         for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
277                 if (!i->image) {
278                         continue;
279                 }
280
281                 /* We will scale the subtitle up to fit _video_container_size */
282                 dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);
283
284                 /* Then we need a corrective translation, consisting of two parts:
285                  *
286                  * 1.  that which is the result of the scaling of the subtitle by _video_container_size; this will be
287                  *     rect.x * _video_container_size.width and rect.y * _video_container_size.height.
288                  *
289                  * 2.  that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
290                  *     (width_before_subtitle_scale * (1 - subtitle_x_scale) / 2) and
291                  *     (height_before_subtitle_scale * (1 - subtitle_y_scale) / 2).
292                  *
293                  * Part 2 is applied when the subtitle first arrives (see image_subtitle()), so only part 1 appears below.
294                  */
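                /* Illustrative example: with a 1998x1080 container and a rectangle of (x 0.1, y 0.8, width 0.5, height 0.1)
                   we get a scaled_size of 999x108 and a position of (200, 864).
                */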
295
296                 all.push_back (
297                         PositionImage (
298                                 i->image->scale (
299                                         scaled_size,
300                                         dcp::YUV_TO_RGB_REC601,
301                                         i->image->pixel_format (),
302                                         true,
303                                         _fast
304                                         ),
305                                 Position<int> (
306                                         lrint (_video_container_size.width * i->rectangle.x),
307                                         lrint (_video_container_size.height * i->rectangle.y)
308                                         )
309                                 )
310                         );
311         }
312
313         return all;
314 }
315
316 shared_ptr<PlayerVideo>
317 Player::black_player_video_frame () const
318 {
319         return shared_ptr<PlayerVideo> (
320                 new PlayerVideo (
321                         shared_ptr<const ImageProxy> (new RawImageProxy (_black_image)),
322                         Crop (),
323                         optional<double> (),
324                         _video_container_size,
325                         _video_container_size,
326                         EYES_BOTH,
327                         PART_WHOLE,
328                         PresetColourConversion::all().front().conversion
329                 )
330         );
331 }
332
333 Frame
334 Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
335 {
336         DCPTime s = t - piece->content->position ();
337         s = min (piece->content->length_after_trim(), s);
338         s = max (DCPTime(), s + DCPTime (piece->content->trim_start(), piece->frc));
339
340         /* It might seem more logical here to convert s to a ContentTime (using the FrameRateChange)
341            then convert that ContentTime to frames at the content's rate.  However this fails for
342            situations like content at 29.9978733fps, DCP at 30fps.  The accuracy of the Time type is not
343            enough to distinguish between the two with low values of time (e.g. 3200 in Time units).
344
345            Instead we convert the DCPTime using the DCP video rate then account for any skip/repeat.
346         */
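        /* Illustrative example: 25fps content in a 50fps DCP (each content frame repeated, so frc.factor()
           should be 2): 1s of DCP time gives 50 DCP frames, i.e. 25 content frames.
        */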
347         return s.frames_floor (piece->frc.dcp) / piece->frc.factor ();
348 }
349
350 DCPTime
351 Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
352 {
353         /* See comment in dcp_to_content_video */
354         DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime (piece->content->trim_start (), piece->frc);
355         return max (DCPTime (), d + piece->content->position ());
356 }
357
358 Frame
359 Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
360 {
361         DCPTime s = t - piece->content->position ();
362         s = min (piece->content->length_after_trim(), s);
363         /* See notes in dcp_to_content_video */
364         return max (DCPTime (), DCPTime (piece->content->trim_start (), piece->frc) + s).frames_floor (_film->audio_frame_rate ());
365 }
366
367 DCPTime
368 Player::resampled_audio_to_dcp (shared_ptr<const Piece> piece, Frame f) const
369 {
370         /* See comment in dcp_to_content_video */
371         DCPTime const d = DCPTime::from_frames (f, _film->audio_frame_rate()) - DCPTime (piece->content->trim_start(), piece->frc);
372         return max (DCPTime (), d + piece->content->position ());
373 }
374
375 ContentTime
376 Player::dcp_to_content_time (shared_ptr<const Piece> piece, DCPTime t) const
377 {
378         DCPTime s = t - piece->content->position ();
379         s = min (piece->content->length_after_trim(), s);
380         return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
381 }
382
383 DCPTime
384 Player::content_time_to_dcp (shared_ptr<const Piece> piece, ContentTime t) const
385 {
386         return max (DCPTime (), DCPTime (t - piece->content->trim_start(), piece->frc) + piece->content->position());
387 }
388
389 list<shared_ptr<Font> >
390 Player::get_subtitle_fonts ()
391 {
392         if (!_have_valid_pieces) {
393                 setup_pieces ();
394         }
395
396         list<shared_ptr<Font> > fonts;
397         BOOST_FOREACH (shared_ptr<Piece>& p, _pieces) {
398                 if (p->content->subtitle) {
399                         /* XXX: things may go wrong if there are duplicate font IDs
400                            with different font files.
401                         */
402                         list<shared_ptr<Font> > f = p->content->subtitle->fonts ();
403                         copy (f.begin(), f.end(), back_inserter (fonts));
404                 }
405         }
406
407         return fonts;
408 }
409
410 /** Set this player never to produce any video data */
411 void
412 Player::set_ignore_video ()
413 {
414         _ignore_video = true;
415 }
416
417 /** Set this player never to produce any audio data */
418 void
419 Player::set_ignore_audio ()
420 {
421         _ignore_audio = true;
422 }
423
424 /** Set whether or not this player should always burn text subtitles into the image,
425  *  regardless of the content settings.
426  *  @param burn true to always burn subtitles, false to obey content settings.
427  */
428 void
429 Player::set_always_burn_subtitles (bool burn)
430 {
431         _always_burn_subtitles = burn;
432 }
433
434 void
435 Player::set_fast ()
436 {
437         _fast = true;
438         _have_valid_pieces = false;
439 }
440
441 void
442 Player::set_play_referenced ()
443 {
444         _play_referenced = true;
445         _have_valid_pieces = false;
446 }
447
448 list<ReferencedReelAsset>
449 Player::get_reel_assets ()
450 {
451         list<ReferencedReelAsset> a;
452
453         BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
454                 shared_ptr<DCPContent> j = dynamic_pointer_cast<DCPContent> (i);
455                 if (!j) {
456                         continue;
457                 }
458
459                 scoped_ptr<DCPDecoder> decoder;
460                 try {
461                         decoder.reset (new DCPDecoder (j, _film->log()));
462                 } catch (...) {
463                         return a;
464                 }
465
466                 int64_t offset = 0;
467                 BOOST_FOREACH (shared_ptr<dcp::Reel> k, decoder->reels()) {
468
469                         DCPOMATIC_ASSERT (j->video_frame_rate ());
470                         double const cfr = j->video_frame_rate().get();
471                         Frame const trim_start = j->trim_start().frames_round (cfr);
472                         Frame const trim_end = j->trim_end().frames_round (cfr);
473                         int const ffr = _film->video_frame_rate ();
474
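                        /* For each referenced asset we move its entry point forward by the start trim and shorten
                           its duration by the total trim, both expressed in frames at the content's rate.
                        */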
475                         DCPTime const from = i->position() + DCPTime::from_frames (offset, _film->video_frame_rate());
476                         if (j->reference_video ()) {
477                                 shared_ptr<dcp::ReelAsset> ra = k->main_picture ();
478                                 DCPOMATIC_ASSERT (ra);
479                                 ra->set_entry_point (ra->entry_point() + trim_start);
480                                 ra->set_duration (ra->duration() - trim_start - trim_end);
481                                 a.push_back (
482                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
483                                         );
484                         }
485
486                         if (j->reference_audio ()) {
487                                 shared_ptr<dcp::ReelAsset> ra = k->main_sound ();
488                                 DCPOMATIC_ASSERT (ra);
489                                 ra->set_entry_point (ra->entry_point() + trim_start);
490                                 ra->set_duration (ra->duration() - trim_start - trim_end);
491                                 a.push_back (
492                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
493                                         );
494                         }
495
496                         if (j->reference_subtitle ()) {
497                                 shared_ptr<dcp::ReelAsset> ra = k->main_subtitle ();
498                                 DCPOMATIC_ASSERT (ra);
499                                 ra->set_entry_point (ra->entry_point() + trim_start);
500                                 ra->set_duration (ra->duration() - trim_start - trim_end);
501                                 a.push_back (
502                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
503                                         );
504                         }
505
506                         /* Assume that main picture duration is the length of the reel */
507                         offset += k->main_picture()->duration ();
508                 }
509         }
510
511         return a;
512 }
513
514 list<shared_ptr<Piece> >
515 Player::overlaps (DCPTime from, DCPTime to, boost::function<bool (Content *)> valid)
516 {
517         if (!_have_valid_pieces) {
518                 setup_pieces ();
519         }
520
521         list<shared_ptr<Piece> > overlaps;
522         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
523                 if (valid (i->content.get ()) && i->content->position() < to && i->content->end() > from) {
524                         overlaps.push_back (i);
525                 }
526         }
527
528         return overlaps;
529 }
530
531 bool
532 Player::pass ()
533 {
534         if (!_have_valid_pieces) {
535                 setup_pieces ();
536         }
537
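        /* Find the piece whose decoder is at the earliest point in DCP time; we pass() that one next so
           that material is produced in roughly chronological order.
        */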
538         shared_ptr<Piece> earliest;
539         DCPTime earliest_content;
540
541         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
542                 if (!i->done) {
543                         DCPTime const t = i->content->position() + DCPTime (i->decoder->position(), i->frc);
544                         if (!earliest || t < earliest_content) {
545                                 earliest_content = t;
546                                 earliest = i;
547                         }
548                 }
549         }
550
551         if (!earliest) {
552                 /* No more content; fill up with silent black */
553                 DCPTimePeriod remaining_video (DCPTime(), _playlist->length());
554                 if (_last_video_time) {
555                         remaining_video.from = _last_video_time.get() + one_video_frame();
556                 }
557                 fill_video (remaining_video);
558                 DCPTimePeriod remaining_audio (DCPTime(), _playlist->length());
559                 if (_last_audio_time) {
560                         remaining_audio.from = _last_audio_time.get();
561                 }
562                 fill_audio (remaining_audio);
563                 return true;
564         }
565
566         earliest->done = earliest->decoder->pass ();
567         if (earliest->done && earliest->content->audio) {
568                 /* Flush the Player audio system for this piece */
569                 BOOST_FOREACH (AudioStreamPtr i, earliest->content->audio->streams()) {
570                         audio_flush (earliest, i);
571                 }
572         }
573
574         /* Emit any audio that is ready */
575
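        /* We can only emit audio up to the earliest last_push_end of the streams that are still being
           decoded; anything later may yet have more data mixed into it by the merger.
        */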
576         DCPTime pull_from = _playlist->length ();
577         for (map<AudioStreamPtr, StreamState>::const_iterator i = _stream_states.begin(); i != _stream_states.end(); ++i) {
578                 if (!i->second.piece->done && i->second.last_push_end < pull_from) {
579                         pull_from = i->second.last_push_end;
580                 }
581         }
582
583         list<pair<shared_ptr<AudioBuffers>, DCPTime> > audio = _audio_merger.pull (pull_from);
584         for (list<pair<shared_ptr<AudioBuffers>, DCPTime> >::iterator i = audio.begin(); i != audio.end(); ++i) {
585                 DCPOMATIC_ASSERT (!_last_audio_time || i->second >= _last_audio_time.get());
586                 if (_last_audio_time) {
587                         fill_audio (DCPTimePeriod (_last_audio_time.get(), i->second));
588                 }
589                 Audio (i->first, i->second);
590                 _last_audio_time = i->second + DCPTime::from_frames(i->first->frames(), _film->audio_frame_rate());
591         }
592
593         return false;
594 }
595
596 void
597 Player::video (weak_ptr<Piece> wp, ContentVideo video)
598 {
599         shared_ptr<Piece> piece = wp.lock ();
600         if (!piece) {
601                 return;
602         }
603
604         FrameRateChange frc(piece->content->active_video_frame_rate(), _film->video_frame_rate());
605         if (frc.skip && (video.frame % 2) == 1) {
606                 return;
607         }
608
609         /* Time and period of the frame we will emit */
610         DCPTime const time = content_video_to_dcp (piece, video.frame);
611         DCPTimePeriod const period (time, time + one_video_frame());
612
613         /* Discard if it's outside the content's period */
614         if (time < piece->content->position() || time >= piece->content->end()) {
615                 return;
616         }
617
618         /* Get any subtitles */
619
620         optional<PositionImage> subtitles;
621
622         for (list<pair<PlayerSubtitles, DCPTimePeriod> >::const_iterator i = _subtitles.begin(); i != _subtitles.end(); ++i) {
623
624                 if (!i->second.overlap (period)) {
625                         continue;
626                 }
627
628                 list<PositionImage> sub_images;
629
630                 /* Image subtitles */
631                 list<PositionImage> c = transform_image_subtitles (i->first.image);
632                 copy (c.begin(), c.end(), back_inserter (sub_images));
633
634                 /* Text subtitles (rendered to an image) */
635                 if (!i->first.text.empty ()) {
636                         list<PositionImage> s = render_subtitles (i->first.text, i->first.fonts, _video_container_size, time);
637                         copy (s.begin (), s.end (), back_inserter (sub_images));
638                 }
639
640                 if (!sub_images.empty ()) {
641                         subtitles = merge (sub_images);
642                 }
643         }
644
645         /* Fill gaps */
646
647         if (_last_video_time) {
648                 fill_video (DCPTimePeriod (_last_video_time.get() + one_video_frame(), time));
649         }
650
651         _last_video.reset (
652                 new PlayerVideo (
653                         video.image,
654                         piece->content->video->crop (),
655                         piece->content->video->fade (video.frame),
656                         piece->content->video->scale().size (
657                                 piece->content->video, _video_container_size, _film->frame_size ()
658                                 ),
659                         _video_container_size,
660                         video.eyes,
661                         video.part,
662                         piece->content->video->colour_conversion ()
663                         )
664                 );
665
666         if (subtitles) {
667                 _last_video->set_subtitle (subtitles.get ());
668         }
669
670         _last_video_time = time;
671
672         Video (_last_video, *_last_video_time);
673
674         /* Discard any subtitles we no longer need */
675
676         for (list<pair<PlayerSubtitles, DCPTimePeriod> >::iterator i = _subtitles.begin (); i != _subtitles.end(); ) {
677                 list<pair<PlayerSubtitles, DCPTimePeriod> >::iterator tmp = i;
678                 ++tmp;
679
680                 if (i->second.to < time) {
681                         _subtitles.erase (i);
682                 }
683
684                 i = tmp;
685         }
686 }
687
688 void
689 Player::audio_flush (shared_ptr<Piece> piece, AudioStreamPtr stream)
690 {
691         shared_ptr<AudioContent> content = piece->content->audio;
692         DCPOMATIC_ASSERT (content);
693
694         shared_ptr<Resampler> r = resampler (content, stream, false);
695         if (!r) {
696                 return;
697         }
698
699         pair<shared_ptr<const AudioBuffers>, Frame> ro = r->flush ();
700         ContentAudio content_audio;
701         content_audio.audio = ro.first;
702         content_audio.frame = ro.second;
703
704         /* Compute time in the DCP */
705         DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame) + DCPTime::from_seconds (content->delay() / 1000.0);
706
707         audio_transform (content, stream, content_audio, time);
708 }
709
710 /** Do our common processing on some audio */
711 void
712 Player::audio_transform (shared_ptr<AudioContent> content, AudioStreamPtr stream, ContentAudio content_audio, DCPTime time)
713 {
714         /* Gain */
715
716         if (content->gain() != 0) {
717                 shared_ptr<AudioBuffers> gain (new AudioBuffers (content_audio.audio));
718                 gain->apply_gain (content->gain ());
719                 content_audio.audio = gain;
720         }
721
722         /* Remap */
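        /* Mix the stream's channels into the film's channel layout using the content's AudioMapping.
           e.g. (illustrative) a stereo stream mapped 1:1 into a 5.1 film accumulates input channel 0 into
           DCP channel 0 (L) and input channel 1 into DCP channel 1 (R), each with its mapping gain.
        */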
723
724         shared_ptr<AudioBuffers> dcp_mapped (new AudioBuffers (_film->audio_channels(), content_audio.audio->frames()));
725         dcp_mapped->make_silent ();
726
727         AudioMapping map = stream->mapping ();
728         for (int i = 0; i < map.input_channels(); ++i) {
729                 for (int j = 0; j < dcp_mapped->channels(); ++j) {
730                         if (map.get (i, static_cast<dcp::Channel> (j)) > 0) {
731                                 dcp_mapped->accumulate_channel (
732                                         content_audio.audio.get(),
733                                         i,
734                                         static_cast<dcp::Channel> (j),
735                                         map.get (i, static_cast<dcp::Channel> (j))
736                                         );
737                         }
738                 }
739         }
740
741         content_audio.audio = dcp_mapped;
742
743         /* Process */
744
745         if (_audio_processor) {
746                 content_audio.audio = _audio_processor->run (content_audio.audio, _film->audio_channels ());
747         }
748
749         /* Push */
750
751         _audio_merger.push (content_audio.audio, time);
752         DCPOMATIC_ASSERT (_stream_states.find (stream) != _stream_states.end ());
753         _stream_states[stream].last_push_end = time + DCPTime::from_frames (content_audio.audio->frames(), _film->audio_frame_rate());
754 }
755
756 void
757 Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_audio)
758 {
759         shared_ptr<Piece> piece = wp.lock ();
760         if (!piece) {
761                 return;
762         }
763
764         shared_ptr<AudioContent> content = piece->content->audio;
765         DCPOMATIC_ASSERT (content);
766
767         /* Resample */
768         if (stream->frame_rate() != content->resampled_frame_rate()) {
769                 shared_ptr<Resampler> r = resampler (content, stream, true);
770                 pair<shared_ptr<const AudioBuffers>, Frame> ro = r->run (content_audio.audio, content_audio.frame);
771                 content_audio.audio = ro.first;
772                 content_audio.frame = ro.second;
773         }
774
775         /* Compute time in the DCP */
776         DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame) + DCPTime::from_seconds (content->delay() / 1000.0);
777         /* And the end of this block in the DCP */
778         DCPTime end = time + DCPTime::from_frames(content_audio.audio->frames(), content->resampled_frame_rate());
779
780         /* Remove anything that comes before the start or after the end of the content */
781         if (time < piece->content->position()) {
782                 DCPTime const discard_time = piece->content->position() - time;
783                 Frame discard_frames = discard_time.frames_round(_film->audio_frame_rate());
784                 Frame remaining_frames = content_audio.audio->frames() - discard_frames;
785                 if (remaining_frames <= 0) {
786                         /* This audio is entirely discarded */
787                         return;
788                 }
789                 shared_ptr<AudioBuffers> cut (new AudioBuffers (content_audio.audio->channels(), remaining_frames));
790                 cut->copy_from (content_audio.audio.get(), remaining_frames, discard_frames, 0);
791                 content_audio.audio = cut;
792                 time += discard_time;
793         } else if (time > piece->content->end()) {
794                 /* Discard it all */
795                 return;
796         } else if (end > piece->content->end()) {
797                 Frame const remaining_frames = DCPTime(piece->content->end() - time).frames_round(_film->audio_frame_rate());
798                 shared_ptr<AudioBuffers> cut (new AudioBuffers (content_audio.audio->channels(), remaining_frames));
799                 cut->copy_from (content_audio.audio.get(), remaining_frames, 0, 0);
800                 content_audio.audio = cut;
801         }
802
803         audio_transform (content, stream, content_audio, time);
804 }
805
806 void
807 Player::image_subtitle (weak_ptr<Piece> wp, ContentImageSubtitle subtitle)
808 {
809         shared_ptr<Piece> piece = wp.lock ();
810         if (!piece) {
811                 return;
812         }
813
814         /* Apply content's subtitle offsets */
815         subtitle.sub.rectangle.x += piece->content->subtitle->x_offset ();
816         subtitle.sub.rectangle.y += piece->content->subtitle->y_offset ();
817
818         /* Apply content's subtitle scale */
819         subtitle.sub.rectangle.width *= piece->content->subtitle->x_scale ();
820         subtitle.sub.rectangle.height *= piece->content->subtitle->y_scale ();
821
822         /* Apply a corrective translation to keep the subtitle centred after that scale */
823         subtitle.sub.rectangle.x -= subtitle.sub.rectangle.width * (piece->content->subtitle->x_scale() - 1);
824         subtitle.sub.rectangle.y -= subtitle.sub.rectangle.height * (piece->content->subtitle->y_scale() - 1);
825
826         PlayerSubtitles ps;
827         ps.image.push_back (subtitle.sub);
828         DCPTimePeriod period (content_time_to_dcp (piece, subtitle.period().from), content_time_to_dcp (piece, subtitle.period().to));
829
830         if (piece->content->subtitle->use() && (piece->content->subtitle->burn() || _always_burn_subtitles)) {
831                 _subtitles.push_back (make_pair (ps, period));
832         } else {
833                 Subtitle (ps, period);
834         }
835 }
836
837 void
838 Player::text_subtitle (weak_ptr<Piece> wp, ContentTextSubtitle subtitle)
839 {
840         shared_ptr<Piece> piece = wp.lock ();
841         if (!piece) {
842                 return;
843         }
844
845         PlayerSubtitles ps;
846         DCPTimePeriod const period (content_time_to_dcp (piece, subtitle.period().from), content_time_to_dcp (piece, subtitle.period().to));
847
848         BOOST_FOREACH (dcp::SubtitleString s, subtitle.subs) {
849                 s.set_h_position (s.h_position() + piece->content->subtitle->x_offset ());
850                 s.set_v_position (s.v_position() + piece->content->subtitle->y_offset ());
851                 float const xs = piece->content->subtitle->x_scale();
852                 float const ys = piece->content->subtitle->y_scale();
853                 float size = s.size();
854
855                 /* Adjust size to express the common part of the scaling;
856                    e.g. if xs = ys = 0.5 we multiply size by 0.5 (the factor is max(xs, ys)).
857                 */
858                 if (xs > 1e-5 && ys > 1e-5) {
859                         size *= 1 / min (1 / xs, 1 / ys);
860                 }
861                 s.set_size (size);
862
863                 /* Then express aspect ratio changes */
864                 if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) {
865                         s.set_aspect_adjust (xs / ys);
866                 }
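                /* Illustrative examples: xs = ys = 0.5 halves the size and leaves the aspect alone;
                   xs = 0.5, ys = 1 leaves the size alone and sets aspect_adjust to 0.5.
                */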
867
868                 s.set_in (dcp::Time(period.from.seconds(), 1000));
869                 s.set_out (dcp::Time(period.to.seconds(), 1000));
870                 ps.text.push_back (SubtitleString (s, piece->content->subtitle->outline_width()));
871                 ps.add_fonts (piece->content->subtitle->fonts ());
872         }
873
874         if (piece->content->subtitle->use() && (piece->content->subtitle->burn() || _always_burn_subtitles)) {
875                 _subtitles.push_back (make_pair (ps, period));
876         } else {
877                 Subtitle (ps, period);
878         }
879 }
880
881 void
882 Player::seek (DCPTime time, bool accurate)
883 {
884         if (_audio_processor) {
885                 _audio_processor->flush ();
886         }
887
888         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
889                 if (i->content->position() <= time && time < i->content->end()) {
890                         i->decoder->seek (dcp_to_content_time (i, time), accurate);
891                         i->done = false;
892                 }
893         }
894
895         if (accurate) {
896                 _last_video_time = time - one_video_frame ();
897                 _last_audio_time = time;
898         } else {
899                 _last_video_time = optional<DCPTime> ();
900                 _last_audio_time = optional<DCPTime> ();
901         }
902 }
903
904 shared_ptr<Resampler>
905 Player::resampler (shared_ptr<const AudioContent> content, AudioStreamPtr stream, bool create)
906 {
907         ResamplerMap::const_iterator i = _resamplers.find (make_pair (content, stream));
908         if (i != _resamplers.end ()) {
909                 return i->second;
910         }
911
912         if (!create) {
913                 return shared_ptr<Resampler> ();
914         }
915
916         LOG_GENERAL (
917                 "Creating new resampler from %1 to %2 with %3 channels",
918                 stream->frame_rate(),
919                 content->resampled_frame_rate(),
920                 stream->channels()
921                 );
922
923         shared_ptr<Resampler> r (
924                 new Resampler (stream->frame_rate(), content->resampled_frame_rate(), stream->channels())
925                 );
926
927         _resamplers[make_pair(content, stream)] = r;
928         return r;
929 }
930
931 void
932 Player::fill_video (DCPTimePeriod period)
933 {
934         /* XXX: this may not work for 3D */
935         BOOST_FOREACH (DCPTimePeriod i, subtract(period, _no_video)) {
936                 for (DCPTime j = i.from; j < i.to; j += one_video_frame()) {
937                         if (_playlist->video_content_at(j) && _last_video) {
938                                 Video (shared_ptr<PlayerVideo> (new PlayerVideo (*_last_video)), j);
939                         } else {
940                                 Video (black_player_video_frame(), j);
941                         }
942                 }
943         }
944 }
945
946 void
947 Player::fill_audio (DCPTimePeriod period)
948 {
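        /* Emit silence over `period', skipping any ranges which will be covered by audio referenced
           directly from a DCP (_no_audio).  Silence is emitted in blocks of at most half a second,
           presumably to keep buffer sizes bounded over long gaps.
        */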
949         BOOST_FOREACH (DCPTimePeriod i, subtract(period, _no_audio)) {
950                 DCPTime t = i.from;
951                 while (t < i.to) {
952                         DCPTime block = min (DCPTime::from_seconds (0.5), i.to - t);
953                         Frame const samples = block.frames_round(_film->audio_frame_rate());
954                         if (samples) {
955                                 shared_ptr<AudioBuffers> silence (new AudioBuffers (_film->audio_channels(), samples));
956                                 silence->make_silent ();
957                                 Audio (silence, t);
958                         }
959                         t += block;
960                 }
961         }
962 }
963
964 DCPTime
965 Player::one_video_frame () const
966 {
967         return DCPTime::from_frames (1, _film->video_frame_rate ());
968 }