Fix merging of audio in various circumstances.
[dcpomatic.git] / src / lib / player.cc
1 /*
2     Copyright (C) 2013-2017 Carl Hetherington <cth@carlh.net>
3
4     This file is part of DCP-o-matic.
5
6     DCP-o-matic is free software; you can redistribute it and/or modify
7     it under the terms of the GNU General Public License as published by
8     the Free Software Foundation; either version 2 of the License, or
9     (at your option) any later version.
10
11     DCP-o-matic is distributed in the hope that it will be useful,
12     but WITHOUT ANY WARRANTY; without even the implied warranty of
13     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14     GNU General Public License for more details.
15
16     You should have received a copy of the GNU General Public License
17     along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.
18
19 */
20
21 #include "player.h"
22 #include "film.h"
23 #include "audio_buffers.h"
24 #include "content_audio.h"
25 #include "dcp_content.h"
26 #include "job.h"
27 #include "image.h"
28 #include "raw_image_proxy.h"
29 #include "ratio.h"
30 #include "log.h"
31 #include "render_subtitles.h"
32 #include "config.h"
33 #include "content_video.h"
34 #include "player_video.h"
35 #include "frame_rate_change.h"
36 #include "audio_processor.h"
37 #include "playlist.h"
38 #include "referenced_reel_asset.h"
39 #include "decoder_factory.h"
40 #include "decoder.h"
41 #include "video_decoder.h"
42 #include "audio_decoder.h"
43 #include "subtitle_content.h"
44 #include "subtitle_decoder.h"
45 #include "ffmpeg_content.h"
46 #include "audio_content.h"
47 #include "content_subtitle.h"
48 #include "dcp_decoder.h"
49 #include "image_decoder.h"
50 #include "resampler.h"
51 #include "compose.hpp"
52 #include <dcp/reel.h>
53 #include <dcp/reel_sound_asset.h>
54 #include <dcp/reel_subtitle_asset.h>
55 #include <dcp/reel_picture_asset.h>
56 #include <boost/foreach.hpp>
57 #include <stdint.h>
58 #include <algorithm>
59 #include <iostream>
60
61 #include "i18n.h"
62
63 #define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);
64
65 using std::list;
66 using std::cout;
67 using std::min;
68 using std::max;
70 using std::vector;
71 using std::pair;
72 using std::map;
73 using std::make_pair;
74 using std::copy;
75 using boost::shared_ptr;
76 using boost::weak_ptr;
77 using boost::dynamic_pointer_cast;
78 using boost::optional;
79 using boost::scoped_ptr;
80
81 Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
82         : _film (film)
83         , _playlist (playlist)
84         , _have_valid_pieces (false)
85         , _ignore_video (false)
86         , _ignore_audio (false)
87         , _always_burn_subtitles (false)
88         , _fast (false)
89         , _play_referenced (false)
90         , _audio_merger (_film->audio_frame_rate())
91 {
92         _film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
93         _playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
94         _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::playlist_content_changed, this, _1, _2, _3));
95         set_video_container_size (_film->frame_size ());
96
97         film_changed (Film::AUDIO_PROCESSOR);
98
99         seek (DCPTime (), true);
100 }
101
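/** Rebuild _pieces from the playlist: create a decoder for each piece of content whose files
 *  are present, apply the ignore/referenced settings, connect each decoder's video, audio and
 *  subtitle outputs to this player, and note the periods covered by referenced DCP video and
 *  audio so that we do not emit our own there.
 */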
102 void
103 Player::setup_pieces ()
104 {
105         _pieces.clear ();
106
107         BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
108
109                 if (!i->paths_valid ()) {
110                         continue;
111                 }
112
113                 shared_ptr<Decoder> decoder = decoder_factory (i, _film->log());
114                 FrameRateChange frc (i->active_video_frame_rate(), _film->video_frame_rate());
115
116                 if (!decoder) {
117                         /* Not something that we can decode; e.g. Atmos content */
118                         continue;
119                 }
120
121                 if (decoder->video && _ignore_video) {
122                         decoder->video->set_ignore ();
123                 }
124
125                 if (decoder->audio && _ignore_audio) {
126                         decoder->audio->set_ignore ();
127                 }
128
129                 shared_ptr<DCPDecoder> dcp = dynamic_pointer_cast<DCPDecoder> (decoder);
130                 if (dcp && _play_referenced) {
131                         dcp->set_decode_referenced ();
132                 }
133
134                 shared_ptr<Piece> piece (new Piece (i, decoder, frc));
135                 _pieces.push_back (piece);
136
137                 if (decoder->video) {
138                         decoder->video->Data.connect (bind (&Player::video, this, weak_ptr<Piece> (piece), _1));
139                 }
140
141                 if (decoder->audio) {
142                         decoder->audio->Data.connect (bind (&Player::audio, this, weak_ptr<Piece> (piece), _1, _2));
143                 }
144
145                 if (decoder->subtitle) {
146                         decoder->subtitle->ImageData.connect (bind (&Player::image_subtitle, this, weak_ptr<Piece> (piece), _1));
147                         decoder->subtitle->TextData.connect (bind (&Player::text_subtitle, this, weak_ptr<Piece> (piece), _1));
148                 }
149         }
150
151         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
152                 if (i->content->audio) {
153                         BOOST_FOREACH (AudioStreamPtr j, i->content->audio->streams()) {
154                                 _stream_states[j] = StreamState (i, i->content->position ());
155                         }
156                 }
157         }
158
159         if (!_play_referenced) {
160                 BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
161                         shared_ptr<DCPContent> dc = dynamic_pointer_cast<DCPContent> (i->content);
162                         if (dc) {
163                                 if (dc->reference_video()) {
164                                         _no_video.push_back (DCPTimePeriod (dc->position(), dc->end()));
165                                 }
166                                 if (dc->reference_audio()) {
167                                         _no_audio.push_back (DCPTimePeriod (dc->position(), dc->end()));
168                                 }
169                         }
170                 }
171         }
172
173         _have_valid_pieces = true;
174 }
175
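/** React to a change in a property of some content: changes which require the pieces to be
 *  rebuilt invalidate them and emit Changed; other changes just emit Changed.
 */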
176 void
177 Player::playlist_content_changed (weak_ptr<Content> w, int property, bool frequent)
178 {
179         shared_ptr<Content> c = w.lock ();
180         if (!c) {
181                 return;
182         }
183
184         if (
185                 property == ContentProperty::POSITION ||
186                 property == ContentProperty::LENGTH ||
187                 property == ContentProperty::TRIM_START ||
188                 property == ContentProperty::TRIM_END ||
189                 property == ContentProperty::PATH ||
190                 property == VideoContentProperty::FRAME_TYPE ||
191                 property == DCPContentProperty::NEEDS_ASSETS ||
192                 property == DCPContentProperty::NEEDS_KDM ||
193                 property == SubtitleContentProperty::COLOUR ||
194                 property == SubtitleContentProperty::OUTLINE ||
195                 property == SubtitleContentProperty::SHADOW ||
196                 property == SubtitleContentProperty::EFFECT_COLOUR ||
197                 property == FFmpegContentProperty::SUBTITLE_STREAM ||
198                 property == VideoContentProperty::COLOUR_CONVERSION
199                 ) {
200
201                 _have_valid_pieces = false;
202                 Changed (frequent);
203
204         } else if (
205                 property == SubtitleContentProperty::LINE_SPACING ||
206                 property == SubtitleContentProperty::OUTLINE_WIDTH ||
207                 property == SubtitleContentProperty::Y_SCALE ||
208                 property == SubtitleContentProperty::FADE_IN ||
209                 property == SubtitleContentProperty::FADE_OUT ||
210                 property == ContentProperty::VIDEO_FRAME_RATE ||
211                 property == SubtitleContentProperty::USE ||
212                 property == SubtitleContentProperty::X_OFFSET ||
213                 property == SubtitleContentProperty::Y_OFFSET ||
214                 property == SubtitleContentProperty::X_SCALE ||
215                 property == SubtitleContentProperty::FONTS ||
216                 property == VideoContentProperty::CROP ||
217                 property == VideoContentProperty::SCALE ||
218                 property == VideoContentProperty::FADE_IN ||
219                 property == VideoContentProperty::FADE_OUT
220                 ) {
221
222                 Changed (frequent);
223         }
224 }
225
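/** Set the size of the container into which video will be put, and make a black frame of that size */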
226 void
227 Player::set_video_container_size (dcp::Size s)
228 {
229         _video_container_size = s;
230
231         _black_image.reset (new Image (AV_PIX_FMT_RGB24, _video_container_size, true));
232         _black_image->make_black ();
233 }
234
235 void
236 Player::playlist_changed ()
237 {
238         _have_valid_pieces = false;
239         Changed (false);
240 }
241
242 void
243 Player::film_changed (Film::Property p)
244 {
245         /* Here we should notice Film properties that affect our output, and
246            alert listeners that our output now would be different to how it was
247            last time we were run.
248         */
249
250         if (p == Film::CONTAINER) {
251                 Changed (false);
252         } else if (p == Film::VIDEO_FRAME_RATE) {
253                 /* Pieces contain a FrameRateChange which contains the DCP frame rate,
254                    so we need new pieces here.
255                 */
256                 _have_valid_pieces = false;
257                 Changed (false);
258         } else if (p == Film::AUDIO_PROCESSOR) {
259                 if (_film->audio_processor ()) {
260                         _audio_processor = _film->audio_processor()->clone (_film->audio_frame_rate ());
261                 }
262         }
263 }
264
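/** Scale image subtitles up to fit _video_container_size and convert their fractional
 *  positions into pixel positions within that container.
 */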
265 list<PositionImage>
266 Player::transform_image_subtitles (list<ImageSubtitle> subs) const
267 {
268         list<PositionImage> all;
269
270         for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
271                 if (!i->image) {
272                         continue;
273                 }
274
275                 /* We will scale the subtitle up to fit _video_container_size */
276                 dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);
277
278                 /* Then we need a corrective translation, consisting of two parts:
279                  *
280                  * 1.  that which is the result of the scaling of the subtitle by _video_container_size; this will be
281                  *     rect.x * _video_container_size.width and rect.y * _video_container_size.height.
282                  *
283                  * 2.  that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
284                  *     (width_before_subtitle_scale * (1 - subtitle_x_scale) / 2) and
285                  *     (height_before_subtitle_scale * (1 - subtitle_y_scale) / 2).
286                  *
287                  * Combining these two translations gives these expressions.
288                  */
289
290                 all.push_back (
291                         PositionImage (
292                                 i->image->scale (
293                                         scaled_size,
294                                         dcp::YUV_TO_RGB_REC601,
295                                         i->image->pixel_format (),
296                                         true,
297                                         _fast
298                                         ),
299                                 Position<int> (
300                                         lrint (_video_container_size.width * i->rectangle.x),
301                                         lrint (_video_container_size.height * i->rectangle.y)
302                                         )
303                                 )
304                         );
305         }
306
307         return all;
308 }
309
310 shared_ptr<PlayerVideo>
311 Player::black_player_video_frame () const
312 {
313         return shared_ptr<PlayerVideo> (
314                 new PlayerVideo (
315                         shared_ptr<const ImageProxy> (new RawImageProxy (_black_image)),
316                         Crop (),
317                         optional<double> (),
318                         _video_container_size,
319                         _video_container_size,
320                         EYES_BOTH,
321                         PART_WHOLE,
322                         PresetColourConversion::all().front().conversion
323                 )
324         );
325 }
326
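/** @return the frame index within the given piece's content video which corresponds to DCP time t */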
327 Frame
328 Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
329 {
330         DCPTime s = t - piece->content->position ();
331         s = min (piece->content->length_after_trim(), s);
332         s = max (DCPTime(), s + DCPTime (piece->content->trim_start(), piece->frc));
333
334         /* It might seem more logical here to convert s to a ContentTime (using the FrameRateChange)
335            then convert that ContentTime to frames at the content's rate.  However this fails for
336            situations like content at 29.9978733fps, DCP at 30fps.  The accuracy of the Time type is not
337            enough to distinguish between the two with low values of time (e.g. 3200 in Time units).
338
339            Instead we convert the DCPTime using the DCP video rate then account for any skip/repeat.
340         */
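        /* A rough worked example, assuming FrameRateChange::factor() is 0.5 for skipped
           content: 48fps content in a 24fps DCP has frc.factor() == 0.5, so one second of
           DCPTime is 24 DCP frames and 24 / 0.5 gives content frame 48.
        */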
341         return s.frames_floor (piece->frc.dcp) / piece->frc.factor ();
342 }
343
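/** @return the DCP time at which frame f of the given piece's content video is shown */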
344 DCPTime
345 Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
346 {
347         /* See comment in dcp_to_content_video */
348         DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime (piece->content->trim_start (), piece->frc);
349         return max (DCPTime (), d + piece->content->position ());
350 }
351
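/** @return the sample index, at the DCP audio rate, within the given piece's resampled audio which corresponds to DCP time t */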
352 Frame
353 Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
354 {
355         DCPTime s = t - piece->content->position ();
356         s = min (piece->content->length_after_trim(), s);
357         /* See notes in dcp_to_content_video */
358         return max (DCPTime (), DCPTime (piece->content->trim_start (), piece->frc) + s).frames_floor (_film->audio_frame_rate ());
359 }
360
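/** @return the DCP time which corresponds to sample f of the given piece's resampled audio */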
361 DCPTime
362 Player::resampled_audio_to_dcp (shared_ptr<const Piece> piece, Frame f) const
363 {
364         /* See comment in dcp_to_content_video */
365         DCPTime const d = DCPTime::from_frames (f, _film->audio_frame_rate()) - DCPTime (piece->content->trim_start (), piece->frc);
366         return max (DCPTime (), d + piece->content->position ());
367 }
368
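/** @return the ContentTime within the given piece which corresponds to DCP time t */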
369 ContentTime
370 Player::dcp_to_content_time (shared_ptr<const Piece> piece, DCPTime t) const
371 {
372         DCPTime s = t - piece->content->position ();
373         s = min (piece->content->length_after_trim(), s);
374         return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
375 }
376
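/** @return the DCP time which corresponds to ContentTime t within the given piece */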
377 DCPTime
378 Player::content_time_to_dcp (shared_ptr<const Piece> piece, ContentTime t) const
379 {
380         return max (DCPTime (), DCPTime (t - piece->content->trim_start(), piece->frc) + piece->content->position());
381 }
382
383 list<shared_ptr<Font> >
384 Player::get_subtitle_fonts ()
385 {
386         if (!_have_valid_pieces) {
387                 setup_pieces ();
388         }
389
390         list<shared_ptr<Font> > fonts;
391         BOOST_FOREACH (shared_ptr<Piece>& p, _pieces) {
392                 if (p->content->subtitle) {
393                         /* XXX: things may go wrong if there are duplicate font IDs
394                            with different font files.
395                         */
396                         list<shared_ptr<Font> > f = p->content->subtitle->fonts ();
397                         copy (f.begin(), f.end(), back_inserter (fonts));
398                 }
399         }
400
401         return fonts;
402 }
403
404 /** Set this player never to produce any video data */
405 void
406 Player::set_ignore_video ()
407 {
408         _ignore_video = true;
409 }
410
411 /** Set this player never to produce any audio data */
412 void
413 Player::set_ignore_audio ()
414 {
415         _ignore_audio = true;
416 }
417
418 /** Set whether or not this player should always burn text subtitles into the image,
419  *  regardless of the content settings.
420  *  @param burn true to always burn subtitles, false to obey content settings.
421  */
422 void
423 Player::set_always_burn_subtitles (bool burn)
424 {
425         _always_burn_subtitles = burn;
426 }
427
428 void
429 Player::set_fast ()
430 {
431         _fast = true;
432         _have_valid_pieces = false;
433 }
434
435 void
436 Player::set_play_referenced ()
437 {
438         _play_referenced = true;
439         _have_valid_pieces = false;
440 }
441
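/** @return reel assets (picture, sound, subtitle) from any DCP content which is marked to be
 *  referenced from this film rather than re-encoded, with entry points and durations adjusted
 *  for the content's trims.
 */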
442 list<ReferencedReelAsset>
443 Player::get_reel_assets ()
444 {
445         list<ReferencedReelAsset> a;
446
447         BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
448                 shared_ptr<DCPContent> j = dynamic_pointer_cast<DCPContent> (i);
449                 if (!j) {
450                         continue;
451                 }
452
453                 scoped_ptr<DCPDecoder> decoder;
454                 try {
455                         decoder.reset (new DCPDecoder (j, _film->log()));
456                 } catch (...) {
457                         return a;
458                 }
459
460                 int64_t offset = 0;
461                 BOOST_FOREACH (shared_ptr<dcp::Reel> k, decoder->reels()) {
462
463                         DCPOMATIC_ASSERT (j->video_frame_rate ());
464                         double const cfr = j->video_frame_rate().get();
465                         Frame const trim_start = j->trim_start().frames_round (cfr);
466                         Frame const trim_end = j->trim_end().frames_round (cfr);
467                         int const ffr = _film->video_frame_rate ();
468
469                         DCPTime const from = i->position() + DCPTime::from_frames (offset, _film->video_frame_rate());
470                         if (j->reference_video ()) {
471                                 shared_ptr<dcp::ReelAsset> ra = k->main_picture ();
472                                 DCPOMATIC_ASSERT (ra);
473                                 ra->set_entry_point (ra->entry_point() + trim_start);
474                                 ra->set_duration (ra->duration() - trim_start - trim_end);
475                                 a.push_back (
476                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
477                                         );
478                         }
479
480                         if (j->reference_audio ()) {
481                                 shared_ptr<dcp::ReelAsset> ra = k->main_sound ();
482                                 DCPOMATIC_ASSERT (ra);
483                                 ra->set_entry_point (ra->entry_point() + trim_start);
484                                 ra->set_duration (ra->duration() - trim_start - trim_end);
485                                 a.push_back (
486                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
487                                         );
488                         }
489
490                         if (j->reference_subtitle ()) {
491                                 shared_ptr<dcp::ReelAsset> ra = k->main_subtitle ();
492                                 DCPOMATIC_ASSERT (ra);
493                                 ra->set_entry_point (ra->entry_point() + trim_start);
494                                 ra->set_duration (ra->duration() - trim_start - trim_end);
495                                 a.push_back (
496                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
497                                         );
498                         }
499
500                         /* Assume that main picture duration is the length of the reel */
501                         offset += k->main_picture()->duration ();
502                 }
503         }
504
505         return a;
506 }
507
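/** @return pieces whose content overlaps the period [from, to) and satisfies the given predicate */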
508 list<shared_ptr<Piece> >
509 Player::overlaps (DCPTime from, DCPTime to, boost::function<bool (Content *)> valid)
510 {
511         if (!_have_valid_pieces) {
512                 setup_pieces ();
513         }
514
515         list<shared_ptr<Piece> > overlaps;
516         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
517                 if (valid (i->content.get ()) && i->content->position() < to && i->content->end() > from) {
518                         overlaps.push_back (i);
519                 }
520         }
521
522         return overlaps;
523 }
524
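/** Do some decoding: run a pass of the decoder whose next output is earliest in DCP time,
 *  then emit (via the Audio signal) any merged audio which every active stream has now
 *  delivered, filling any gaps with silence.
 *  @return true if there is no more content to decode (after filling the rest of the
 *  playlist with black and silence), otherwise false.
 *
 *  A rough sketch of how a caller might drive the player; handle_video and handle_audio
 *  are hypothetical functions, not part of this codebase:
 *
 *      shared_ptr<Player> player (new Player (film, playlist));
 *      player->Video.connect (bind (&handle_video, _1, _2));
 *      player->Audio.connect (bind (&handle_audio, _1, _2));
 *      while (!player->pass ()) {}
 */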
525 bool
526 Player::pass ()
527 {
528         if (!_have_valid_pieces) {
529                 setup_pieces ();
530         }
531
532         shared_ptr<Piece> earliest;
533         DCPTime earliest_content;
534
535         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
536                 if (!i->done) {
537                         DCPTime const t = i->content->position() + DCPTime (i->decoder->position(), i->frc);
538                         if (!earliest || t < earliest_content) {
539                                 earliest_content = t;
540                                 earliest = i;
541                         }
542                 }
543         }
544
545         if (!earliest) {
546                 /* No more content; fill up with silent black */
547                 DCPTimePeriod remaining_video (DCPTime(), _playlist->length());
548                 if (_last_time) {
549                         remaining_video.from = _last_time.get() + one_video_frame();
550                 }
551                 fill_video (remaining_video);
552                 fill_audio (DCPTimePeriod (_last_audio_time, _playlist->length()));
553                 return true;
554         }
555
556         earliest->done = earliest->decoder->pass ();
557
558         /* Emit any audio that is ready */
559
560         DCPTime pull_from = _playlist->length ();
561         for (map<AudioStreamPtr, StreamState>::const_iterator i = _stream_states.begin(); i != _stream_states.end(); ++i) {
562                 if (!i->second.piece->done && i->second.last_push_end < pull_from) {
563                         pull_from = i->second.last_push_end;
564                 }
565         }
566
567         list<pair<shared_ptr<AudioBuffers>, DCPTime> > audio = _audio_merger.pull (pull_from);
568         for (list<pair<shared_ptr<AudioBuffers>, DCPTime> >::iterator i = audio.begin(); i != audio.end(); ++i) {
569                 DCPOMATIC_ASSERT (i->second >= _last_audio_time);
570                 fill_audio (DCPTimePeriod (_last_audio_time, i->second));
571                 Audio (i->first, i->second);
572                 _last_audio_time = i->second + DCPTime::from_frames(i->first->frames(), _film->audio_frame_rate());
573         }
574
575         return false;
576 }
577
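/** Handle a video frame coming from a piece's decoder: drop alternate frames where the frame
 *  rate change requires it, discard frames outside the content's period, attach any subtitles
 *  which overlap the frame, fill any gap since the last frame we emitted, then emit it via
 *  the Video signal.
 */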
578 void
579 Player::video (weak_ptr<Piece> wp, ContentVideo video)
580 {
581         shared_ptr<Piece> piece = wp.lock ();
582         if (!piece) {
583                 return;
584         }
585
586         FrameRateChange frc(piece->content->active_video_frame_rate(), _film->video_frame_rate());
587         if (frc.skip && (video.frame % 2) == 1) {
588                 return;
589         }
590
591         /* Time and period of the frame we will emit */
592         DCPTime const time = content_video_to_dcp (piece, video.frame);
593         DCPTimePeriod const period (time, time + one_video_frame());
594
595         /* Discard if it's outside the content's period */
596         if (time < piece->content->position() || time >= piece->content->end()) {
597                 return;
598         }
599
600         /* Get any subtitles */
601
602         optional<PositionImage> subtitles;
603
604         for (list<pair<PlayerSubtitles, DCPTimePeriod> >::const_iterator i = _subtitles.begin(); i != _subtitles.end(); ++i) {
605
606                 if (!i->second.overlap (period)) {
607                         continue;
608                 }
609
610                 list<PositionImage> sub_images;
611
612                 /* Image subtitles */
613                 list<PositionImage> c = transform_image_subtitles (i->first.image);
614                 copy (c.begin(), c.end(), back_inserter (sub_images));
615
616                 /* Text subtitles (rendered to an image) */
617                 if (!i->first.text.empty ()) {
618                         list<PositionImage> s = render_subtitles (i->first.text, i->first.fonts, _video_container_size, time);
619                         copy (s.begin (), s.end (), back_inserter (sub_images));
620                 }
621
622                 if (!sub_images.empty ()) {
623                         subtitles = merge (sub_images);
624                 }
625         }
626
627         /* Fill gaps */
628
629         if (_last_time) {
630                 fill_video (DCPTimePeriod (_last_time.get() + one_video_frame(), time));
631         }
632
633         _last_video.reset (
634                 new PlayerVideo (
635                         video.image,
636                         piece->content->video->crop (),
637                         piece->content->video->fade (video.frame),
638                         piece->content->video->scale().size (
639                                 piece->content->video, _video_container_size, _film->frame_size ()
640                                 ),
641                         _video_container_size,
642                         video.eyes,
643                         video.part,
644                         piece->content->video->colour_conversion ()
645                         )
646                 );
647
648         if (subtitles) {
649                 _last_video->set_subtitle (subtitles.get ());
650         }
651
652         _last_time = time;
653
654         Video (_last_video, *_last_time);
655
656         /* Discard any subtitles we no longer need */
657
658         for (list<pair<PlayerSubtitles, DCPTimePeriod> >::iterator i = _subtitles.begin (); i != _subtitles.end(); ) {
659                 list<pair<PlayerSubtitles, DCPTimePeriod> >::iterator tmp = i;
660                 ++tmp;
661
662                 if (i->second.to < time) {
663                         _subtitles.erase (i);
664                 }
665
666                 i = tmp;
667         }
668 }
669
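/** Handle audio coming from a piece's decoder: apply the content's gain, resample to the DCP
 *  rate if necessary, discard anything before the content's start, remap to the DCP channel
 *  layout, run any audio processor and push the result into the merger at its DCP time.
 */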
670 void
671 Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_audio)
672 {
673         shared_ptr<Piece> piece = wp.lock ();
674         if (!piece) {
675                 return;
676         }
677
678         shared_ptr<AudioContent> content = piece->content->audio;
679         DCPOMATIC_ASSERT (content);
680
681         /* Gain */
682         if (content->gain() != 0) {
683                 shared_ptr<AudioBuffers> gain (new AudioBuffers (content_audio.audio));
684                 gain->apply_gain (content->gain ());
685                 content_audio.audio = gain;
686         }
687
688         /* Resample */
689         if (stream->frame_rate() != content->resampled_frame_rate()) {
690                 shared_ptr<Resampler> r = resampler (content, stream, true);
691                 pair<shared_ptr<const AudioBuffers>, Frame> ro = r->run (content_audio.audio, content_audio.frame);
692                 content_audio.audio = ro.first;
693                 content_audio.frame = ro.second;
694         }
695
696         /* XXX: end-trimming used to be checked here */
697
698         /* Compute time in the DCP */
699         DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame) + DCPTime::from_seconds (content->delay() / 1000.0);
700
701         /* Remove anything that comes before the start of the content */
702         if (time < piece->content->position()) {
703                 DCPTime const discard_time = piece->content->position() - time;
704                 Frame discard_frames = discard_time.frames_round(_film->audio_frame_rate());
705                 Frame remaining_frames = content_audio.audio->frames() - discard_frames;
706                 if (remaining_frames <= 0) {
707                         /* This audio is entirely discarded */
708                         return;
709                 }
710                 shared_ptr<AudioBuffers> cut (new AudioBuffers (content_audio.audio->channels(), remaining_frames));
711                 cut->copy_from (content_audio.audio.get(), remaining_frames, discard_frames, 0);
712                 content_audio.audio = cut;
713                 time += discard_time;
714         }
715
716         /* Remap channels */
717         shared_ptr<AudioBuffers> dcp_mapped (new AudioBuffers (_film->audio_channels(), content_audio.audio->frames()));
718         dcp_mapped->make_silent ();
719
720         AudioMapping map = stream->mapping ();
721         for (int i = 0; i < map.input_channels(); ++i) {
722                 for (int j = 0; j < dcp_mapped->channels(); ++j) {
723                         if (map.get (i, static_cast<dcp::Channel> (j)) > 0) {
724                                 dcp_mapped->accumulate_channel (
725                                         content_audio.audio.get(),
726                                         i,
727                                         static_cast<dcp::Channel> (j),
728                                         map.get (i, static_cast<dcp::Channel> (j))
729                                         );
730                         }
731                 }
732         }
733
734         content_audio.audio = dcp_mapped;
735
736         if (_audio_processor) {
737                 content_audio.audio = _audio_processor->run (content_audio.audio, _film->audio_channels ());
738         }
739
740         _audio_merger.push (content_audio.audio, time);
741
742         DCPOMATIC_ASSERT (_stream_states.find (stream) != _stream_states.end ());
743         _stream_states[stream].last_push_end = time + DCPTime::from_frames (content_audio.audio->frames(), _film->audio_frame_rate());
744 }
745
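/** Handle an image subtitle from a decoder: apply the content's offsets and scales, then
 *  either store it for burning into the video or emit it via the Subtitle signal.
 */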
746 void
747 Player::image_subtitle (weak_ptr<Piece> wp, ContentImageSubtitle subtitle)
748 {
749         shared_ptr<Piece> piece = wp.lock ();
750         if (!piece) {
751                 return;
752         }
753
754         /* Apply content's subtitle offsets */
755         subtitle.sub.rectangle.x += piece->content->subtitle->x_offset ();
756         subtitle.sub.rectangle.y += piece->content->subtitle->y_offset ();
757
758         /* Apply content's subtitle scale */
759         subtitle.sub.rectangle.width *= piece->content->subtitle->x_scale ();
760         subtitle.sub.rectangle.height *= piece->content->subtitle->y_scale ();
761
762         /* Apply a corrective translation to keep the subtitle centred after that scale */
763         subtitle.sub.rectangle.x -= subtitle.sub.rectangle.width * (piece->content->subtitle->x_scale() - 1);
764         subtitle.sub.rectangle.y -= subtitle.sub.rectangle.height * (piece->content->subtitle->y_scale() - 1);
765
766         PlayerSubtitles ps;
767         ps.image.push_back (subtitle.sub);
768         DCPTimePeriod period (content_time_to_dcp (piece, subtitle.period().from), content_time_to_dcp (piece, subtitle.period().to));
769
770         if (piece->content->subtitle->use() && (piece->content->subtitle->burn() || _always_burn_subtitles)) {
771                 _subtitles.push_back (make_pair (ps, period));
772         } else {
773                 Subtitle (ps, period);
774         }
775 }
776
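/** Handle a text subtitle from a decoder: apply the content's offsets, scales and timing,
 *  then either store it for burning into the video or emit it via the Subtitle signal.
 */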
777 void
778 Player::text_subtitle (weak_ptr<Piece> wp, ContentTextSubtitle subtitle)
779 {
780         shared_ptr<Piece> piece = wp.lock ();
781         if (!piece) {
782                 return;
783         }
784
785         PlayerSubtitles ps;
786         DCPTimePeriod const period (content_time_to_dcp (piece, subtitle.period().from), content_time_to_dcp (piece, subtitle.period().to));
787
788         BOOST_FOREACH (dcp::SubtitleString s, subtitle.subs) {
789                 s.set_h_position (s.h_position() + piece->content->subtitle->x_offset ());
790                 s.set_v_position (s.v_position() + piece->content->subtitle->y_offset ());
791                 float const xs = piece->content->subtitle->x_scale();
792                 float const ys = piece->content->subtitle->y_scale();
793                 float size = s.size();
794
795                 /* Adjust size to express the common part of the scaling;
796                    e.g. if xs = ys = 0.5 we scale size by 0.5 (i.e. by max (xs, ys)).
797                 */
798                 if (xs > 1e-5 && ys > 1e-5) {
799                         size *= 1 / min (1 / xs, 1 / ys);
800                 }
801                 s.set_size (size);
802
803                 /* Then express aspect ratio changes */
804                 if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) {
805                         s.set_aspect_adjust (xs / ys);
806                 }
807
808                 s.set_in (dcp::Time(period.from.seconds(), 1000));
809                 s.set_out (dcp::Time(period.to.seconds(), 1000));
810                 ps.text.push_back (SubtitleString (s, piece->content->subtitle->outline_width()));
811                 ps.add_fonts (piece->content->subtitle->fonts ());
812         }
813
814         if (piece->content->subtitle->use() && (piece->content->subtitle->burn() || _always_burn_subtitles)) {
815                 _subtitles.push_back (make_pair (ps, period));
816         } else {
817                 Subtitle (ps, period);
818         }
819 }
820
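/** Seek each piece which contains the given time.
 *  @param time DCP time to seek to.
 *  @param accurate true to seek precisely to the requested time; false to allow decoders to
 *  seek to a convenient nearby point, which may be faster but less exact.
 */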
821 void
822 Player::seek (DCPTime time, bool accurate)
823 {
824         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
825                 if (i->content->position() <= time && time < i->content->end()) {
826                         i->decoder->seek (dcp_to_content_time (i, time), accurate);
827                         i->done = false;
828                 }
829         }
830
831         if (accurate) {
832                 _last_time = time - one_video_frame ();
833         } else {
834                 _last_time = optional<DCPTime> ();
835         }
836 }
837
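/** @return the Resampler for the given content/stream pair, creating one if necessary when
 *  create is true, otherwise a null pointer if none exists.
 */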
838 shared_ptr<Resampler>
839 Player::resampler (shared_ptr<const AudioContent> content, AudioStreamPtr stream, bool create)
840 {
841         ResamplerMap::const_iterator i = _resamplers.find (make_pair (content, stream));
842         if (i != _resamplers.end ()) {
843                 return i->second;
844         }
845
846         if (!create) {
847                 return shared_ptr<Resampler> ();
848         }
849
850         LOG_GENERAL (
851                 "Creating new resampler from %1 to %2 with %3 channels",
852                 stream->frame_rate(),
853                 content->resampled_frame_rate(),
854                 stream->channels()
855                 );
856
857         shared_ptr<Resampler> r (
858                 new Resampler (stream->frame_rate(), content->resampled_frame_rate(), stream->channels())
859                 );
860
861         _resamplers[make_pair(content, stream)] = r;
862         return r;
863 }
864
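/** Emit video to fill the given period, except where referenced DCP video covers it: repeat
 *  the last frame where the playlist has video content at that time, and emit black otherwise.
 */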
865 void
866 Player::fill_video (DCPTimePeriod period)
867 {
868         /* XXX: this may not work for 3D */
869         BOOST_FOREACH (DCPTimePeriod i, subtract(period, _no_video)) {
870                 for (DCPTime j = i.from; j < i.to; j += one_video_frame()) {
871                         if (_playlist->video_content_at(j) && _last_video) {
872                                 Video (shared_ptr<PlayerVideo> (new PlayerVideo (*_last_video)), j);
873                         } else {
874                                 Video (black_player_video_frame(), j);
875                         }
876                 }
877         }
878 }
879
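/** Emit silence to fill the given period, except where referenced DCP audio covers it */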
880 void
881 Player::fill_audio (DCPTimePeriod period)
882 {
883         BOOST_FOREACH (DCPTimePeriod i, subtract(period, _no_audio)) {
884                 DCPTime t = i.from;
885                 while (t < i.to) {
886                         DCPTime block = min (DCPTime::from_seconds (0.5), i.to - t);
887                         Frame const samples = block.frames_round(_film->audio_frame_rate());
888                         if (samples) {
889                                 shared_ptr<AudioBuffers> silence (new AudioBuffers (_film->audio_channels(), samples));
890                                 silence->make_silent ();
891                                 Audio (silence, t);
892                         }
893                         t += block;
894                 }
895         }
896 }
897
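/** @return the length of one video frame as a DCPTime, at the DCP video frame rate */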
898 DCPTime
899 Player::one_video_frame () const
900 {
901         return DCPTime::from_frames (1, _film->video_frame_rate ());
902 }