Seemingly basically working butler for video.
dcpomatic.git: src/lib/player.cc
/*
    Copyright (C) 2013-2017 Carl Hetherington <cth@carlh.net>

    This file is part of DCP-o-matic.

    DCP-o-matic is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    DCP-o-matic is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.

*/

#include "player.h"
#include "film.h"
#include "audio_buffers.h"
#include "content_audio.h"
#include "dcp_content.h"
#include "job.h"
#include "image.h"
#include "raw_image_proxy.h"
#include "ratio.h"
#include "log.h"
#include "render_subtitles.h"
#include "config.h"
#include "content_video.h"
#include "player_video.h"
#include "frame_rate_change.h"
#include "audio_processor.h"
#include "playlist.h"
#include "referenced_reel_asset.h"
#include "decoder_factory.h"
#include "decoder.h"
#include "video_decoder.h"
#include "audio_decoder.h"
#include "subtitle_content.h"
#include "subtitle_decoder.h"
#include "ffmpeg_content.h"
#include "audio_content.h"
#include "content_subtitle.h"
#include "dcp_decoder.h"
#include "image_decoder.h"
#include "resampler.h"
#include "compose.hpp"
#include <dcp/reel.h>
#include <dcp/reel_sound_asset.h>
#include <dcp/reel_subtitle_asset.h>
#include <dcp/reel_picture_asset.h>
#include <boost/foreach.hpp>
#include <stdint.h>
#include <algorithm>
#include <iostream>

#include "i18n.h"

#define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);

using std::list;
using std::cout;
using std::min;
using std::max;
using std::vector;
using std::pair;
using std::map;
using std::make_pair;
using std::copy;
using boost::shared_ptr;
using boost::weak_ptr;
using boost::dynamic_pointer_cast;
using boost::optional;
using boost::scoped_ptr;

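/** Construct a Player.
 *  @param film Film to play back.
 *  @param playlist Playlist to take content from.
 */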
Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
	: _film (film)
	, _playlist (playlist)
	, _have_valid_pieces (false)
	, _ignore_video (false)
	, _ignore_audio (false)
	, _always_burn_subtitles (false)
	, _fast (false)
	, _play_referenced (false)
	, _audio_merger (_film->audio_frame_rate())
{
	_film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
	_playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
	_playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::playlist_content_changed, this, _1, _2, _3));
	set_video_container_size (_film->frame_size ());

	film_changed (Film::AUDIO_PROCESSOR);

	seek (DCPTime (), true);
}

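/** Create an up-to-date Piece (content plus decoder) for everything in the playlist,
 *  connect the decoders' data signals to this Player and note the periods during
 *  which referenced DCP video or audio means that we should not fill with black
 *  or silence.
 */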
void
Player::setup_pieces ()
{
	_pieces.clear ();

	BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {

		if (!i->paths_valid ()) {
			continue;
		}

		shared_ptr<Decoder> decoder = decoder_factory (i, _film->log());
		FrameRateChange frc (i->active_video_frame_rate(), _film->video_frame_rate());

		if (!decoder) {
			/* Not something that we can decode; e.g. Atmos content */
			continue;
		}

		if (decoder->video && _ignore_video) {
			decoder->video->set_ignore ();
		}

		if (decoder->audio && _ignore_audio) {
			decoder->audio->set_ignore ();
		}

		shared_ptr<DCPDecoder> dcp = dynamic_pointer_cast<DCPDecoder> (decoder);
		if (dcp && _play_referenced) {
			dcp->set_decode_referenced ();
		}

		shared_ptr<Piece> piece (new Piece (i, decoder, frc));
		_pieces.push_back (piece);

		if (decoder->video) {
			decoder->video->Data.connect (bind (&Player::video, this, weak_ptr<Piece> (piece), _1));
		}

		if (decoder->audio) {
			decoder->audio->Data.connect (bind (&Player::audio, this, weak_ptr<Piece> (piece), _1, _2));
		}

		if (decoder->subtitle) {
			decoder->subtitle->ImageData.connect (bind (&Player::image_subtitle, this, weak_ptr<Piece> (piece), _1));
			decoder->subtitle->TextData.connect (bind (&Player::text_subtitle, this, weak_ptr<Piece> (piece), _1));
		}
	}

	BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
		if (i->content->audio) {
			BOOST_FOREACH (AudioStreamPtr j, i->content->audio->streams()) {
				_stream_states[j] = StreamState (i, i->content->position ());
			}
		}
	}

	if (!_play_referenced) {
		BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
			shared_ptr<DCPContent> dc = dynamic_pointer_cast<DCPContent> (i->content);
			if (dc) {
				if (dc->reference_video()) {
					_no_video.push_back (DCPTimePeriod (dc->position(), dc->end()));
				}
				if (dc->reference_audio()) {
					_no_audio.push_back (DCPTimePeriod (dc->position(), dc->end()));
				}
			}
		}
	}

	_have_valid_pieces = true;
}

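/** Handle a change to a property of some content in the playlist.  Some properties
 *  invalidate our pieces, which will then be rebuilt on the next pass or seek;
 *  others only mean that our output would now be different, in which case we just
 *  emit Changed.
 */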
void
Player::playlist_content_changed (weak_ptr<Content> w, int property, bool frequent)
{
	shared_ptr<Content> c = w.lock ();
	if (!c) {
		return;
	}

	if (
		property == ContentProperty::POSITION ||
		property == ContentProperty::LENGTH ||
		property == ContentProperty::TRIM_START ||
		property == ContentProperty::TRIM_END ||
		property == ContentProperty::PATH ||
		property == VideoContentProperty::FRAME_TYPE ||
		property == DCPContentProperty::NEEDS_ASSETS ||
		property == DCPContentProperty::NEEDS_KDM ||
		property == SubtitleContentProperty::COLOUR ||
		property == SubtitleContentProperty::OUTLINE ||
		property == SubtitleContentProperty::SHADOW ||
		property == SubtitleContentProperty::EFFECT_COLOUR ||
		property == FFmpegContentProperty::SUBTITLE_STREAM ||
		property == VideoContentProperty::COLOUR_CONVERSION
		) {

		_have_valid_pieces = false;
		Changed (frequent);

	} else if (
		property == SubtitleContentProperty::LINE_SPACING ||
		property == SubtitleContentProperty::OUTLINE_WIDTH ||
		property == SubtitleContentProperty::Y_SCALE ||
		property == SubtitleContentProperty::FADE_IN ||
		property == SubtitleContentProperty::FADE_OUT ||
		property == ContentProperty::VIDEO_FRAME_RATE ||
		property == SubtitleContentProperty::USE ||
		property == SubtitleContentProperty::X_OFFSET ||
		property == SubtitleContentProperty::Y_OFFSET ||
		property == SubtitleContentProperty::X_SCALE ||
		property == SubtitleContentProperty::FONTS ||
		property == VideoContentProperty::CROP ||
		property == VideoContentProperty::SCALE ||
		property == VideoContentProperty::FADE_IN ||
		property == VideoContentProperty::FADE_OUT
		) {

		Changed (frequent);
	}
}

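/** Set the size of the container that we are putting video into, and rebuild the
 *  black frame used for filling.  Changed is emitted if the size actually changes.
 */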
void
Player::set_video_container_size (dcp::Size s)
{
	if (s == _video_container_size) {
		return;
	}

	_video_container_size = s;

	_black_image.reset (new Image (AV_PIX_FMT_RGB24, _video_container_size, true));
	_black_image->make_black ();

	Changed (false);
}

void
Player::playlist_changed ()
{
	_have_valid_pieces = false;
	Changed (false);
}

void
Player::film_changed (Film::Property p)
{
	/* Here we should notice Film properties that affect our output, and
	   alert listeners that our output now would be different to how it was
	   last time we were run.
	*/

	if (p == Film::CONTAINER) {
		Changed (false);
	} else if (p == Film::VIDEO_FRAME_RATE) {
		/* Pieces contain a FrameRateChange which contains the DCP frame rate,
		   so we need new pieces here.
		*/
		_have_valid_pieces = false;
		Changed (false);
	} else if (p == Film::AUDIO_PROCESSOR) {
		if (_film->audio_processor ()) {
			_audio_processor = _film->audio_processor()->clone (_film->audio_frame_rate ());
		}
	}
}

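/** Scale and position some image subtitles so that they become PositionImages
 *  within _video_container_size, ready to be merged onto video frames.
 */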
list<PositionImage>
Player::transform_image_subtitles (list<ImageSubtitle> subs) const
{
	list<PositionImage> all;

	for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
		if (!i->image) {
			continue;
		}

		/* We will scale the subtitle up to fit _video_container_size */
		dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);

		/* Then we need a corrective translation, consisting of two parts:
		 *
		 * 1.  that which is the result of the scaling of the subtitle by _video_container_size; this will be
		 *     rect.x * _video_container_size.width and rect.y * _video_container_size.height.
		 *
		 * 2.  that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
		 *     (width_before_subtitle_scale * (1 - subtitle_x_scale) / 2) and
		 *     (height_before_subtitle_scale * (1 - subtitle_y_scale) / 2).
		 *
		 * Combining these two translations gives these expressions.
		 */

		all.push_back (
			PositionImage (
				i->image->scale (
					scaled_size,
					dcp::YUV_TO_RGB_REC601,
					i->image->pixel_format (),
					true,
					_fast
					),
				Position<int> (
					lrint (_video_container_size.width * i->rectangle.x),
					lrint (_video_container_size.height * i->rectangle.y)
					)
				)
			);
	}

	return all;
}

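/** @return a black frame at _video_container_size, used when filling periods that have no video */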
shared_ptr<PlayerVideo>
Player::black_player_video_frame () const
{
	return shared_ptr<PlayerVideo> (
		new PlayerVideo (
			shared_ptr<const ImageProxy> (new RawImageProxy (_black_image)),
			Crop (),
			optional<double> (),
			_video_container_size,
			_video_container_size,
			EYES_BOTH,
			PART_WHOLE,
			PresetColourConversion::all().front().conversion
		)
	);
}

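/** Convert a DCP time to a frame index within a piece of video content, allowing
 *  for the content's position, trim and frame rate change.
 */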
Frame
Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
{
	DCPTime s = t - piece->content->position ();
	s = min (piece->content->length_after_trim(), s);
	s = max (DCPTime(), s + DCPTime (piece->content->trim_start(), piece->frc));

	/* It might seem more logical here to convert s to a ContentTime (using the FrameRateChange)
	   then convert that ContentTime to frames at the content's rate.  However this fails for
	   situations like content at 29.9978733fps, DCP at 30fps.  The accuracy of the Time type is not
	   enough to distinguish between the two with low values of time (e.g. 3200 in Time units).

	   Instead we convert the DCPTime using the DCP video rate then account for any skip/repeat.
	*/
	return s.frames_floor (piece->frc.dcp) / piece->frc.factor ();
}

DCPTime
Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
{
	/* See comment in dcp_to_content_video */
	DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime (piece->content->trim_start (), piece->frc);
	return max (DCPTime (), d + piece->content->position ());
}

Frame
Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
{
	DCPTime s = t - piece->content->position ();
	s = min (piece->content->length_after_trim(), s);
	/* See notes in dcp_to_content_video */
	return max (DCPTime (), DCPTime (piece->content->trim_start (), piece->frc) + s).frames_floor (_film->audio_frame_rate ());
}

DCPTime
Player::resampled_audio_to_dcp (shared_ptr<const Piece> piece, Frame f) const
{
	/* See comment in dcp_to_content_video */
	DCPTime const d = DCPTime::from_frames (f, _film->audio_frame_rate()) - DCPTime (piece->content->trim_start(), piece->frc);
	return max (DCPTime (), d + piece->content->position ());
}

ContentTime
Player::dcp_to_content_time (shared_ptr<const Piece> piece, DCPTime t) const
{
	DCPTime s = t - piece->content->position ();
	s = min (piece->content->length_after_trim(), s);
	return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
}

DCPTime
Player::content_time_to_dcp (shared_ptr<const Piece> piece, ContentTime t) const
{
	return max (DCPTime (), DCPTime (t - piece->content->trim_start(), piece->frc) + piece->content->position());
}

list<shared_ptr<Font> >
Player::get_subtitle_fonts ()
{
	if (!_have_valid_pieces) {
		setup_pieces ();
	}

	list<shared_ptr<Font> > fonts;
	BOOST_FOREACH (shared_ptr<Piece>& p, _pieces) {
		if (p->content->subtitle) {
			/* XXX: things may go wrong if there are duplicate font IDs
			   with different font files.
			*/
			list<shared_ptr<Font> > f = p->content->subtitle->fonts ();
			copy (f.begin(), f.end(), back_inserter (fonts));
		}
	}

	return fonts;
}

/** Set this player never to produce any video data */
void
Player::set_ignore_video ()
{
	_ignore_video = true;
}

/** Set this player never to produce any audio data */
void
Player::set_ignore_audio ()
{
	_ignore_audio = true;
}

/** Set whether or not this player should always burn text subtitles into the image,
 *  regardless of the content settings.
 *  @param burn true to always burn subtitles, false to obey content settings.
 */
void
Player::set_always_burn_subtitles (bool burn)
{
	_always_burn_subtitles = burn;
}

void
Player::set_fast ()
{
	_fast = true;
	_have_valid_pieces = false;
}

void
Player::set_play_referenced ()
{
	_play_referenced = true;
	_have_valid_pieces = false;
}

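/** @return reel assets from any DCP content whose video, audio or subtitle is
 *  marked to be referenced directly, with entry points and durations adjusted
 *  for trim and with the DCP times at which each asset should appear.
 */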
list<ReferencedReelAsset>
Player::get_reel_assets ()
{
	list<ReferencedReelAsset> a;

	BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
		shared_ptr<DCPContent> j = dynamic_pointer_cast<DCPContent> (i);
		if (!j) {
			continue;
		}

		scoped_ptr<DCPDecoder> decoder;
		try {
			decoder.reset (new DCPDecoder (j, _film->log()));
		} catch (...) {
			return a;
		}

		int64_t offset = 0;
		BOOST_FOREACH (shared_ptr<dcp::Reel> k, decoder->reels()) {

			DCPOMATIC_ASSERT (j->video_frame_rate ());
			double const cfr = j->video_frame_rate().get();
			Frame const trim_start = j->trim_start().frames_round (cfr);
			Frame const trim_end = j->trim_end().frames_round (cfr);
			int const ffr = _film->video_frame_rate ();

			DCPTime const from = i->position() + DCPTime::from_frames (offset, _film->video_frame_rate());
			if (j->reference_video ()) {
				shared_ptr<dcp::ReelAsset> ra = k->main_picture ();
				DCPOMATIC_ASSERT (ra);
				ra->set_entry_point (ra->entry_point() + trim_start);
				ra->set_duration (ra->duration() - trim_start - trim_end);
				a.push_back (
					ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
					);
			}

			if (j->reference_audio ()) {
				shared_ptr<dcp::ReelAsset> ra = k->main_sound ();
				DCPOMATIC_ASSERT (ra);
				ra->set_entry_point (ra->entry_point() + trim_start);
				ra->set_duration (ra->duration() - trim_start - trim_end);
				a.push_back (
					ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
					);
			}

			if (j->reference_subtitle ()) {
				shared_ptr<dcp::ReelAsset> ra = k->main_subtitle ();
				DCPOMATIC_ASSERT (ra);
				ra->set_entry_point (ra->entry_point() + trim_start);
				ra->set_duration (ra->duration() - trim_start - trim_end);
				a.push_back (
					ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
					);
			}

			/* Assume that main picture duration is the length of the reel */
			offset += k->main_picture()->duration ();
		}
	}

	return a;
}

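/** @param from Start of the period to check.
 *  @param to End of the period to check.
 *  @param valid Predicate to apply to each piece's content.
 *  @return pieces which satisfy the predicate and whose content overlaps the given period.
 */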
list<shared_ptr<Piece> >
Player::overlaps (DCPTime from, DCPTime to, boost::function<bool (Content *)> valid)
{
	if (!_have_valid_pieces) {
		setup_pieces ();
	}

	list<shared_ptr<Piece> > overlaps;
	BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
		if (valid (i->content.get ()) && i->content->position() < to && i->content->end() > from) {
			overlaps.push_back (i);
		}
	}

	return overlaps;
}

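/** Run the decoder whose output is earliest in the timeline for one pass, emitting
 *  any video, audio and subtitles that become ready, and filling with black and
 *  silence once all content is exhausted.
 *  @return true if there is no more content to process, otherwise false.
 *
 *  A caller might drive playback with something like this sketch, assuming the
 *  Video, Audio and Subtitle signals have already been connected to handlers:
 *
 *      while (!player->pass ()) {
 *              // each pass emits some more output via the signals
 *      }
 */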
bool
Player::pass ()
{
	if (!_have_valid_pieces) {
		setup_pieces ();
	}

	shared_ptr<Piece> earliest;
	DCPTime earliest_content;

	BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
		if (!i->done) {
			DCPTime const t = i->content->position() + DCPTime (i->decoder->position(), i->frc);
			if (!earliest || t < earliest_content) {
				earliest_content = t;
				earliest = i;
			}
		}
	}

	if (!earliest) {
		/* No more content; fill up with silent black */
		DCPTimePeriod remaining_video (DCPTime(), _playlist->length());
		if (_last_time) {
			remaining_video.from = _last_time.get() + one_video_frame();
		}
		fill_video (remaining_video);
		fill_audio (DCPTimePeriod (_last_audio_time, _playlist->length()));
		return true;
	}

	earliest->done = earliest->decoder->pass ();
	if (earliest->done && earliest->content->audio) {
		/* Flush the Player audio system for this piece */
		BOOST_FOREACH (AudioStreamPtr i, earliest->content->audio->streams()) {
			audio_flush (earliest, i);
		}
	}

	/* Emit any audio that is ready */

	DCPTime pull_from = _playlist->length ();
	for (map<AudioStreamPtr, StreamState>::const_iterator i = _stream_states.begin(); i != _stream_states.end(); ++i) {
		if (!i->second.piece->done && i->second.last_push_end < pull_from) {
			pull_from = i->second.last_push_end;
		}
	}

	list<pair<shared_ptr<AudioBuffers>, DCPTime> > audio = _audio_merger.pull (pull_from);
	for (list<pair<shared_ptr<AudioBuffers>, DCPTime> >::iterator i = audio.begin(); i != audio.end(); ++i) {
		DCPOMATIC_ASSERT (i->second >= _last_audio_time);
		fill_audio (DCPTimePeriod (_last_audio_time, i->second));
		Audio (i->first, i->second);
		_last_audio_time = i->second + DCPTime::from_frames(i->first->frames(), _film->audio_frame_rate());
	}

	return false;
}

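/** Handler for video data arriving from a decoder.  Work out the frame's DCP time,
 *  discard it if it falls outside its content's period, attach any subtitles that
 *  overlap it, fill any gap since the last frame we emitted and then emit it via
 *  the Video signal.
 */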
void
Player::video (weak_ptr<Piece> wp, ContentVideo video)
{
	shared_ptr<Piece> piece = wp.lock ();
	if (!piece) {
		return;
	}

	FrameRateChange frc(piece->content->active_video_frame_rate(), _film->video_frame_rate());
	if (frc.skip && (video.frame % 2) == 1) {
		return;
	}

	/* Time and period of the frame we will emit */
	DCPTime const time = content_video_to_dcp (piece, video.frame);
	DCPTimePeriod const period (time, time + one_video_frame());

	/* Discard if it's outside the content's period */
	if (time < piece->content->position() || time >= piece->content->end()) {
		return;
	}

	/* Get any subtitles */

	optional<PositionImage> subtitles;

	for (list<pair<PlayerSubtitles, DCPTimePeriod> >::const_iterator i = _subtitles.begin(); i != _subtitles.end(); ++i) {

		if (!i->second.overlap (period)) {
			continue;
		}

		list<PositionImage> sub_images;

		/* Image subtitles */
		list<PositionImage> c = transform_image_subtitles (i->first.image);
		copy (c.begin(), c.end(), back_inserter (sub_images));

		/* Text subtitles (rendered to an image) */
		if (!i->first.text.empty ()) {
			list<PositionImage> s = render_subtitles (i->first.text, i->first.fonts, _video_container_size, time);
			copy (s.begin (), s.end (), back_inserter (sub_images));
		}

		if (!sub_images.empty ()) {
			subtitles = merge (sub_images);
		}
	}

	/* Fill gaps */

	if (_last_time) {
		fill_video (DCPTimePeriod (_last_time.get() + one_video_frame(), time));
	}

	_last_video.reset (
		new PlayerVideo (
			video.image,
			piece->content->video->crop (),
			piece->content->video->fade (video.frame),
			piece->content->video->scale().size (
				piece->content->video, _video_container_size, _film->frame_size ()
				),
			_video_container_size,
			video.eyes,
			video.part,
			piece->content->video->colour_conversion ()
			)
		);

	if (subtitles) {
		_last_video->set_subtitle (subtitles.get ());
	}

	_last_time = time;

	Video (_last_video, *_last_time);

	/* Discard any subtitles we no longer need */

	for (list<pair<PlayerSubtitles, DCPTimePeriod> >::iterator i = _subtitles.begin (); i != _subtitles.end(); ) {
		list<pair<PlayerSubtitles, DCPTimePeriod> >::iterator tmp = i;
		++tmp;

		if (i->second.to < time) {
			_subtitles.erase (i);
		}

		i = tmp;
	}
}

void
Player::audio_flush (shared_ptr<Piece> piece, AudioStreamPtr stream)
{
	shared_ptr<AudioContent> content = piece->content->audio;
	DCPOMATIC_ASSERT (content);

	shared_ptr<Resampler> r = resampler (content, stream, false);
	if (!r) {
		return;
	}

	pair<shared_ptr<const AudioBuffers>, Frame> ro = r->flush ();
	ContentAudio content_audio;
	content_audio.audio = ro.first;
	content_audio.frame = ro.second;

	/* Compute time in the DCP */
	DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame) + DCPTime::from_seconds (content->delay() / 1000.0);

	audio_transform (content, stream, content_audio, time);
}

/** Do our common processing on some audio */
void
Player::audio_transform (shared_ptr<AudioContent> content, AudioStreamPtr stream, ContentAudio content_audio, DCPTime time)
{
	/* Gain */

	if (content->gain() != 0) {
		shared_ptr<AudioBuffers> gain (new AudioBuffers (content_audio.audio));
		gain->apply_gain (content->gain ());
		content_audio.audio = gain;
	}

	/* Remap */

	shared_ptr<AudioBuffers> dcp_mapped (new AudioBuffers (_film->audio_channels(), content_audio.audio->frames()));
	dcp_mapped->make_silent ();

	AudioMapping map = stream->mapping ();
	for (int i = 0; i < map.input_channels(); ++i) {
		for (int j = 0; j < dcp_mapped->channels(); ++j) {
			if (map.get (i, static_cast<dcp::Channel> (j)) > 0) {
				dcp_mapped->accumulate_channel (
					content_audio.audio.get(),
					i,
					static_cast<dcp::Channel> (j),
					map.get (i, static_cast<dcp::Channel> (j))
					);
			}
		}
	}

	content_audio.audio = dcp_mapped;

	/* Process */

	if (_audio_processor) {
		content_audio.audio = _audio_processor->run (content_audio.audio, _film->audio_channels ());
	}

	/* Push */

	_audio_merger.push (content_audio.audio, time);
	DCPOMATIC_ASSERT (_stream_states.find (stream) != _stream_states.end ());
	_stream_states[stream].last_push_end = time + DCPTime::from_frames (content_audio.audio->frames(), _film->audio_frame_rate());
}

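/** Handler for audio data arriving from a decoder.  Resample it if necessary, trim
 *  anything that falls outside the content's period, then apply gain, remapping and
 *  processing before pushing it into the audio merger.
 */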
void
Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_audio)
{
	shared_ptr<Piece> piece = wp.lock ();
	if (!piece) {
		return;
	}

	shared_ptr<AudioContent> content = piece->content->audio;
	DCPOMATIC_ASSERT (content);

	/* Resample */
	if (stream->frame_rate() != content->resampled_frame_rate()) {
		shared_ptr<Resampler> r = resampler (content, stream, true);
		pair<shared_ptr<const AudioBuffers>, Frame> ro = r->run (content_audio.audio, content_audio.frame);
		content_audio.audio = ro.first;
		content_audio.frame = ro.second;
	}

	/* Compute time in the DCP */
	DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame) + DCPTime::from_seconds (content->delay() / 1000.0);
	/* And the end of this block in the DCP */
	DCPTime end = time + DCPTime::from_frames(content_audio.audio->frames(), content->resampled_frame_rate());

	/* Remove anything that comes before the start or after the end of the content */
	if (time < piece->content->position()) {
		DCPTime const discard_time = piece->content->position() - time;
		Frame discard_frames = discard_time.frames_round(_film->audio_frame_rate());
		Frame remaining_frames = content_audio.audio->frames() - discard_frames;
		if (remaining_frames <= 0) {
			/* This audio is entirely discarded */
			return;
		}
		shared_ptr<AudioBuffers> cut (new AudioBuffers (content_audio.audio->channels(), remaining_frames));
		cut->copy_from (content_audio.audio.get(), remaining_frames, discard_frames, 0);
		content_audio.audio = cut;
		time += discard_time;
	} else if (time > piece->content->end()) {
		/* Discard it all */
		return;
	} else if (end > piece->content->end()) {
		Frame const remaining_frames = DCPTime(piece->content->end() - time).frames_round(_film->audio_frame_rate());
		shared_ptr<AudioBuffers> cut (new AudioBuffers (content_audio.audio->channels(), remaining_frames));
		cut->copy_from (content_audio.audio.get(), remaining_frames, 0, 0);
		content_audio.audio = cut;
	}

	audio_transform (content, stream, content_audio, time);
}

void
Player::image_subtitle (weak_ptr<Piece> wp, ContentImageSubtitle subtitle)
{
	shared_ptr<Piece> piece = wp.lock ();
	if (!piece) {
		return;
	}

	/* Apply content's subtitle offsets */
	subtitle.sub.rectangle.x += piece->content->subtitle->x_offset ();
	subtitle.sub.rectangle.y += piece->content->subtitle->y_offset ();

	/* Apply content's subtitle scale */
	subtitle.sub.rectangle.width *= piece->content->subtitle->x_scale ();
	subtitle.sub.rectangle.height *= piece->content->subtitle->y_scale ();

	/* Apply a corrective translation to keep the subtitle centred after that scale */
	subtitle.sub.rectangle.x -= subtitle.sub.rectangle.width * (piece->content->subtitle->x_scale() - 1);
	subtitle.sub.rectangle.y -= subtitle.sub.rectangle.height * (piece->content->subtitle->y_scale() - 1);

	PlayerSubtitles ps;
	ps.image.push_back (subtitle.sub);
	DCPTimePeriod period (content_time_to_dcp (piece, subtitle.period().from), content_time_to_dcp (piece, subtitle.period().to));

	if (piece->content->subtitle->use() && (piece->content->subtitle->burn() || _always_burn_subtitles)) {
		_subtitles.push_back (make_pair (ps, period));
	} else {
		Subtitle (ps, period);
	}
}

void
Player::text_subtitle (weak_ptr<Piece> wp, ContentTextSubtitle subtitle)
{
	shared_ptr<Piece> piece = wp.lock ();
	if (!piece) {
		return;
	}

	PlayerSubtitles ps;
	DCPTimePeriod const period (content_time_to_dcp (piece, subtitle.period().from), content_time_to_dcp (piece, subtitle.period().to));

	BOOST_FOREACH (dcp::SubtitleString s, subtitle.subs) {
		s.set_h_position (s.h_position() + piece->content->subtitle->x_offset ());
		s.set_v_position (s.v_position() + piece->content->subtitle->y_offset ());
		float const xs = piece->content->subtitle->x_scale();
		float const ys = piece->content->subtitle->y_scale();
		float size = s.size();

		/* Adjust size to express the common part of the scaling;
		   e.g. if xs = ys = 0.5 we scale size by 0.5.
		*/
		if (xs > 1e-5 && ys > 1e-5) {
			size *= 1 / min (1 / xs, 1 / ys);
		}
		s.set_size (size);

		/* Then express aspect ratio changes */
		if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) {
			s.set_aspect_adjust (xs / ys);
		}

		s.set_in (dcp::Time(period.from.seconds(), 1000));
		s.set_out (dcp::Time(period.to.seconds(), 1000));
		ps.text.push_back (SubtitleString (s, piece->content->subtitle->outline_width()));
		ps.add_fonts (piece->content->subtitle->fonts ());
	}

	if (piece->content->subtitle->use() && (piece->content->subtitle->burn() || _always_burn_subtitles)) {
		_subtitles.push_back (make_pair (ps, period));
	} else {
		Subtitle (ps, period);
	}
}

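/** Seek so that the next data emitted will be for the given DCP time.
 *  @param time Time to seek to.
 *  @param accurate true to seek as precisely as possible; false to allow a faster, less precise seek.
 */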
void
Player::seek (DCPTime time, bool accurate)
{
	if (_audio_processor) {
		_audio_processor->flush ();
	}

	BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
		if (i->content->position() <= time && time < i->content->end()) {
			i->decoder->seek (dcp_to_content_time (i, time), accurate);
			i->done = false;
		}
	}

	if (accurate) {
		_last_time = time - one_video_frame ();
	} else {
		_last_time = optional<DCPTime> ();
	}
}

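/** Get the Resampler for a given content/stream pair.
 *  @param create true to create a Resampler if one does not already exist; if false
 *  and there is none, an empty pointer is returned.
 */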
shared_ptr<Resampler>
Player::resampler (shared_ptr<const AudioContent> content, AudioStreamPtr stream, bool create)
{
	ResamplerMap::const_iterator i = _resamplers.find (make_pair (content, stream));
	if (i != _resamplers.end ()) {
		return i->second;
	}

	if (!create) {
		return shared_ptr<Resampler> ();
	}

	LOG_GENERAL (
		"Creating new resampler from %1 to %2 with %3 channels",
		stream->frame_rate(),
		content->resampled_frame_rate(),
		stream->channels()
		);

	shared_ptr<Resampler> r (
		new Resampler (stream->frame_rate(), content->resampled_frame_rate(), stream->channels())
		);

	_resamplers[make_pair(content, stream)] = r;
	return r;
}

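/** Emit video to fill a period, repeating the last frame where there is still video
 *  content at that time and using black otherwise.  Periods covered by referenced
 *  DCP video are skipped.
 */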
void
Player::fill_video (DCPTimePeriod period)
{
	/* XXX: this may not work for 3D */
	BOOST_FOREACH (DCPTimePeriod i, subtract(period, _no_video)) {
		for (DCPTime j = i.from; j < i.to; j += one_video_frame()) {
			if (_playlist->video_content_at(j) && _last_video) {
				Video (shared_ptr<PlayerVideo> (new PlayerVideo (*_last_video)), j);
			} else {
				Video (black_player_video_frame(), j);
			}
		}
	}
}

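/** Emit silence to fill a period, in blocks of up to half a second.  Periods covered
 *  by referenced DCP audio are skipped.
 */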
void
Player::fill_audio (DCPTimePeriod period)
{
	BOOST_FOREACH (DCPTimePeriod i, subtract(period, _no_audio)) {
		DCPTime t = i.from;
		while (t < i.to) {
			DCPTime block = min (DCPTime::from_seconds (0.5), i.to - t);
			Frame const samples = block.frames_round(_film->audio_frame_rate());
			if (samples) {
				shared_ptr<AudioBuffers> silence (new AudioBuffers (_film->audio_channels(), samples));
				silence->make_silent ();
				Audio (silence, t);
			}
			t += block;
		}
	}
}

DCPTime
Player::one_video_frame () const
{
	return DCPTime::from_frames (1, _film->video_frame_rate ());
}