Initial work on removing storage of subtitle times.
[dcpomatic.git] / src / lib / player.cc
1 /*
2     Copyright (C) 2013-2017 Carl Hetherington <cth@carlh.net>
3
4     This file is part of DCP-o-matic.
5
6     DCP-o-matic is free software; you can redistribute it and/or modify
7     it under the terms of the GNU General Public License as published by
8     the Free Software Foundation; either version 2 of the License, or
9     (at your option) any later version.
10
11     DCP-o-matic is distributed in the hope that it will be useful,
12     but WITHOUT ANY WARRANTY; without even the implied warranty of
13     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14     GNU General Public License for more details.
15
16     You should have received a copy of the GNU General Public License
17     along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.
18
19 */
20
21 #include "player.h"
22 #include "film.h"
23 #include "audio_buffers.h"
24 #include "content_audio.h"
25 #include "dcp_content.h"
26 #include "job.h"
27 #include "image.h"
28 #include "raw_image_proxy.h"
29 #include "ratio.h"
30 #include "log.h"
31 #include "render_subtitles.h"
32 #include "config.h"
33 #include "content_video.h"
34 #include "player_video.h"
35 #include "frame_rate_change.h"
36 #include "audio_processor.h"
37 #include "playlist.h"
38 #include "referenced_reel_asset.h"
39 #include "decoder_factory.h"
40 #include "decoder.h"
41 #include "video_decoder.h"
42 #include "audio_decoder.h"
43 #include "subtitle_content.h"
44 #include "subtitle_decoder.h"
45 #include "ffmpeg_content.h"
46 #include "audio_content.h"
47 #include "content_subtitle.h"
48 #include "dcp_decoder.h"
49 #include "image_decoder.h"
50 #include "resampler.h"
51 #include "compose.hpp"
52 #include <dcp/reel.h>
53 #include <dcp/reel_sound_asset.h>
54 #include <dcp/reel_subtitle_asset.h>
55 #include <dcp/reel_picture_asset.h>
56 #include <boost/foreach.hpp>
57 #include <stdint.h>
58 #include <algorithm>
59 #include <iostream>
60
61 #include "i18n.h"
62
63 #define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);
64
65 using std::list;
66 using std::cout;
67 using std::min;
68 using std::max;
70 using std::vector;
71 using std::pair;
72 using std::map;
73 using std::make_pair;
74 using std::copy;
75 using boost::shared_ptr;
76 using boost::weak_ptr;
77 using boost::dynamic_pointer_cast;
78 using boost::optional;
79 using boost::scoped_ptr;
80
81 Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
82         : _film (film)
83         , _playlist (playlist)
84         , _have_valid_pieces (false)
85         , _ignore_video (false)
86         , _ignore_audio (false)
87         , _always_burn_subtitles (false)
88         , _fast (false)
89         , _play_referenced (false)
90         , _audio_merger (_film->audio_frame_rate())
91 {
92         _film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
93         _playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
94         _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::playlist_content_changed, this, _1, _2, _3));
95         set_video_container_size (_film->frame_size ());
96
97         film_changed (Film::AUDIO_PROCESSOR);
98
99         seek (DCPTime (), true);
100 }
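/* A minimal usage sketch (hypothetical caller code; handle_video and handle_audio are
 * illustrative names, not part of this file):
 *
 *   shared_ptr<Player> player (new Player (film, film->playlist ()));
 *   player->Video.connect (bind (&handle_video, _1, _2));
 *   player->Audio.connect (bind (&handle_audio, _1, _2));
 *   while (!player->pass ()) {}
 *
 * pass() decodes a little more content each call and emits Video / Audio / Subtitle
 * signals; it returns true once the playlist is exhausted.
 */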
101
102 void
103 Player::setup_pieces ()
104 {
105         _pieces.clear ();
            _no_video.clear ();
            _no_audio.clear ();
106
107         BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
108
109                 if (!i->paths_valid ()) {
110                         continue;
111                 }
112
113                 shared_ptr<Decoder> decoder = decoder_factory (i, _film->log());
114                 FrameRateChange frc (i->active_video_frame_rate(), _film->video_frame_rate());
115
116                 if (!decoder) {
117                         /* Not something that we can decode; e.g. Atmos content */
118                         continue;
119                 }
120
121                 if (decoder->video && _ignore_video) {
122                         decoder->video->set_ignore ();
123                 }
124
125                 if (decoder->audio && _ignore_audio) {
126                         decoder->audio->set_ignore ();
127                 }
128
129                 shared_ptr<DCPDecoder> dcp = dynamic_pointer_cast<DCPDecoder> (decoder);
130                 if (dcp && _play_referenced) {
131                         dcp->set_decode_referenced ();
132                 }
133
134                 shared_ptr<Piece> piece (new Piece (i, decoder, frc));
135                 _pieces.push_back (piece);
136
137                 if (decoder->video) {
138                         decoder->video->Data.connect (bind (&Player::video, this, weak_ptr<Piece> (piece), _1));
139                 }
140
141                 if (decoder->audio) {
142                         decoder->audio->Data.connect (bind (&Player::audio, this, weak_ptr<Piece> (piece), _1, _2));
143                 }
144
145                 if (decoder->subtitle) {
146                         decoder->subtitle->ImageStart.connect (bind (&Player::image_subtitle_start, this, weak_ptr<Piece> (piece), _1));
147                         decoder->subtitle->TextStart.connect (bind (&Player::text_subtitle_start, this, weak_ptr<Piece> (piece), _1));
148                         decoder->subtitle->Stop.connect (bind (&Player::subtitle_stop, this, weak_ptr<Piece> (piece), _1));
149                 }
150         }
151
152         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
153                 if (i->content->audio) {
154                         BOOST_FOREACH (AudioStreamPtr j, i->content->audio->streams()) {
155                                 _stream_states[j] = StreamState (i, i->content->position ());
156                         }
157                 }
158         }
159
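        /* Note the periods that are covered by referenced (not re-encoded) DCP video and audio,
           so that fill_video() and fill_audio() below do not paper over them with black or silence.
        */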
160         if (!_play_referenced) {
161                 BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
162                         shared_ptr<DCPContent> dc = dynamic_pointer_cast<DCPContent> (i->content);
163                         if (dc) {
164                                 if (dc->reference_video()) {
165                                         _no_video.push_back (DCPTimePeriod (dc->position(), dc->end()));
166                                 }
167                                 if (dc->reference_audio()) {
168                                         _no_audio.push_back (DCPTimePeriod (dc->position(), dc->end()));
169                                 }
170                         }
171                 }
172         }
173
174         _last_video_time = optional<DCPTime> ();
175         _last_audio_time = optional<DCPTime> ();
176         _have_valid_pieces = true;
177 }
178
179 void
180 Player::playlist_content_changed (weak_ptr<Content> w, int property, bool frequent)
181 {
182         shared_ptr<Content> c = w.lock ();
183         if (!c) {
184                 return;
185         }
186
187         if (
188                 property == ContentProperty::POSITION ||
189                 property == ContentProperty::LENGTH ||
190                 property == ContentProperty::TRIM_START ||
191                 property == ContentProperty::TRIM_END ||
192                 property == ContentProperty::PATH ||
193                 property == VideoContentProperty::FRAME_TYPE ||
194                 property == DCPContentProperty::NEEDS_ASSETS ||
195                 property == DCPContentProperty::NEEDS_KDM ||
196                 property == SubtitleContentProperty::COLOUR ||
197                 property == SubtitleContentProperty::OUTLINE ||
198                 property == SubtitleContentProperty::SHADOW ||
199                 property == SubtitleContentProperty::EFFECT_COLOUR ||
200                 property == FFmpegContentProperty::SUBTITLE_STREAM ||
201                 property == VideoContentProperty::COLOUR_CONVERSION
202                 ) {
203
204                 _have_valid_pieces = false;
205                 Changed (frequent);
206
207         } else if (
208                 property == SubtitleContentProperty::LINE_SPACING ||
209                 property == SubtitleContentProperty::OUTLINE_WIDTH ||
210                 property == SubtitleContentProperty::Y_SCALE ||
211                 property == SubtitleContentProperty::FADE_IN ||
212                 property == SubtitleContentProperty::FADE_OUT ||
213                 property == ContentProperty::VIDEO_FRAME_RATE ||
214                 property == SubtitleContentProperty::USE ||
215                 property == SubtitleContentProperty::X_OFFSET ||
216                 property == SubtitleContentProperty::Y_OFFSET ||
217                 property == SubtitleContentProperty::X_SCALE ||
218                 property == SubtitleContentProperty::FONTS ||
219                 property == VideoContentProperty::CROP ||
220                 property == VideoContentProperty::SCALE ||
221                 property == VideoContentProperty::FADE_IN ||
222                 property == VideoContentProperty::FADE_OUT
223                 ) {
224
225                 Changed (frequent);
226         }
227 }
228
229 void
230 Player::set_video_container_size (dcp::Size s)
231 {
232         if (s == _video_container_size) {
233                 return;
234         }
235
236         _video_container_size = s;
237
238         _black_image.reset (new Image (AV_PIX_FMT_RGB24, _video_container_size, true));
239         _black_image->make_black ();
240
241         Changed (false);
242 }
243
244 void
245 Player::playlist_changed ()
246 {
247         _have_valid_pieces = false;
248         Changed (false);
249 }
250
251 void
252 Player::film_changed (Film::Property p)
253 {
254         /* Here we should notice Film properties that affect our output, and
255            alert listeners that our output would now be different to how it was
256            last time we were run.
257         */
258
259         if (p == Film::CONTAINER) {
260                 Changed (false);
261         } else if (p == Film::VIDEO_FRAME_RATE) {
262                 /* Pieces contain a FrameRateChange which contains the DCP frame rate,
263                    so we need new pieces here.
264                 */
265                 _have_valid_pieces = false;
266                 Changed (false);
267         } else if (p == Film::AUDIO_PROCESSOR) {
268                 if (_film->audio_processor ()) {
269                         _audio_processor = _film->audio_processor()->clone (_film->audio_frame_rate ());
270                 }
271         }
272 }
273
274 list<PositionImage>
275 Player::transform_image_subtitles (list<ImageSubtitle> subs) const
276 {
277         list<PositionImage> all;
278
279         for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
280                 if (!i->image) {
281                         continue;
282                 }
283
284                 /* We will scale the subtitle up to fit _video_container_size */
285                 dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);
286
287                 /* Then we need a corrective translation, consisting of two parts:
288                  *
289                  * 1.  that which is the result of the scaling of the subtitle by _video_container_size; this will be
290                  *     rect.x * _video_container_size.width and rect.y * _video_container_size.height.
291                  *
292                  * 2.  that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
293                  *     (width_before_subtitle_scale * (1 - subtitle_x_scale) / 2) and
294                  *     (height_before_subtitle_scale * (1 - subtitle_y_scale) / 2).
295                  *
296                  * Combining these two translations gives these expressions.
297                  */
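                /* Illustrative numbers: with a 1998x1080 container and a rectangle of
                   x = 0.1, y = 0.8, width = 0.8, height = 0.1, the subtitle image is scaled
                   to 1598x108 and positioned at (200, 864).
                */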
298
299                 all.push_back (
300                         PositionImage (
301                                 i->image->scale (
302                                         scaled_size,
303                                         dcp::YUV_TO_RGB_REC601,
304                                         i->image->pixel_format (),
305                                         true,
306                                         _fast
307                                         ),
308                                 Position<int> (
309                                         lrint (_video_container_size.width * i->rectangle.x),
310                                         lrint (_video_container_size.height * i->rectangle.y)
311                                         )
312                                 )
313                         );
314         }
315
316         return all;
317 }
318
319 shared_ptr<PlayerVideo>
320 Player::black_player_video_frame () const
321 {
322         return shared_ptr<PlayerVideo> (
323                 new PlayerVideo (
324                         shared_ptr<const ImageProxy> (new RawImageProxy (_black_image)),
325                         Crop (),
326                         optional<double> (),
327                         _video_container_size,
328                         _video_container_size,
329                         EYES_BOTH,
330                         PART_WHOLE,
331                         PresetColourConversion::all().front().conversion
332                 )
333         );
334 }
335
336 Frame
337 Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
338 {
339         DCPTime s = t - piece->content->position ();
340         s = min (piece->content->length_after_trim(), s);
341         s = max (DCPTime(), s + DCPTime (piece->content->trim_start(), piece->frc));
342
343         /* It might seem more logical here to convert s to a ContentTime (using the FrameRateChange)
344            then convert that ContentTime to frames at the content's rate.  However this fails for
345            situations like content at 29.9978733fps, DCP at 30fps.  The accuracy of the Time type is not
346            enough to distinguish between the two with low values of time (e.g. 3200 in Time units).
347
348            Instead we convert the DCPTime using the DCP video rate then account for any skip/repeat.
349         */
350         return s.frames_floor (piece->frc.dcp) / piece->frc.factor ();
351 }
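/* Illustrative numbers for the conversion above: with 25fps content in a 50fps DCP each
   content frame is repeated twice, so frc.factor() is 2; an s of one second then gives
   s.frames_floor(50) == 50 and hence content frame 25.  With 29.9978733fps content in a
   30fps DCP, frc.factor() is 1 and the frame index computed at the DCP rate is used as-is.
*/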
352
353 DCPTime
354 Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
355 {
356         /* See comment in dcp_to_content_video */
357         DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime (piece->content->trim_start (), piece->frc);
358         return max (DCPTime (), d + piece->content->position ());
359 }
360
361 Frame
362 Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
363 {
364         DCPTime s = t - piece->content->position ();
365         s = min (piece->content->length_after_trim(), s);
366         /* See notes in dcp_to_content_video */
367         return max (DCPTime (), DCPTime (piece->content->trim_start (), piece->frc) + s).frames_floor (_film->audio_frame_rate ());
368 }
369
370 DCPTime
371 Player::resampled_audio_to_dcp (shared_ptr<const Piece> piece, Frame f) const
372 {
373         /* See comment in dcp_to_content_video */
374         DCPTime const d = DCPTime::from_frames (f, _film->audio_frame_rate()) - DCPTime (piece->content->trim_start(), piece->frc);
375         return max (DCPTime (), d + piece->content->position ());
376 }
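/* Illustrative numbers for the conversion above (assuming a 48kHz DCP audio rate and no
   frame-rate speed change): f = 96000 is 2s of resampled audio; with a trim_start of 0.5s
   and the content positioned at 10s, the result is 2s - 0.5s + 10s = 11.5s.
*/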
377
378 ContentTime
379 Player::dcp_to_content_time (shared_ptr<const Piece> piece, DCPTime t) const
380 {
381         DCPTime s = t - piece->content->position ();
382         s = min (piece->content->length_after_trim(), s);
383         return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
384 }
385
386 DCPTime
387 Player::content_time_to_dcp (shared_ptr<const Piece> piece, ContentTime t) const
388 {
389         return max (DCPTime (), DCPTime (t - piece->content->trim_start(), piece->frc) + piece->content->position());
390 }
391
392 list<shared_ptr<Font> >
393 Player::get_subtitle_fonts ()
394 {
395         if (!_have_valid_pieces) {
396                 setup_pieces ();
397         }
398
399         list<shared_ptr<Font> > fonts;
400         BOOST_FOREACH (shared_ptr<Piece>& p, _pieces) {
401                 if (p->content->subtitle) {
402                         /* XXX: things may go wrong if there are duplicate font IDs
403                            with different font files.
404                         */
405                         list<shared_ptr<Font> > f = p->content->subtitle->fonts ();
406                         copy (f.begin(), f.end(), back_inserter (fonts));
407                 }
408         }
409
410         return fonts;
411 }
412
413 /** Set this player never to produce any video data */
414 void
415 Player::set_ignore_video ()
416 {
417         _ignore_video = true;
418 }
419
420 /** Set this player never to produce any audio data */
421 void
422 Player::set_ignore_audio ()
423 {
424         _ignore_audio = true;
425 }
426
427 /** Set whether or not this player should always burn text subtitles into the image,
428  *  regardless of the content settings.
429  *  @param burn true to always burn subtitles, false to obey content settings.
430  */
431 void
432 Player::set_always_burn_subtitles (bool burn)
433 {
434         _always_burn_subtitles = burn;
435 }
436
437 void
438 Player::set_fast ()
439 {
440         _fast = true;
441         _have_valid_pieces = false;
442 }
443
444 void
445 Player::set_play_referenced ()
446 {
447         _play_referenced = true;
448         _have_valid_pieces = false;
449 }
450
451 list<ReferencedReelAsset>
452 Player::get_reel_assets ()
453 {
454         list<ReferencedReelAsset> a;
455
456         BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
457                 shared_ptr<DCPContent> j = dynamic_pointer_cast<DCPContent> (i);
458                 if (!j) {
459                         continue;
460                 }
461
462                 scoped_ptr<DCPDecoder> decoder;
463                 try {
464                         decoder.reset (new DCPDecoder (j, _film->log()));
465                 } catch (...) {
466                         return a;
467                 }
468
469                 int64_t offset = 0;
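                /* offset counts the frames of the reels we have already passed, so `from' below
                   is the DCP time at which this reel starts.
                */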
470                 BOOST_FOREACH (shared_ptr<dcp::Reel> k, decoder->reels()) {
471
472                         DCPOMATIC_ASSERT (j->video_frame_rate ());
473                         double const cfr = j->video_frame_rate().get();
474                         Frame const trim_start = j->trim_start().frames_round (cfr);
475                         Frame const trim_end = j->trim_end().frames_round (cfr);
476                         int const ffr = _film->video_frame_rate ();
477
478                         DCPTime const from = i->position() + DCPTime::from_frames (offset, _film->video_frame_rate());
479                         if (j->reference_video ()) {
480                                 shared_ptr<dcp::ReelAsset> ra = k->main_picture ();
481                                 DCPOMATIC_ASSERT (ra);
482                                 ra->set_entry_point (ra->entry_point() + trim_start);
483                                 ra->set_duration (ra->duration() - trim_start - trim_end);
484                                 a.push_back (
485                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
486                                         );
487                         }
488
489                         if (j->reference_audio ()) {
490                                 shared_ptr<dcp::ReelAsset> ra = k->main_sound ();
491                                 DCPOMATIC_ASSERT (ra);
492                                 ra->set_entry_point (ra->entry_point() + trim_start);
493                                 ra->set_duration (ra->duration() - trim_start - trim_end);
494                                 a.push_back (
495                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
496                                         );
497                         }
498
499                         if (j->reference_subtitle ()) {
500                                 shared_ptr<dcp::ReelAsset> ra = k->main_subtitle ();
501                                 DCPOMATIC_ASSERT (ra);
502                                 ra->set_entry_point (ra->entry_point() + trim_start);
503                                 ra->set_duration (ra->duration() - trim_start - trim_end);
504                                 a.push_back (
505                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
506                                         );
507                         }
508
509                         /* Assume that main picture duration is the length of the reel */
510                         offset += k->main_picture()->duration ();
511                 }
512         }
513
514         return a;
515 }
516
517 list<shared_ptr<Piece> >
518 Player::overlaps (DCPTime from, DCPTime to, boost::function<bool (Content *)> valid)
519 {
520         if (!_have_valid_pieces) {
521                 setup_pieces ();
522         }
523
524         list<shared_ptr<Piece> > overlaps;
525         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
526                 if (valid (i->content.get ()) && i->content->position() < to && i->content->end() > from) {
527                         overlaps.push_back (i);
528                 }
529         }
530
531         return overlaps;
532 }
533
534 bool
535 Player::pass ()
536 {
537         if (!_have_valid_pieces) {
538                 setup_pieces ();
539         }
540
541         shared_ptr<Piece> earliest;
542         DCPTime earliest_content;
543
544         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
545                 if (!i->done) {
546                         DCPTime const t = i->content->position() + DCPTime (i->decoder->position(), i->frc);
547                         if (!earliest || t < earliest_content) {
548                                 earliest_content = t;
549                                 earliest = i;
550                         }
551                 }
552         }
553
554         if (!earliest) {
555                 /* No more content; fill up with silent black */
556                 DCPTimePeriod remaining_video (DCPTime(), _playlist->length());
557                 if (_last_video_time) {
558                         remaining_video.from = _last_video_time.get();
559                 }
560                 fill_video (remaining_video);
561                 DCPTimePeriod remaining_audio (DCPTime(), _playlist->length());
562                 if (_last_audio_time) {
563                         remaining_audio.from = _last_audio_time.get();
564                 }
565                 fill_audio (remaining_audio);
566                 return true;
567         }
568
569         earliest->done = earliest->decoder->pass ();
570         if (earliest->done && earliest->content->audio) {
571                 /* Flush the Player audio system for this piece */
572                 BOOST_FOREACH (AudioStreamPtr i, earliest->content->audio->streams()) {
573                         audio_flush (earliest, i);
574                 }
575         }
576
577         /* Emit any audio that is ready */
578
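        /* We can only pull audio up to the earliest last_push_end of any stream that has not yet
           finished, since an unfinished stream may still push audio timed before that point.
        */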
579         DCPTime pull_from = _playlist->length ();
580         for (map<AudioStreamPtr, StreamState>::const_iterator i = _stream_states.begin(); i != _stream_states.end(); ++i) {
581                 if (!i->second.piece->done && i->second.last_push_end < pull_from) {
582                         pull_from = i->second.last_push_end;
583                 }
584         }
585
586         list<pair<shared_ptr<AudioBuffers>, DCPTime> > audio = _audio_merger.pull (pull_from);
587         for (list<pair<shared_ptr<AudioBuffers>, DCPTime> >::iterator i = audio.begin(); i != audio.end(); ++i) {
588                 if (_last_audio_time && i->second < _last_audio_time.get()) {
589                         /* There has been an accurate seek and we have received some audio before the seek time;
590                            discard it.
591                         */
592                         pair<shared_ptr<AudioBuffers>, DCPTime> cut = discard_audio (i->first, i->second, *_last_audio_time);
593                         if (!cut.first) {
594                                 continue;
595                         }
596                         *i = cut;
597                 }
598
599                 if (_last_audio_time) {
600                         fill_audio (DCPTimePeriod (_last_audio_time.get(), i->second));
601                 }
602
603                 Audio (i->first, i->second);
604                 _last_audio_time = i->second + DCPTime::from_frames(i->first->frames(), _film->audio_frame_rate());
605         }
606
607         return false;
608 }
609
610 void
611 Player::video (weak_ptr<Piece> wp, ContentVideo video)
612 {
613         shared_ptr<Piece> piece = wp.lock ();
614         if (!piece) {
615                 return;
616         }
617
618         FrameRateChange frc(piece->content->active_video_frame_rate(), _film->video_frame_rate());
619         if (frc.skip && (video.frame % 2) == 1) {
620                 return;
621         }
622
623         /* Time and period of the frame we will emit */
624         DCPTime const time = content_video_to_dcp (piece, video.frame);
625         DCPTimePeriod const period (time, time + one_video_frame());
626
627         /* Discard if it's outside the content's period or if it's before the last accurate seek */
628         if (time < piece->content->position() || time >= piece->content->end() || (_last_video_time && time < _last_video_time)) {
629                 return;
630         }
631
632         /* Get any subtitles */
633
634         optional<PositionImage> subtitles;
635
636         for (ActiveSubtitles::const_iterator i = _active_subtitles.begin(); i != _active_subtitles.end(); ++i) {
637
638                 shared_ptr<Piece> sub_piece = i->first.lock ();
639                 if (!sub_piece) {
640                         continue;
641                 }
642
643                 if (!sub_piece->content->subtitle->use() || (!_always_burn_subtitles && !sub_piece->content->subtitle->burn())) {
644                         continue;
645                 }
646
647                 pair<PlayerSubtitles, DCPTime> sub = i->second;
648
649                 list<PositionImage> sub_images;
650
651                 /* Image subtitles */
652                 list<PositionImage> c = transform_image_subtitles (sub.first.image);
653                 copy (c.begin(), c.end(), back_inserter (sub_images));
654
655                 /* Text subtitles (rendered to an image) */
656                 if (!sub.first.text.empty ()) {
657                         list<PositionImage> s = render_subtitles (sub.first.text, sub.first.fonts, _video_container_size, time);
658                         copy (s.begin (), s.end (), back_inserter (sub_images));
659                 }
660
661                 if (!sub_images.empty ()) {
662                         subtitles = merge (sub_images);
663                 }
664         }
665
666         /* Fill gaps */
667
668         if (_last_video_time) {
669                 fill_video (DCPTimePeriod (_last_video_time.get(), time));
670         }
671
672         _last_video.reset (
673                 new PlayerVideo (
674                         video.image,
675                         piece->content->video->crop (),
676                         piece->content->video->fade (video.frame),
677                         piece->content->video->scale().size (
678                                 piece->content->video, _video_container_size, _film->frame_size ()
679                                 ),
680                         _video_container_size,
681                         video.eyes,
682                         video.part,
683                         piece->content->video->colour_conversion ()
684                         )
685                 );
686
687         if (subtitles) {
688                 _last_video->set_subtitle (subtitles.get ());
689         }
690
691         Video (_last_video, time);
692
693         _last_video_time = time + one_video_frame ();
694 }
695
696 void
697 Player::audio_flush (shared_ptr<Piece> piece, AudioStreamPtr stream)
698 {
699         shared_ptr<AudioContent> content = piece->content->audio;
700         DCPOMATIC_ASSERT (content);
701
702         shared_ptr<Resampler> r = resampler (content, stream, false);
703         if (!r) {
704                 return;
705         }
706
707         pair<shared_ptr<const AudioBuffers>, Frame> ro = r->flush ();
708         if (ro.first->frames() == 0) {
709                 return;
710         }
711
712         ContentAudio content_audio;
713         content_audio.audio = ro.first;
714         content_audio.frame = ro.second;
715
716         /* Compute time in the DCP */
717         DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame) + DCPTime::from_seconds (content->delay() / 1000.0);
718
719         audio_transform (content, stream, content_audio, time);
720 }
721
722 /** Do our common processing on some audio */
723 void
724 Player::audio_transform (shared_ptr<AudioContent> content, AudioStreamPtr stream, ContentAudio content_audio, DCPTime time)
725 {
726         DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);
727
728         /* Gain */
729
730         if (content->gain() != 0) {
731                 shared_ptr<AudioBuffers> gain (new AudioBuffers (content_audio.audio));
732                 gain->apply_gain (content->gain ());
733                 content_audio.audio = gain;
734         }
735
736         /* Remap */
737
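        /* Mix each content channel into every DCP channel that its AudioMapping routes it to,
           scaled by the mapping gain; DCP channels with no routing are left silent.
        */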
738         shared_ptr<AudioBuffers> dcp_mapped (new AudioBuffers (_film->audio_channels(), content_audio.audio->frames()));
739         dcp_mapped->make_silent ();
740
741         AudioMapping map = stream->mapping ();
742         for (int i = 0; i < map.input_channels(); ++i) {
743                 for (int j = 0; j < dcp_mapped->channels(); ++j) {
744                         if (map.get (i, static_cast<dcp::Channel> (j)) > 0) {
745                                 dcp_mapped->accumulate_channel (
746                                         content_audio.audio.get(),
747                                         i,
748                                         static_cast<dcp::Channel> (j),
749                                         map.get (i, static_cast<dcp::Channel> (j))
750                                         );
751                         }
752                 }
753         }
754
755         content_audio.audio = dcp_mapped;
756
757         /* Process */
758
759         if (_audio_processor) {
760                 content_audio.audio = _audio_processor->run (content_audio.audio, _film->audio_channels ());
761         }
762
763         /* Push */
764
765         _audio_merger.push (content_audio.audio, time);
766         DCPOMATIC_ASSERT (_stream_states.find (stream) != _stream_states.end ());
767         _stream_states[stream].last_push_end = time + DCPTime::from_frames (content_audio.audio->frames(), _film->audio_frame_rate());
768 }
769
770 void
771 Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_audio)
772 {
773         DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);
774
775         shared_ptr<Piece> piece = wp.lock ();
776         if (!piece) {
777                 return;
778         }
779
780         shared_ptr<AudioContent> content = piece->content->audio;
781         DCPOMATIC_ASSERT (content);
782
783         /* Resample */
784         if (stream->frame_rate() != content->resampled_frame_rate()) {
785                 shared_ptr<Resampler> r = resampler (content, stream, true);
786                 pair<shared_ptr<const AudioBuffers>, Frame> ro = r->run (content_audio.audio, content_audio.frame);
787                 if (ro.first->frames() == 0) {
788                         return;
789                 }
790                 content_audio.audio = ro.first;
791                 content_audio.frame = ro.second;
792         }
793
794         /* Compute time in the DCP */
795         DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame) + DCPTime::from_seconds (content->delay() / 1000.0);
796         /* And the end of this block in the DCP */
797         DCPTime end = time + DCPTime::from_frames(content_audio.audio->frames(), content->resampled_frame_rate());
798
799         /* Remove anything that comes before the start or after the end of the content */
800         if (time < piece->content->position()) {
801                 pair<shared_ptr<AudioBuffers>, DCPTime> cut = discard_audio (content_audio.audio, time, piece->content->position());
802                 if (!cut.first) {
803                         /* This audio is entirely discarded */
804                         return;
805                 }
806                 content_audio.audio = cut.first;
807                 time = cut.second;
808         } else if (time > piece->content->end()) {
809                 /* Discard it all */
810                 return;
811         } else if (end > piece->content->end()) {
812                 Frame const remaining_frames = DCPTime(piece->content->end() - time).frames_round(_film->audio_frame_rate());
813                 DCPOMATIC_ASSERT (remaining_frames > 0);
814                 shared_ptr<AudioBuffers> cut (new AudioBuffers (content_audio.audio->channels(), remaining_frames));
815                 cut->copy_from (content_audio.audio.get(), remaining_frames, 0, 0);
816                 content_audio.audio = cut;
817         }
818
819         audio_transform (content, stream, content_audio, time);
820 }
821
822 void
823 Player::image_subtitle_start (weak_ptr<Piece> wp, ContentImageSubtitle subtitle)
824 {
825         shared_ptr<Piece> piece = wp.lock ();
826         if (!piece) {
827                 return;
828         }
829
830         /* Apply content's subtitle offsets */
831         subtitle.sub.rectangle.x += piece->content->subtitle->x_offset ();
832         subtitle.sub.rectangle.y += piece->content->subtitle->y_offset ();
833
834         /* Apply content's subtitle scale */
835         subtitle.sub.rectangle.width *= piece->content->subtitle->x_scale ();
836         subtitle.sub.rectangle.height *= piece->content->subtitle->y_scale ();
837
838         /* Apply a corrective translation to keep the subtitle centred after that scale */
839         subtitle.sub.rectangle.x -= subtitle.sub.rectangle.width * (piece->content->subtitle->x_scale() - 1);
840         subtitle.sub.rectangle.y -= subtitle.sub.rectangle.height * (piece->content->subtitle->y_scale() - 1);
841
842         PlayerSubtitles ps;
843         ps.image.push_back (subtitle.sub);
844         DCPTime from (content_time_to_dcp (piece, subtitle.from()));
845
846         _active_subtitles[wp] = make_pair (ps, from);
847 }
848
849 void
850 Player::text_subtitle_start (weak_ptr<Piece> wp, ContentTextSubtitle subtitle)
851 {
852         shared_ptr<Piece> piece = wp.lock ();
853         if (!piece) {
854                 return;
855         }
856
857         PlayerSubtitles ps;
858         DCPTime const from (content_time_to_dcp (piece, subtitle.from()));
859
860         BOOST_FOREACH (dcp::SubtitleString s, subtitle.subs) {
861                 s.set_h_position (s.h_position() + piece->content->subtitle->x_offset ());
862                 s.set_v_position (s.v_position() + piece->content->subtitle->y_offset ());
863                 float const xs = piece->content->subtitle->x_scale();
864                 float const ys = piece->content->subtitle->y_scale();
865                 float size = s.size();
866
867                 /* Adjust size to express the common part of the scaling;
868                    e.g. if xs = ys = 0.5 we scale size by 0.5 (i.e. by max (xs, ys)).
869                 */
870                 if (xs > 1e-5 && ys > 1e-5) {
871                         size *= 1 / min (1 / xs, 1 / ys);
872                 }
873                 s.set_size (size);
874
875                 /* Then express aspect ratio changes */
876                 if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) {
877                         s.set_aspect_adjust (xs / ys);
878                 }
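                /* Illustrative numbers: xs = 0.8, ys = 0.5 gives a size scaled by 0.8 and an
                   aspect_adjust of 1.6.
                */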
879
880                 s.set_in (dcp::Time(from.seconds(), 1000));
881                 ps.text.push_back (SubtitleString (s, piece->content->subtitle->outline_width()));
882                 ps.add_fonts (piece->content->subtitle->fonts ());
883         }
884
885         _active_subtitles[wp] = make_pair (ps, from);
886 }
887
888 void
889 Player::subtitle_stop (weak_ptr<Piece> wp, ContentTime to)
890 {
891         if (_active_subtitles.find (wp) == _active_subtitles.end ()) {
892                 return;
893         }
894
895         shared_ptr<Piece> piece = wp.lock ();
896         if (!piece) {
897                 return;
898         }
899
900         if (piece->content->subtitle->use() && !_always_burn_subtitles && !piece->content->subtitle->burn()) {
901                 Subtitle (_active_subtitles[wp].first, DCPTimePeriod (_active_subtitles[wp].second, content_time_to_dcp (piece, to)));
902         }
903
904         _active_subtitles.erase (wp);
905 }
906
907 void
908 Player::seek (DCPTime time, bool accurate)
909 {
910         if (_audio_processor) {
911                 _audio_processor->flush ();
912         }
913
914         for (ResamplerMap::iterator i = _resamplers.begin(); i != _resamplers.end(); ++i) {
915                 i->second->flush ();
916                 i->second->reset ();
917         }
918
919         _audio_merger.clear ();
920         _active_subtitles.clear ();
921
922         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
923                 i->done = false;
924                 DCPTime const t = min(max(time, i->content->position()), i->content->end());
925                 i->decoder->seek (dcp_to_content_time (i, t), accurate);
926         }
927
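        /* After an accurate seek we record the target time so that any video or audio arriving
           before it is discarded; an inaccurate seek just accepts whatever comes next.
        */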
928         if (accurate) {
929                 _last_video_time = time;
930                 _last_audio_time = time;
931         } else {
932                 _last_video_time = optional<DCPTime> ();
933                 _last_audio_time = optional<DCPTime> ();
934         }
935 }
936
937 shared_ptr<Resampler>
938 Player::resampler (shared_ptr<const AudioContent> content, AudioStreamPtr stream, bool create)
939 {
940         ResamplerMap::const_iterator i = _resamplers.find (make_pair (content, stream));
941         if (i != _resamplers.end ()) {
942                 return i->second;
943         }
944
945         if (!create) {
946                 return shared_ptr<Resampler> ();
947         }
948
949         LOG_GENERAL (
950                 "Creating new resampler from %1 to %2 with %3 channels",
951                 stream->frame_rate(),
952                 content->resampled_frame_rate(),
953                 stream->channels()
954                 );
955
956         shared_ptr<Resampler> r (
957                 new Resampler (stream->frame_rate(), content->resampled_frame_rate(), stream->channels())
958                 );
959
960         _resamplers[make_pair(content, stream)] = r;
961         return r;
962 }
963
964 void
965 Player::fill_video (DCPTimePeriod period)
966 {
967         /* XXX: this may not work for 3D */
968         BOOST_FOREACH (DCPTimePeriod i, subtract(period, _no_video)) {
969                 for (DCPTime j = i.from; j < i.to; j += one_video_frame()) {
970                         if (_playlist->video_content_at(j) && _last_video) {
971                                 Video (shared_ptr<PlayerVideo> (new PlayerVideo (*_last_video)), j);
972                         } else {
973                                 Video (black_player_video_frame(), j);
974                         }
975                 }
976         }
977 }
978
979 void
980 Player::fill_audio (DCPTimePeriod period)
981 {
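        /* Emit silence over any part of `period' that is not covered by referenced DCP audio,
           in blocks of at most half a second.
        */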
982         BOOST_FOREACH (DCPTimePeriod i, subtract(period, _no_audio)) {
983                 DCPTime t = i.from;
984                 while (t < i.to) {
985                         DCPTime block = min (DCPTime::from_seconds (0.5), i.to - t);
986                         Frame const samples = block.frames_round(_film->audio_frame_rate());
987                         if (samples) {
988                                 shared_ptr<AudioBuffers> silence (new AudioBuffers (_film->audio_channels(), samples));
989                                 silence->make_silent ();
990                                 Audio (silence, t);
991                         }
992                         t += block;
993                 }
994         }
995 }
996
997 DCPTime
998 Player::one_video_frame () const
999 {
1000         return DCPTime::from_frames (1, _film->video_frame_rate ());
1001 }
1002
1003 pair<shared_ptr<AudioBuffers>, DCPTime>
1004 Player::discard_audio (shared_ptr<const AudioBuffers> audio, DCPTime time, DCPTime discard_to) const
1005 {
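        /* Illustrative numbers (assuming a 48kHz DCP audio rate): with time = 10s and
           discard_to = 10.5s we drop the first 24000 frames; a 48000-frame block comes back
           as 24000 frames starting at 10.5s.
        */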
1006         DCPTime const discard_time = discard_to - time;
1007         Frame const discard_frames = discard_time.frames_round(_film->audio_frame_rate());
1008         Frame remaining_frames = audio->frames() - discard_frames;
1009         if (remaining_frames <= 0) {
1010                 return make_pair(shared_ptr<AudioBuffers>(), DCPTime());
1011         }
1012         shared_ptr<AudioBuffers> cut (new AudioBuffers (audio->channels(), remaining_frames));
1013         cut->copy_from (audio.get(), remaining_frames, discard_frames, 0);
1014         return make_pair(cut, time + discard_time);
1015 }