Recreate player pieces when changing FFmpeg filters (#1019).
[dcpomatic.git] / src / lib / player.cc
1 /*
2     Copyright (C) 2013-2017 Carl Hetherington <cth@carlh.net>
3
4     This file is part of DCP-o-matic.
5
6     DCP-o-matic is free software; you can redistribute it and/or modify
7     it under the terms of the GNU General Public License as published by
8     the Free Software Foundation; either version 2 of the License, or
9     (at your option) any later version.
10
11     DCP-o-matic is distributed in the hope that it will be useful,
12     but WITHOUT ANY WARRANTY; without even the implied warranty of
13     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14     GNU General Public License for more details.
15
16     You should have received a copy of the GNU General Public License
17     along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.
18
19 */
20
21 #include "player.h"
22 #include "film.h"
23 #include "audio_buffers.h"
24 #include "content_audio.h"
25 #include "dcp_content.h"
26 #include "job.h"
27 #include "image.h"
28 #include "raw_image_proxy.h"
29 #include "ratio.h"
30 #include "log.h"
31 #include "render_subtitles.h"
32 #include "config.h"
33 #include "content_video.h"
34 #include "player_video.h"
35 #include "frame_rate_change.h"
36 #include "audio_processor.h"
37 #include "playlist.h"
38 #include "referenced_reel_asset.h"
39 #include "decoder_factory.h"
40 #include "decoder.h"
41 #include "video_decoder.h"
42 #include "audio_decoder.h"
43 #include "subtitle_content.h"
44 #include "subtitle_decoder.h"
45 #include "ffmpeg_content.h"
46 #include "audio_content.h"
47 #include "content_subtitle.h"
48 #include "dcp_decoder.h"
49 #include "image_decoder.h"
50 #include "compose.hpp"
51 #include <dcp/reel.h>
52 #include <dcp/reel_sound_asset.h>
53 #include <dcp/reel_subtitle_asset.h>
54 #include <dcp/reel_picture_asset.h>
55 #include <boost/foreach.hpp>
56 #include <stdint.h>
57 #include <algorithm>
58 #include <iostream>
59
60 #include "i18n.h"
61
62 #define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);
63
64 using std::list;
65 using std::cout;
66 using std::min;
67 using std::max;
68 using std::min;
69 using std::vector;
70 using std::pair;
71 using std::map;
72 using std::make_pair;
73 using std::copy;
74 using boost::shared_ptr;
75 using boost::weak_ptr;
76 using boost::dynamic_pointer_cast;
77 using boost::optional;
78 using boost::scoped_ptr;
79
/** Construct a Player for a given film and playlist.
 *  @param film Film whose settings (frame size, frame rates, audio setup) shape our output.
 *  @param playlist The content to play.
 */
Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
	: _film (film)
	, _playlist (playlist)
	, _have_valid_pieces (false)
	, _ignore_video (false)
	, _ignore_subtitle (false)
	, _always_burn_subtitles (false)
	, _fast (false)
	, _play_referenced (false)
	, _audio_merger (_film->audio_frame_rate())
{
	/* Watch for changes to the film, playlist and individual bits of content
	   so that we can invalidate our state and tell our own listeners.
	*/
	_film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
	_playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
	_playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::playlist_content_changed, this, _1, _2, _3));
	set_video_container_size (_film->frame_size ());

	/* Set up _audio_processor, if the film requires one */
	film_changed (Film::AUDIO_PROCESSOR);

	seek (DCPTime (), true);
}
100
101 void
102 Player::setup_pieces ()
103 {
104         _pieces.clear ();
105
106         BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
107
108                 if (!i->paths_valid ()) {
109                         continue;
110                 }
111
112                 shared_ptr<Decoder> decoder = decoder_factory (i, _film->log(), _fast);
113                 FrameRateChange frc (i->active_video_frame_rate(), _film->video_frame_rate());
114
115                 if (!decoder) {
116                         /* Not something that we can decode; e.g. Atmos content */
117                         continue;
118                 }
119
120                 if (decoder->video && _ignore_video) {
121                         decoder->video->set_ignore ();
122                 }
123
124                 if (decoder->subtitle && _ignore_subtitle) {
125                         decoder->subtitle->set_ignore ();
126                 }
127
128                 shared_ptr<DCPDecoder> dcp = dynamic_pointer_cast<DCPDecoder> (decoder);
129                 if (dcp && _play_referenced) {
130                         if (_play_referenced) {
131                                 dcp->set_decode_referenced ();
132                         }
133                         dcp->set_forced_reduction (_dcp_decode_reduction);
134                 }
135
136                 shared_ptr<Piece> piece (new Piece (i, decoder, frc));
137                 _pieces.push_back (piece);
138
139                 if (decoder->video) {
140                         decoder->video->Data.connect (bind (&Player::video, this, weak_ptr<Piece> (piece), _1));
141                 }
142
143                 if (decoder->audio) {
144                         decoder->audio->Data.connect (bind (&Player::audio, this, weak_ptr<Piece> (piece), _1, _2));
145                 }
146
147                 if (decoder->subtitle) {
148                         decoder->subtitle->ImageStart.connect (bind (&Player::image_subtitle_start, this, weak_ptr<Piece> (piece), _1));
149                         decoder->subtitle->TextStart.connect (bind (&Player::text_subtitle_start, this, weak_ptr<Piece> (piece), _1));
150                         decoder->subtitle->Stop.connect (bind (&Player::subtitle_stop, this, weak_ptr<Piece> (piece), _1));
151                 }
152         }
153
154         _stream_states.clear ();
155         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
156                 if (i->content->audio) {
157                         BOOST_FOREACH (AudioStreamPtr j, i->content->audio->streams()) {
158                                 _stream_states[j] = StreamState (i, i->content->position ());
159                         }
160                 }
161         }
162
163         _black = Empty (_film->content(), _film->length(), bind(&Content::video, _1));
164         _silent = Empty (_film->content(), _film->length(), bind(&Content::audio, _1));
165
166         _last_video_time = DCPTime ();
167         _last_audio_time = DCPTime ();
168         _have_valid_pieces = true;
169 }
170
171 void
172 Player::playlist_content_changed (weak_ptr<Content> w, int property, bool frequent)
173 {
174         shared_ptr<Content> c = w.lock ();
175         if (!c) {
176                 return;
177         }
178
179         if (
180                 property == ContentProperty::POSITION ||
181                 property == ContentProperty::LENGTH ||
182                 property == ContentProperty::TRIM_START ||
183                 property == ContentProperty::TRIM_END ||
184                 property == ContentProperty::PATH ||
185                 property == VideoContentProperty::FRAME_TYPE ||
186                 property == DCPContentProperty::NEEDS_ASSETS ||
187                 property == DCPContentProperty::NEEDS_KDM ||
188                 property == SubtitleContentProperty::COLOUR ||
189                 property == SubtitleContentProperty::OUTLINE ||
190                 property == SubtitleContentProperty::SHADOW ||
191                 property == SubtitleContentProperty::EFFECT_COLOUR ||
192                 property == FFmpegContentProperty::SUBTITLE_STREAM ||
193                 property == FFmpegContentProperty::FILTERS ||
194                 property == VideoContentProperty::COLOUR_CONVERSION
195                 ) {
196
197                 _have_valid_pieces = false;
198                 Changed (frequent);
199
200         } else if (
201                 property == SubtitleContentProperty::LINE_SPACING ||
202                 property == SubtitleContentProperty::OUTLINE_WIDTH ||
203                 property == SubtitleContentProperty::Y_SCALE ||
204                 property == SubtitleContentProperty::FADE_IN ||
205                 property == SubtitleContentProperty::FADE_OUT ||
206                 property == ContentProperty::VIDEO_FRAME_RATE ||
207                 property == SubtitleContentProperty::USE ||
208                 property == SubtitleContentProperty::X_OFFSET ||
209                 property == SubtitleContentProperty::Y_OFFSET ||
210                 property == SubtitleContentProperty::X_SCALE ||
211                 property == SubtitleContentProperty::FONTS ||
212                 property == VideoContentProperty::CROP ||
213                 property == VideoContentProperty::SCALE ||
214                 property == VideoContentProperty::FADE_IN ||
215                 property == VideoContentProperty::FADE_OUT
216                 ) {
217
218                 Changed (frequent);
219         }
220 }
221
222 void
223 Player::set_video_container_size (dcp::Size s)
224 {
225         if (s == _video_container_size) {
226                 return;
227         }
228
229         _video_container_size = s;
230
231         _black_image.reset (new Image (AV_PIX_FMT_RGB24, _video_container_size, true));
232         _black_image->make_black ();
233
234         Changed (false);
235 }
236
/** Called when the playlist itself has changed; our pieces are now stale and
 *  our output will be different, so tell our listeners.
 */
void
Player::playlist_changed ()
{
	_have_valid_pieces = false;
	Changed (false);
}
243
244 void
245 Player::film_changed (Film::Property p)
246 {
247         /* Here we should notice Film properties that affect our output, and
248            alert listeners that our output now would be different to how it was
249            last time we were run.
250         */
251
252         if (p == Film::CONTAINER) {
253                 Changed (false);
254         } else if (p == Film::VIDEO_FRAME_RATE) {
255                 /* Pieces contain a FrameRateChange which contains the DCP frame rate,
256                    so we need new pieces here.
257                 */
258                 _have_valid_pieces = false;
259                 Changed (false);
260         } else if (p == Film::AUDIO_PROCESSOR) {
261                 if (_film->audio_processor ()) {
262                         _audio_processor = _film->audio_processor()->clone (_film->audio_frame_rate ());
263                 }
264         }
265 }
266
267 list<PositionImage>
268 Player::transform_image_subtitles (list<ImageSubtitle> subs) const
269 {
270         list<PositionImage> all;
271
272         for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
273                 if (!i->image) {
274                         continue;
275                 }
276
277                 /* We will scale the subtitle up to fit _video_container_size */
278                 dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);
279
280                 /* Then we need a corrective translation, consisting of two parts:
281                  *
282                  * 1.  that which is the result of the scaling of the subtitle by _video_container_size; this will be
283                  *     rect.x * _video_container_size.width and rect.y * _video_container_size.height.
284                  *
285                  * 2.  that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
286                  *     (width_before_subtitle_scale * (1 - subtitle_x_scale) / 2) and
287                  *     (height_before_subtitle_scale * (1 - subtitle_y_scale) / 2).
288                  *
289                  * Combining these two translations gives these expressions.
290                  */
291
292                 all.push_back (
293                         PositionImage (
294                                 i->image->scale (
295                                         scaled_size,
296                                         dcp::YUV_TO_RGB_REC601,
297                                         i->image->pixel_format (),
298                                         true,
299                                         _fast
300                                         ),
301                                 Position<int> (
302                                         lrint (_video_container_size.width * i->rectangle.x),
303                                         lrint (_video_container_size.height * i->rectangle.y)
304                                         )
305                                 )
306                         );
307         }
308
309         return all;
310 }
311
312 shared_ptr<PlayerVideo>
313 Player::black_player_video_frame () const
314 {
315         return shared_ptr<PlayerVideo> (
316                 new PlayerVideo (
317                         shared_ptr<const ImageProxy> (new RawImageProxy (_black_image)),
318                         Crop (),
319                         optional<double> (),
320                         _video_container_size,
321                         _video_container_size,
322                         EYES_BOTH,
323                         PART_WHOLE,
324                         PresetColourConversion::all().front().conversion
325                 )
326         );
327 }
328
329 Frame
330 Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
331 {
332         DCPTime s = t - piece->content->position ();
333         s = min (piece->content->length_after_trim(), s);
334         s = max (DCPTime(), s + DCPTime (piece->content->trim_start(), piece->frc));
335
336         /* It might seem more logical here to convert s to a ContentTime (using the FrameRateChange)
337            then convert that ContentTime to frames at the content's rate.  However this fails for
338            situations like content at 29.9978733fps, DCP at 30fps.  The accuracy of the Time type is not
339            enough to distinguish between the two with low values of time (e.g. 3200 in Time units).
340
341            Instead we convert the DCPTime using the DCP video rate then account for any skip/repeat.
342         */
343         return s.frames_floor (piece->frc.dcp) / piece->frc.factor ();
344 }
345
346 DCPTime
347 Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
348 {
349         /* See comment in dcp_to_content_video */
350         DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime(piece->content->trim_start(), piece->frc);
351         return d + piece->content->position();
352 }
353
354 Frame
355 Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
356 {
357         DCPTime s = t - piece->content->position ();
358         s = min (piece->content->length_after_trim(), s);
359         /* See notes in dcp_to_content_video */
360         return max (DCPTime (), DCPTime (piece->content->trim_start (), piece->frc) + s).frames_floor (_film->audio_frame_rate ());
361 }
362
363 DCPTime
364 Player::resampled_audio_to_dcp (shared_ptr<const Piece> piece, Frame f) const
365 {
366         /* See comment in dcp_to_content_video */
367         return DCPTime::from_frames (f, _film->audio_frame_rate())
368                 - DCPTime (piece->content->trim_start(), piece->frc)
369                 + piece->content->position();
370 }
371
372 ContentTime
373 Player::dcp_to_content_time (shared_ptr<const Piece> piece, DCPTime t) const
374 {
375         DCPTime s = t - piece->content->position ();
376         s = min (piece->content->length_after_trim(), s);
377         return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
378 }
379
380 DCPTime
381 Player::content_time_to_dcp (shared_ptr<const Piece> piece, ContentTime t) const
382 {
383         return max (DCPTime (), DCPTime (t - piece->content->trim_start(), piece->frc) + piece->content->position());
384 }
385
386 list<shared_ptr<Font> >
387 Player::get_subtitle_fonts ()
388 {
389         if (!_have_valid_pieces) {
390                 setup_pieces ();
391         }
392
393         list<shared_ptr<Font> > fonts;
394         BOOST_FOREACH (shared_ptr<Piece>& p, _pieces) {
395                 if (p->content->subtitle) {
396                         /* XXX: things may go wrong if there are duplicate font IDs
397                            with different font files.
398                         */
399                         list<shared_ptr<Font> > f = p->content->subtitle->fonts ();
400                         copy (f.begin(), f.end(), back_inserter (fonts));
401                 }
402         }
403
404         return fonts;
405 }
406
/** Set this player never to produce any video data */
void
Player::set_ignore_video ()
{
	/* Read by setup_pieces(), which calls set_ignore() on each new video decoder */
	_ignore_video = true;
}
413
/** Set this player never to produce any subtitle data */
void
Player::set_ignore_subtitle ()
{
	/* Read by setup_pieces(), which calls set_ignore() on each new subtitle decoder */
	_ignore_subtitle = true;
}
419
/** Set whether or not this player should always burn text subtitles into the image,
 *  regardless of the content settings.
 *  @param burn true to always burn subtitles, false to obey content settings.
 */
void
Player::set_always_burn_subtitles (bool burn)
{
	/* Read by subtitles_for_frame() when asking _active_subtitles for burnt subtitles */
	_always_burn_subtitles = burn;
}
429
/** Sets up the player to be faster, possibly at the expense of quality */
void
Player::set_fast ()
{
	_fast = true;
	/* _fast is passed to decoder_factory() and to image scaling, so new pieces are needed */
	_have_valid_pieces = false;
}
437
/** Set the player to decode and play referenced DCP content, rather than
 *  leaving it to be referenced directly by the output DCP.
 */
void
Player::set_play_referenced ()
{
	_play_referenced = true;
	/* setup_pieces() reads _play_referenced when configuring DCP decoders */
	_have_valid_pieces = false;
}
444
/** @return Details of the reel assets (picture / sound / subtitle) of any DCP
 *  content which is set to be referenced by the output rather than re-encoded,
 *  along with the DCP time period that each occupies.
 */
list<ReferencedReelAsset>
Player::get_reel_assets ()
{
	list<ReferencedReelAsset> a;

	BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
		shared_ptr<DCPContent> j = dynamic_pointer_cast<DCPContent> (i);
		if (!j) {
			/* Only DCP content can be referenced */
			continue;
		}

		scoped_ptr<DCPDecoder> decoder;
		try {
			decoder.reset (new DCPDecoder (j, _film->log(), false));
		} catch (...) {
			/* NOTE(review): a failure here returns whatever has been gathered so
			   far and skips all remaining content — confirm this is intended
			   rather than a `continue` to the next content item.
			*/
			return a;
		}

		/* Offset, in frames, of the current reel from the start of this content */
		int64_t offset = 0;
		BOOST_FOREACH (shared_ptr<dcp::Reel> k, decoder->reels()) {

			DCPOMATIC_ASSERT (j->video_frame_rate ());
			double const cfr = j->video_frame_rate().get();
			/* Content trims expressed in content video frames */
			Frame const trim_start = j->trim_start().frames_round (cfr);
			Frame const trim_end = j->trim_end().frames_round (cfr);
			int const ffr = _film->video_frame_rate ();

			/* Start of this reel in the DCP */
			DCPTime const from = i->position() + DCPTime::from_frames (offset, _film->video_frame_rate());
			if (j->reference_video ()) {
				shared_ptr<dcp::ReelAsset> ra = k->main_picture ();
				DCPOMATIC_ASSERT (ra);
				/* Adjust the asset in place so that its entry point / duration reflect the trims */
				ra->set_entry_point (ra->entry_point() + trim_start);
				ra->set_duration (ra->duration() - trim_start - trim_end);
				a.push_back (
					ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
					);
			}

			if (j->reference_audio ()) {
				shared_ptr<dcp::ReelAsset> ra = k->main_sound ();
				DCPOMATIC_ASSERT (ra);
				ra->set_entry_point (ra->entry_point() + trim_start);
				ra->set_duration (ra->duration() - trim_start - trim_end);
				a.push_back (
					ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
					);
			}

			if (j->reference_subtitle ()) {
				shared_ptr<dcp::ReelAsset> ra = k->main_subtitle ();
				DCPOMATIC_ASSERT (ra);
				ra->set_entry_point (ra->entry_point() + trim_start);
				ra->set_duration (ra->duration() - trim_start - trim_end);
				a.push_back (
					ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
					);
			}

			/* Assume that main picture duration is the length of the reel */
			offset += k->main_picture()->duration ();
		}
	}

	return a;
}
510
/** Run one pass of the player: ask the decoder (or black/silence filler)
 *  which is furthest behind to emit some data, then emit any audio which
 *  is now complete.
 *  @return true if the film has been fully played, otherwise false.
 */
bool
Player::pass ()
{
	if (!_have_valid_pieces) {
		setup_pieces ();
	}

	if (_playlist->length() == DCPTime()) {
		/* Special case of an empty Film; just give one black frame */
		emit_video (black_player_video_frame(), DCPTime());
		return true;
	}

	/* Find the decoder or empty which is farthest behind where we are and make it emit some data */

	shared_ptr<Piece> earliest_content;
	optional<DCPTime> earliest_time;

	BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
		if (i->done) {
			continue;
		}

		DCPTime const t = content_time_to_dcp (i, i->decoder->position());
		if (t > i->content->end()) {
			/* This piece's decoder has gone past the end of its content */
			i->done = true;
		} else {
			/* Given two choices at the same time, pick the one with a subtitle so we see it before
			   the video.
			*/
			if (!earliest_time || t < *earliest_time || (t == *earliest_time && i->decoder->subtitle)) {
				earliest_time = t;
				earliest_content = i;
			}
		}
	}

	bool done = false;

	/* What to emit on this pass: real content, a black frame or some silence */
	enum {
		NONE,
		CONTENT,
		BLACK,
		SILENT
	} which = NONE;

	if (earliest_content) {
		which = CONTENT;
	}

	/* Black / silence fillers take priority if they are behind all the content */
	if (!_black.done() && (!earliest_time || _black.position() < *earliest_time)) {
		earliest_time = _black.position ();
		which = BLACK;
	}

	if (!_silent.done() && (!earliest_time || _silent.position() < *earliest_time)) {
		earliest_time = _silent.position ();
		which = SILENT;
	}

	switch (which) {
	case CONTENT:
		earliest_content->done = earliest_content->decoder->pass ();
		break;
	case BLACK:
		emit_video (black_player_video_frame(), _black.position());
		_black.set_position (_black.position() + one_video_frame());
		break;
	case SILENT:
	{
		/* Fill at most one video frame's worth of silence per pass */
		DCPTimePeriod period (_silent.period_at_position());
		if (period.duration() > one_video_frame()) {
			period.to = period.from + one_video_frame();
		}
		fill_audio (period);
		_silent.set_position (period.to);
		break;
	}
	case NONE:
		done = true;
		break;
	}

	/* Emit any audio that is ready */

	/* We can only pull audio up to the point that every un-finished stream has
	   been pushed to, as earlier data could still arrive from the others.
	*/
	DCPTime pull_to = _film->length ();
	for (map<AudioStreamPtr, StreamState>::const_iterator i = _stream_states.begin(); i != _stream_states.end(); ++i) {
		if (!i->second.piece->done && i->second.last_push_end < pull_to) {
			pull_to = i->second.last_push_end;
		}
	}

	list<pair<shared_ptr<AudioBuffers>, DCPTime> > audio = _audio_merger.pull (pull_to);
	for (list<pair<shared_ptr<AudioBuffers>, DCPTime> >::iterator i = audio.begin(); i != audio.end(); ++i) {
		if (_last_audio_time && i->second < *_last_audio_time) {
			/* This new data comes before the last we emitted (or the last seek); discard it */
			pair<shared_ptr<AudioBuffers>, DCPTime> cut = discard_audio (i->first, i->second, *_last_audio_time);
			if (!cut.first) {
				continue;
			}
			*i = cut;
		} else if (_last_audio_time && i->second > *_last_audio_time) {
			/* There's a gap between this data and the last we emitted; fill with silence */
			fill_audio (DCPTimePeriod (*_last_audio_time, i->second));
		}

		emit_audio (i->first, i->second);
	}

	return done;
}
622
623 optional<PositionImage>
624 Player::subtitles_for_frame (DCPTime time) const
625 {
626         list<PositionImage> subtitles;
627
628         BOOST_FOREACH (PlayerSubtitles i, _active_subtitles.get_burnt (time, _always_burn_subtitles)) {
629
630                 /* Image subtitles */
631                 list<PositionImage> c = transform_image_subtitles (i.image);
632                 copy (c.begin(), c.end(), back_inserter (subtitles));
633
634                 /* Text subtitles (rendered to an image) */
635                 if (!i.text.empty ()) {
636                         list<PositionImage> s = render_subtitles (i.text, i.fonts, _video_container_size, time);
637                         copy (s.begin(), s.end(), back_inserter (subtitles));
638                 }
639         }
640
641         if (subtitles.empty ()) {
642                 return optional<PositionImage> ();
643         }
644
645         return merge (subtitles);
646 }
647
/** Handle a video frame emitted by one of our pieces' decoders.
 *  @param wp Piece which the frame came from.
 *  @param video The frame, with its index within the content.
 */
void
Player::video (weak_ptr<Piece> wp, ContentVideo video)
{
	shared_ptr<Piece> piece = wp.lock ();
	if (!piece) {
		return;
	}

	FrameRateChange frc(piece->content->active_video_frame_rate(), _film->video_frame_rate());
	if (frc.skip && (video.frame % 2) == 1) {
		/* When the frame rate change requires skipping, drop odd-numbered frames */
		return;
	}

	/* Time of the first frame we will emit */
	DCPTime const time = content_video_to_dcp (piece, video.frame);

	/* Discard if it's outside the content's period or if it's before the last accurate seek */
	if (
		time < piece->content->position() ||
		time >= piece->content->end() ||
		(_last_video_time && time < *_last_video_time)) {
		return;
	}

	/* Fill gaps that we discover now that we have some video which needs to be emitted */

	if (_last_video_time) {
		/* XXX: this may not work for 3D */
		DCPTime fill_from = max (*_last_video_time, piece->content->position());
		for (DCPTime j = fill_from; j < time; j += one_video_frame()) {
			/* Repeat this piece's last frame if we have one, otherwise emit black */
			LastVideoMap::const_iterator k = _last_video.find (wp);
			if (k != _last_video.end ()) {
				emit_video (k->second, j);
			} else {
				emit_video (black_player_video_frame(), j);
			}
		}
	}

	/* Remember this frame so that we can use it to fill future gaps from this piece */
	_last_video[wp].reset (
		new PlayerVideo (
			video.image,
			piece->content->video->crop (),
			piece->content->video->fade (video.frame),
			piece->content->video->scale().size (
				piece->content->video, _video_container_size, _film->frame_size ()
				),
			_video_container_size,
			video.eyes,
			video.part,
			piece->content->video->colour_conversion ()
			)
		);

	/* Emit the frame, repeated as many times as the frame rate change requires */
	DCPTime t = time;
	for (int i = 0; i < frc.repeat; ++i) {
		emit_video (_last_video[wp], t);
		t += one_video_frame ();
	}
}
708
/** Handle a block of audio emitted by one of our pieces' decoders: trim it to
 *  the content's period, apply gain, remap, process and push it to the merger.
 *  @param wp Piece which the audio came from.
 *  @param stream Stream within the piece that the audio belongs to.
 *  @param content_audio Audio data plus the content frame at which it starts.
 */
void
Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_audio)
{
	DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);

	shared_ptr<Piece> piece = wp.lock ();
	if (!piece) {
		return;
	}

	shared_ptr<AudioContent> content = piece->content->audio;
	DCPOMATIC_ASSERT (content);

	/* Compute time in the DCP */
	DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame);
	/* And the end of this block in the DCP */
	DCPTime end = time + DCPTime::from_frames(content_audio.audio->frames(), content->resampled_frame_rate());

	/* Remove anything that comes before the start or after the end of the content */
	if (time < piece->content->position()) {
		pair<shared_ptr<AudioBuffers>, DCPTime> cut = discard_audio (content_audio.audio, time, piece->content->position());
		if (!cut.first) {
			/* This audio is entirely discarded */
			return;
		}
		content_audio.audio = cut.first;
		time = cut.second;
	} else if (time > piece->content->end()) {
		/* Discard it all */
		return;
	} else if (end > piece->content->end()) {
		/* Trim the block so that it finishes at the end of the content */
		Frame const remaining_frames = DCPTime(piece->content->end() - time).frames_round(_film->audio_frame_rate());
		if (remaining_frames == 0) {
			return;
		}
		shared_ptr<AudioBuffers> cut (new AudioBuffers (content_audio.audio->channels(), remaining_frames));
		cut->copy_from (content_audio.audio.get(), remaining_frames, 0, 0);
		content_audio.audio = cut;
	}

	DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);

	/* Gain */

	if (content->gain() != 0) {
		/* Copy before applying gain, so the decoder's buffers are left unmodified */
		shared_ptr<AudioBuffers> gain (new AudioBuffers (content_audio.audio));
		gain->apply_gain (content->gain ());
		content_audio.audio = gain;
	}

	/* Remap */

	content_audio.audio = remap (content_audio.audio, _film->audio_channels(), stream->mapping());

	/* Process */

	if (_audio_processor) {
		content_audio.audio = _audio_processor->run (content_audio.audio, _film->audio_channels ());
	}

	/* Push */

	_audio_merger.push (content_audio.audio, time);
	DCPOMATIC_ASSERT (_stream_states.find (stream) != _stream_states.end ());
	/* Record how far this stream has now been pushed, so pass() knows how much audio can be pulled */
	_stream_states[stream].last_push_end = time + DCPTime::from_frames (content_audio.audio->frames(), _film->audio_frame_rate());
}
775
776 void
777 Player::image_subtitle_start (weak_ptr<Piece> wp, ContentImageSubtitle subtitle)
778 {
779         shared_ptr<Piece> piece = wp.lock ();
780         if (!piece) {
781                 return;
782         }
783
784         /* Apply content's subtitle offsets */
785         subtitle.sub.rectangle.x += piece->content->subtitle->x_offset ();
786         subtitle.sub.rectangle.y += piece->content->subtitle->y_offset ();
787
788         /* Apply content's subtitle scale */
789         subtitle.sub.rectangle.width *= piece->content->subtitle->x_scale ();
790         subtitle.sub.rectangle.height *= piece->content->subtitle->y_scale ();
791
792         /* Apply a corrective translation to keep the subtitle centred after that scale */
793         subtitle.sub.rectangle.x -= subtitle.sub.rectangle.width * (piece->content->subtitle->x_scale() - 1);
794         subtitle.sub.rectangle.y -= subtitle.sub.rectangle.height * (piece->content->subtitle->y_scale() - 1);
795
796         PlayerSubtitles ps;
797         ps.image.push_back (subtitle.sub);
798         DCPTime from (content_time_to_dcp (piece, subtitle.from()));
799
800         _active_subtitles.add_from (wp, ps, from);
801 }
802
/** Handle the start of a text subtitle from some content: apply the content's
 *  position offsets, scaling and outline width, then register the subtitle as
 *  active from the corresponding DCP time.
 */
void
Player::text_subtitle_start (weak_ptr<Piece> wp, ContentTextSubtitle subtitle)
{
	shared_ptr<Piece> piece = wp.lock ();
	if (!piece) {
		/* The piece has gone; nothing to do */
		return;
	}

	PlayerSubtitles ps;
	DCPTime const from (content_time_to_dcp (piece, subtitle.from()));

	/* Take each string by value since we modify it before storing it */
	BOOST_FOREACH (dcp::SubtitleString s, subtitle.subs) {
		/* Apply content's subtitle offsets */
		s.set_h_position (s.h_position() + piece->content->subtitle->x_offset ());
		s.set_v_position (s.v_position() + piece->content->subtitle->y_offset ());
		float const xs = piece->content->subtitle->x_scale();
		float const ys = piece->content->subtitle->y_scale();
		float size = s.size();

		/* Adjust size to express the common part of the scaling;
		   1 / min (1 / xs, 1 / ys) is max (xs, ys), so
		   e.g. if xs = ys = 0.5 the size is halved.
		*/
		if (xs > 1e-5 && ys > 1e-5) {
			/* Guard against division by (nearly) zero scales */
			size *= 1 / min (1 / xs, 1 / ys);
		}
		s.set_size (size);

		/* Then express aspect ratio changes */
		if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) {
			s.set_aspect_adjust (xs / ys);
		}

		/* in time is in milliseconds (editable rate 1000) */
		s.set_in (dcp::Time(from.seconds(), 1000));
		ps.text.push_back (SubtitleString (s, piece->content->subtitle->outline_width()));
		ps.add_fonts (piece->content->subtitle->fonts ());
	}

	_active_subtitles.add_from (wp, ps, from);
}
841
842 void
843 Player::subtitle_stop (weak_ptr<Piece> wp, ContentTime to)
844 {
845         if (!_active_subtitles.have (wp)) {
846                 return;
847         }
848
849         shared_ptr<Piece> piece = wp.lock ();
850         if (!piece) {
851                 return;
852         }
853
854         DCPTime const dcp_to = content_time_to_dcp (piece, to);
855
856         pair<PlayerSubtitles, DCPTime> from = _active_subtitles.add_to (wp, dcp_to);
857
858         if (piece->content->subtitle->use() && !_always_burn_subtitles && !piece->content->subtitle->burn()) {
859                 Subtitle (from.first, DCPTimePeriod (from.second, dcp_to));
860         }
861 }
862
/** Seek the player to a DCP time.
 *  @param time time to seek to.
 *  @param accurate true to seek exactly to the requested time; false if the
 *  decoders may settle for a nearby position (the exact semantics are up to
 *  each decoder's seek()).
 */
void
Player::seek (DCPTime time, bool accurate)
{
	if (!_have_valid_pieces) {
		setup_pieces ();
	}

	/* Discard any buffered audio processor state from before the seek */
	if (_audio_processor) {
		_audio_processor->flush ();
	}

	/* Throw away any pending merged audio and active subtitles */
	_audio_merger.clear ();
	_active_subtitles.clear ();

	BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
		if (time < i->content->position()) {
			/* Before; seek to 0 */
			i->decoder->seek (ContentTime(), accurate);
			i->done = false;
		} else if (i->content->position() <= time && time < i->content->end()) {
			/* During; seek to position */
			i->decoder->seek (dcp_to_content_time (i, time), accurate);
			i->done = false;
		} else {
			/* After; this piece is done */
			i->done = true;
		}
	}

	if (accurate) {
		_last_video_time = time;
		_last_audio_time = time;
	} else {
		/* We can't know exactly where the decoders will end up, so leave
		   these unset until content arrives.
		*/
		_last_video_time = optional<DCPTime>();
		_last_audio_time = optional<DCPTime>();
	}

	/* Restart black/silence filling from the seek point */
	_black.set_position (time);
	_silent.set_position (time);

	_last_video.clear ();
}
905
906 void
907 Player::emit_video (shared_ptr<PlayerVideo> pv, DCPTime time)
908 {
909         optional<PositionImage> subtitles = subtitles_for_frame (time);
910         if (subtitles) {
911                 pv->set_subtitle (subtitles.get ());
912         }
913
914         Video (pv, time);
915
916         if (pv->eyes() == EYES_BOTH || pv->eyes() == EYES_RIGHT) {
917                 _last_video_time = time + one_video_frame();
918                 _active_subtitles.clear_before (time);
919         }
920 }
921
922 void
923 Player::emit_audio (shared_ptr<AudioBuffers> data, DCPTime time)
924 {
925         Audio (data, time);
926         _last_audio_time = time + DCPTime::from_frames (data->frames(), _film->audio_frame_rate());
927 }
928
929 void
930 Player::fill_audio (DCPTimePeriod period)
931 {
932         if (period.from == period.to) {
933                 return;
934         }
935
936         DCPOMATIC_ASSERT (period.from < period.to);
937
938         DCPTime t = period.from;
939         while (t < period.to) {
940                 DCPTime block = min (DCPTime::from_seconds (0.5), period.to - t);
941                 Frame const samples = block.frames_round(_film->audio_frame_rate());
942                 if (samples) {
943                         shared_ptr<AudioBuffers> silence (new AudioBuffers (_film->audio_channels(), samples));
944                         silence->make_silent ();
945                         emit_audio (silence, t);
946                 }
947                 t += block;
948         }
949 }
950
951 DCPTime
952 Player::one_video_frame () const
953 {
954         return DCPTime::from_frames (1, _film->video_frame_rate ());
955 }
956
957 pair<shared_ptr<AudioBuffers>, DCPTime>
958 Player::discard_audio (shared_ptr<const AudioBuffers> audio, DCPTime time, DCPTime discard_to) const
959 {
960         DCPTime const discard_time = discard_to - time;
961         Frame const discard_frames = discard_time.frames_round(_film->audio_frame_rate());
962         Frame remaining_frames = audio->frames() - discard_frames;
963         if (remaining_frames <= 0) {
964                 return make_pair(shared_ptr<AudioBuffers>(), DCPTime());
965         }
966         shared_ptr<AudioBuffers> cut (new AudioBuffers (audio->channels(), remaining_frames));
967         cut->copy_from (audio.get(), remaining_frames, discard_frames, 0);
968         return make_pair(cut, time + discard_time);
969 }
970
971 void
972 Player::set_dcp_decode_reduction (optional<int> reduction)
973 {
974         if (reduction == _dcp_decode_reduction) {
975                 return;
976         }
977
978         _dcp_decode_reduction = reduction;
979         _have_valid_pieces = false;
980         Changed (false);
981 }