Basics of forced reduction of JPEG2000 decode resolution.
[dcpomatic.git] / src / lib / player.cc
1 /*
2     Copyright (C) 2013-2017 Carl Hetherington <cth@carlh.net>
3
4     This file is part of DCP-o-matic.
5
6     DCP-o-matic is free software; you can redistribute it and/or modify
7     it under the terms of the GNU General Public License as published by
8     the Free Software Foundation; either version 2 of the License, or
9     (at your option) any later version.
10
11     DCP-o-matic is distributed in the hope that it will be useful,
12     but WITHOUT ANY WARRANTY; without even the implied warranty of
13     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14     GNU General Public License for more details.
15
16     You should have received a copy of the GNU General Public License
17     along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.
18
19 */
20
21 #include "player.h"
22 #include "film.h"
23 #include "audio_buffers.h"
24 #include "content_audio.h"
25 #include "dcp_content.h"
26 #include "job.h"
27 #include "image.h"
28 #include "raw_image_proxy.h"
29 #include "ratio.h"
30 #include "log.h"
31 #include "render_subtitles.h"
32 #include "config.h"
33 #include "content_video.h"
34 #include "player_video.h"
35 #include "frame_rate_change.h"
36 #include "audio_processor.h"
37 #include "playlist.h"
38 #include "referenced_reel_asset.h"
39 #include "decoder_factory.h"
40 #include "decoder.h"
41 #include "video_decoder.h"
42 #include "audio_decoder.h"
43 #include "subtitle_content.h"
44 #include "subtitle_decoder.h"
45 #include "ffmpeg_content.h"
46 #include "audio_content.h"
47 #include "content_subtitle.h"
48 #include "dcp_decoder.h"
49 #include "image_decoder.h"
50 #include "compose.hpp"
51 #include <dcp/reel.h>
52 #include <dcp/reel_sound_asset.h>
53 #include <dcp/reel_subtitle_asset.h>
54 #include <dcp/reel_picture_asset.h>
55 #include <boost/foreach.hpp>
56 #include <stdint.h>
57 #include <algorithm>
58 #include <iostream>
59
60 #include "i18n.h"
61
62 #define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);
63
64 using std::list;
65 using std::cout;
66 using std::min;
67 using std::max;
68 using std::min;
69 using std::vector;
70 using std::pair;
71 using std::map;
72 using std::make_pair;
73 using std::copy;
74 using boost::shared_ptr;
75 using boost::weak_ptr;
76 using boost::dynamic_pointer_cast;
77 using boost::optional;
78 using boost::scoped_ptr;
79
80 Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
81         : _film (film)
82         , _playlist (playlist)
83         , _have_valid_pieces (false)
84         , _ignore_video (false)
85         , _ignore_subtitle (false)
86         , _always_burn_subtitles (false)
87         , _fast (false)
88         , _play_referenced (false)
89         , _audio_merger (_film->audio_frame_rate())
90 {
91         _film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
92         _playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
93         _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::playlist_content_changed, this, _1, _2, _3));
94         set_video_container_size (_film->frame_size ());
95
96         film_changed (Film::AUDIO_PROCESSOR);
97
98         seek (DCPTime (), true);
99 }
100
101 void
102 Player::setup_pieces ()
103 {
104         _pieces.clear ();
105
106         BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
107
108                 if (!i->paths_valid ()) {
109                         continue;
110                 }
111
112                 shared_ptr<Decoder> decoder = decoder_factory (i, _film->log());
113                 FrameRateChange frc (i->active_video_frame_rate(), _film->video_frame_rate());
114
115                 if (!decoder) {
116                         /* Not something that we can decode; e.g. Atmos content */
117                         continue;
118                 }
119
120                 if (decoder->video && _ignore_video) {
121                         decoder->video->set_ignore ();
122                 }
123
124                 if (decoder->subtitle && _ignore_subtitle) {
125                         decoder->subtitle->set_ignore ();
126                 }
127
128                 shared_ptr<DCPDecoder> dcp = dynamic_pointer_cast<DCPDecoder> (decoder);
129                 if (dcp) {
130                         if (_play_referenced) {
131                                 dcp->set_decode_referenced ();
132                         }
133                         dcp->set_forced_reduction (_dcp_decode_reduction);
134                 }
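                /* Editorial note (an assumption about the decoder, which is not shown in this file):
                   a forced reduction of n asks the JPEG2000 decoder to discard its n highest
                   resolution levels, so decoded frames are 1/2^n of the full width and height
                   (e.g. reduction 2 turns a 3996x2160 frame into 999x540), which makes preview
                   decoding much cheaper.
                */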
135
136                 shared_ptr<Piece> piece (new Piece (i, decoder, frc));
137                 _pieces.push_back (piece);
138
139                 if (decoder->video) {
140                         decoder->video->Data.connect (bind (&Player::video, this, weak_ptr<Piece> (piece), _1));
141                 }
142
143                 if (decoder->audio) {
144                         decoder->audio->Data.connect (bind (&Player::audio, this, weak_ptr<Piece> (piece), _1, _2));
145                 }
146
147                 if (decoder->subtitle) {
148                         decoder->subtitle->ImageStart.connect (bind (&Player::image_subtitle_start, this, weak_ptr<Piece> (piece), _1));
149                         decoder->subtitle->TextStart.connect (bind (&Player::text_subtitle_start, this, weak_ptr<Piece> (piece), _1));
150                         decoder->subtitle->Stop.connect (bind (&Player::subtitle_stop, this, weak_ptr<Piece> (piece), _1));
151                 }
152         }
153
154         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
155                 if (i->content->audio) {
156                         BOOST_FOREACH (AudioStreamPtr j, i->content->audio->streams()) {
157                                 _stream_states[j] = StreamState (i, i->content->position ());
158                         }
159                 }
160         }
161
162         _black = Empty (_film->content(), _film->length(), bind(&Content::video, _1));
163         _silent = Empty (_film->content(), _film->length(), bind(&Content::audio, _1));
164
165         _last_video_time = DCPTime ();
166         _last_audio_time = DCPTime ();
167         _have_valid_pieces = true;
168 }
169
170 void
171 Player::playlist_content_changed (weak_ptr<Content> w, int property, bool frequent)
172 {
173         shared_ptr<Content> c = w.lock ();
174         if (!c) {
175                 return;
176         }
177
178         if (
179                 property == ContentProperty::POSITION ||
180                 property == ContentProperty::LENGTH ||
181                 property == ContentProperty::TRIM_START ||
182                 property == ContentProperty::TRIM_END ||
183                 property == ContentProperty::PATH ||
184                 property == VideoContentProperty::FRAME_TYPE ||
185                 property == DCPContentProperty::NEEDS_ASSETS ||
186                 property == DCPContentProperty::NEEDS_KDM ||
187                 property == SubtitleContentProperty::COLOUR ||
188                 property == SubtitleContentProperty::OUTLINE ||
189                 property == SubtitleContentProperty::SHADOW ||
190                 property == SubtitleContentProperty::EFFECT_COLOUR ||
191                 property == FFmpegContentProperty::SUBTITLE_STREAM ||
192                 property == VideoContentProperty::COLOUR_CONVERSION
193                 ) {
194
195                 _have_valid_pieces = false;
196                 Changed (frequent);
197
198         } else if (
199                 property == SubtitleContentProperty::LINE_SPACING ||
200                 property == SubtitleContentProperty::OUTLINE_WIDTH ||
201                 property == SubtitleContentProperty::Y_SCALE ||
202                 property == SubtitleContentProperty::FADE_IN ||
203                 property == SubtitleContentProperty::FADE_OUT ||
204                 property == ContentProperty::VIDEO_FRAME_RATE ||
205                 property == SubtitleContentProperty::USE ||
206                 property == SubtitleContentProperty::X_OFFSET ||
207                 property == SubtitleContentProperty::Y_OFFSET ||
208                 property == SubtitleContentProperty::X_SCALE ||
209                 property == SubtitleContentProperty::FONTS ||
210                 property == VideoContentProperty::CROP ||
211                 property == VideoContentProperty::SCALE ||
212                 property == VideoContentProperty::FADE_IN ||
213                 property == VideoContentProperty::FADE_OUT
214                 ) {
215
216                 Changed (frequent);
217         }
218 }
219
220 void
221 Player::set_video_container_size (dcp::Size s)
222 {
223         if (s == _video_container_size) {
224                 return;
225         }
226
227         _video_container_size = s;
228
229         _black_image.reset (new Image (AV_PIX_FMT_RGB24, _video_container_size, true));
230         _black_image->make_black ();
231
232         Changed (false);
233 }
234
235 void
236 Player::playlist_changed ()
237 {
238         _have_valid_pieces = false;
239         Changed (false);
240 }
241
242 void
243 Player::film_changed (Film::Property p)
244 {
245         /* Here we should notice Film properties that affect our output, and
246            alert listeners that our output now would be different to how it was
247            last time we were run.
248         */
249
250         if (p == Film::CONTAINER) {
251                 Changed (false);
252         } else if (p == Film::VIDEO_FRAME_RATE) {
253                 /* Pieces contain a FrameRateChange which contains the DCP frame rate,
254                    so we need new pieces here.
255                 */
256                 _have_valid_pieces = false;
257                 Changed (false);
258         } else if (p == Film::AUDIO_PROCESSOR) {
259                 if (_film->audio_processor ()) {
260                         _audio_processor = _film->audio_processor()->clone (_film->audio_frame_rate ());
261                 }
262         }
263 }
264
265 list<PositionImage>
266 Player::transform_image_subtitles (list<ImageSubtitle> subs) const
267 {
268         list<PositionImage> all;
269
270         for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
271                 if (!i->image) {
272                         continue;
273                 }
274
275                 /* We will scale the subtitle up to fit _video_container_size */
276                 dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);
277
278                 /* Then we need a corrective translation, consisting of two parts:
279                  *
280                  * 1.  that which is the result of the scaling of the subtitle by _video_container_size; this will be
281                  *     rect.x * _video_container_size.width and rect.y * _video_container_size.height.
282                  *
283                  * 2.  that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
284                  *     (width_before_subtitle_scale * (1 - subtitle_x_scale) / 2) and
285                  *     (height_before_subtitle_scale * (1 - subtitle_y_scale) / 2).
286                  *
287                  * Combining these two translations gives these expressions.
288                  */
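                /* Worked example (hypothetical numbers): with _video_container_size 1998x1080 and a
                   subtitle rectangle of x = 0.1, y = 0.8, width = 0.5, height = 0.1, the image below
                   is scaled to 999x108 and positioned at (200, 864).
                */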
289
290                 all.push_back (
291                         PositionImage (
292                                 i->image->scale (
293                                         scaled_size,
294                                         dcp::YUV_TO_RGB_REC601,
295                                         i->image->pixel_format (),
296                                         true,
297                                         _fast
298                                         ),
299                                 Position<int> (
300                                         lrint (_video_container_size.width * i->rectangle.x),
301                                         lrint (_video_container_size.height * i->rectangle.y)
302                                         )
303                                 )
304                         );
305         }
306
307         return all;
308 }
309
310 shared_ptr<PlayerVideo>
311 Player::black_player_video_frame () const
312 {
313         return shared_ptr<PlayerVideo> (
314                 new PlayerVideo (
315                         shared_ptr<const ImageProxy> (new RawImageProxy (_black_image)),
316                         Crop (),
317                         optional<double> (),
318                         _video_container_size,
319                         _video_container_size,
320                         EYES_BOTH,
321                         PART_WHOLE,
322                         PresetColourConversion::all().front().conversion
323                 )
324         );
325 }
326
327 Frame
328 Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
329 {
330         DCPTime s = t - piece->content->position ();
331         s = min (piece->content->length_after_trim(), s);
332         s = max (DCPTime(), s + DCPTime (piece->content->trim_start(), piece->frc));
333
334         /* It might seem more logical here to convert s to a ContentTime (using the FrameRateChange)
335            then convert that ContentTime to frames at the content's rate.  However this fails for
336            situations like content at 29.9978733fps, DCP at 30fps.  The accuracy of the Time type is not
337            enough to distinguish between the two with low values of time (e.g. 3200 in Time units).
338
339            Instead we convert the DCPTime using the DCP video rate then account for any skip/repeat.
340         */
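        /* Worked example (hypothetical rates, and assuming FrameRateChange::factor() returns the
           per-frame repeat factor): 25 fps content in a 50 fps DCP has frc.dcp = 50 and factor() = 2,
           so a DCPTime of one second becomes 50 DCP frames and hence content frame 25.
        */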
341         return s.frames_floor (piece->frc.dcp) / piece->frc.factor ();
342 }
343
344 DCPTime
345 Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
346 {
347         /* See comment in dcp_to_content_video */
348         DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime (piece->content->trim_start (), piece->frc);
349         return max (DCPTime (), d + piece->content->position ());
350 }
351
352 Frame
353 Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
354 {
355         DCPTime s = t - piece->content->position ();
356         s = min (piece->content->length_after_trim(), s);
357         /* See notes in dcp_to_content_video */
358         return max (DCPTime (), DCPTime (piece->content->trim_start (), piece->frc) + s).frames_floor (_film->audio_frame_rate ());
359 }
360
361 DCPTime
362 Player::resampled_audio_to_dcp (shared_ptr<const Piece> piece, Frame f) const
363 {
364         /* See comment in dcp_to_content_video */
365         return DCPTime::from_frames (f, _film->audio_frame_rate())
366                 - DCPTime (piece->content->trim_start(), piece->frc)
367                 + piece->content->position();
368 }
369
370 ContentTime
371 Player::dcp_to_content_time (shared_ptr<const Piece> piece, DCPTime t) const
372 {
373         DCPTime s = t - piece->content->position ();
374         s = min (piece->content->length_after_trim(), s);
375         return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
376 }
377
378 DCPTime
379 Player::content_time_to_dcp (shared_ptr<const Piece> piece, ContentTime t) const
380 {
381         return max (DCPTime (), DCPTime (t - piece->content->trim_start(), piece->frc) + piece->content->position());
382 }
383
384 list<shared_ptr<Font> >
385 Player::get_subtitle_fonts ()
386 {
387         if (!_have_valid_pieces) {
388                 setup_pieces ();
389         }
390
391         list<shared_ptr<Font> > fonts;
392         BOOST_FOREACH (shared_ptr<Piece>& p, _pieces) {
393                 if (p->content->subtitle) {
394                         /* XXX: things may go wrong if there are duplicate font IDs
395                            with different font files.
396                         */
397                         list<shared_ptr<Font> > f = p->content->subtitle->fonts ();
398                         copy (f.begin(), f.end(), back_inserter (fonts));
399                 }
400         }
401
402         return fonts;
403 }
404
405 /** Set this player never to produce any video data */
406 void
407 Player::set_ignore_video ()
408 {
409         _ignore_video = true;
410 }
411
412 void
413 Player::set_ignore_subtitle ()
414 {
415         _ignore_subtitle = true;
416 }
417
418 /** Set whether or not this player should always burn text subtitles into the image,
419  *  regardless of the content settings.
420  *  @param burn true to always burn subtitles, false to obey content settings.
421  */
422 void
423 Player::set_always_burn_subtitles (bool burn)
424 {
425         _always_burn_subtitles = burn;
426 }
427
428 void
429 Player::set_fast ()
430 {
431         _fast = true;
432         _have_valid_pieces = false;
433 }
434
435 void
436 Player::set_play_referenced ()
437 {
438         _play_referenced = true;
439         _have_valid_pieces = false;
440 }
441
442 list<ReferencedReelAsset>
443 Player::get_reel_assets ()
444 {
445         list<ReferencedReelAsset> a;
446
447         BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
448                 shared_ptr<DCPContent> j = dynamic_pointer_cast<DCPContent> (i);
449                 if (!j) {
450                         continue;
451                 }
452
453                 scoped_ptr<DCPDecoder> decoder;
454                 try {
455                         decoder.reset (new DCPDecoder (j, _film->log()));
456                 } catch (...) {
457                         return a;
458                 }
459
460                 int64_t offset = 0;
461                 BOOST_FOREACH (shared_ptr<dcp::Reel> k, decoder->reels()) {
462
463                         DCPOMATIC_ASSERT (j->video_frame_rate ());
464                         double const cfr = j->video_frame_rate().get();
465                         Frame const trim_start = j->trim_start().frames_round (cfr);
466                         Frame const trim_end = j->trim_end().frames_round (cfr);
467                         int const ffr = _film->video_frame_rate ();
468
469                         DCPTime const from = i->position() + DCPTime::from_frames (offset, _film->video_frame_rate());
470                         if (j->reference_video ()) {
471                                 shared_ptr<dcp::ReelAsset> ra = k->main_picture ();
472                                 DCPOMATIC_ASSERT (ra);
473                                 ra->set_entry_point (ra->entry_point() + trim_start);
474                                 ra->set_duration (ra->duration() - trim_start - trim_end);
475                                 a.push_back (
476                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
477                                         );
478                         }
479
480                         if (j->reference_audio ()) {
481                                 shared_ptr<dcp::ReelAsset> ra = k->main_sound ();
482                                 DCPOMATIC_ASSERT (ra);
483                                 ra->set_entry_point (ra->entry_point() + trim_start);
484                                 ra->set_duration (ra->duration() - trim_start - trim_end);
485                                 a.push_back (
486                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
487                                         );
488                         }
489
490                         if (j->reference_subtitle ()) {
491                                 shared_ptr<dcp::ReelAsset> ra = k->main_subtitle ();
492                                 DCPOMATIC_ASSERT (ra);
493                                 ra->set_entry_point (ra->entry_point() + trim_start);
494                                 ra->set_duration (ra->duration() - trim_start - trim_end);
495                                 a.push_back (
496                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
497                                         );
498                         }
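                        /* Worked example (hypothetical figures): a referenced reel of 1000 frames with
                           24 frames trimmed from each end is referenced above with its entry point moved
                           forward by 24 and a duration of 952, over a DCP period starting at the content's
                           position plus the accumulated reel offset.
                        */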
499
500                         /* Assume that main picture duration is the length of the reel */
501                         offset += k->main_picture()->duration ();
502                 }
503         }
504
505         return a;
506 }
507
508 bool
509 Player::pass ()
510 {
511         if (!_have_valid_pieces) {
512                 setup_pieces ();
513         }
514
515         if (_playlist->length() == DCPTime()) {
516                 /* Special case of an empty Film; just give one black frame */
517                 emit_video (black_player_video_frame(), DCPTime());
518                 return true;
519         }
520
521         /* Find the decoder or empty which is farthest behind where we are and make it emit some data */
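        /* For example (hypothetical positions): if the earliest un-finished piece maps to 10s but the
           black filler (_black) is still at 8s, BLACK is chosen below and a single black frame is
           emitted at 8s, so content, black and silence advance together.
        */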
522
523         shared_ptr<Piece> earliest_content;
524         optional<DCPTime> earliest_time;
525
526         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
527                 if (!i->done) {
528                         DCPTime const t = content_time_to_dcp (i, i->decoder->position());
529                         /* Given two choices at the same time, pick the one with a subtitle so we see it before
530                            the video.
531                         */
532                         if (!earliest_time || t < *earliest_time || (t == *earliest_time && i->decoder->subtitle)) {
533                                 earliest_time = t;
534                                 earliest_content = i;
535                         }
536                 }
537         }
538
539         bool done = false;
540
541         enum {
542                 NONE,
543                 CONTENT,
544                 BLACK,
545                 SILENT
546         } which = NONE;
547
548         if (earliest_content) {
549                 which = CONTENT;
550         }
551
552         if (!_black.done() && (!earliest_time || _black.position() < *earliest_time)) {
553                 earliest_time = _black.position ();
554                 which = BLACK;
555         }
556
557         if (!_silent.done() && (!earliest_time || _silent.position() < *earliest_time)) {
558                 earliest_time = _silent.position ();
559                 which = SILENT;
560         }
561
562         switch (which) {
563         case CONTENT:
564                 earliest_content->done = earliest_content->decoder->pass ();
565                 break;
566         case BLACK:
567                 emit_video (black_player_video_frame(), _black.position());
568                 _black.set_position (_black.position() + one_video_frame());
569                 break;
570         case SILENT:
571         {
572                 DCPTimePeriod period (_silent.period_at_position());
573                 if (period.duration() > one_video_frame()) {
574                         period.to = period.from + one_video_frame();
575                 }
576                 fill_audio (period);
577                 _silent.set_position (period.to);
578                 break;
579         }
580         case NONE:
581                 done = true;
582                 break;
583         }
584
585         /* Emit any audio that is ready */
586
587         DCPTime pull_to = _film->length ();
588         for (map<AudioStreamPtr, StreamState>::const_iterator i = _stream_states.begin(); i != _stream_states.end(); ++i) {
589                 if (!i->second.piece->done && i->second.last_push_end < pull_to) {
590                         pull_to = i->second.last_push_end;
591                 }
592         }
593
594         list<pair<shared_ptr<AudioBuffers>, DCPTime> > audio = _audio_merger.pull (pull_to);
595         for (list<pair<shared_ptr<AudioBuffers>, DCPTime> >::iterator i = audio.begin(); i != audio.end(); ++i) {
596                 if (_last_audio_time && i->second < *_last_audio_time) {
597                         /* There has been an accurate seek and we have received some audio before the seek time;
598                            discard it.
599                         */
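                        /* e.g. (hypothetical times) after an accurate seek to 10.00s, a merged block
                           starting at 9.98s has everything before 10.00s dropped by discard_audio()
                           below and the remainder re-timed to start at 10.00s.
                        */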
600                         pair<shared_ptr<AudioBuffers>, DCPTime> cut = discard_audio (i->first, i->second, *_last_audio_time);
601                         if (!cut.first) {
602                                 continue;
603                         }
604                         *i = cut;
605                 }
606
607                 emit_audio (i->first, i->second);
608         }
609
610         return done;
611 }
612
613 optional<PositionImage>
614 Player::subtitles_for_frame (DCPTime time) const
615 {
616         list<PositionImage> subtitles;
617
618         BOOST_FOREACH (PlayerSubtitles i, _active_subtitles.get_burnt (time, _always_burn_subtitles)) {
619
620                 /* Image subtitles */
621                 list<PositionImage> c = transform_image_subtitles (i.image);
622                 copy (c.begin(), c.end(), back_inserter (subtitles));
623
624                 /* Text subtitles (rendered to an image) */
625                 if (!i.text.empty ()) {
626                         list<PositionImage> s = render_subtitles (i.text, i.fonts, _video_container_size, time);
627                         copy (s.begin(), s.end(), back_inserter (subtitles));
628                 }
629         }
630
631         if (subtitles.empty ()) {
632                 return optional<PositionImage> ();
633         }
634
635         return merge (subtitles);
636 }
637
638 void
639 Player::video (weak_ptr<Piece> wp, ContentVideo video)
640 {
641         shared_ptr<Piece> piece = wp.lock ();
642         if (!piece) {
643                 return;
644         }
645
646         FrameRateChange frc(piece->content->active_video_frame_rate(), _film->video_frame_rate());
647         if (frc.skip && (video.frame % 2) == 1) {
648                 return;
649         }
650
651         /* Time of the first frame we will emit */
652         DCPTime const time = content_video_to_dcp (piece, video.frame);
653
654         /* Discard if it's outside the content's period or if it's before the last accurate seek */
655         if (
656                 time < piece->content->position() ||
657                 time >= piece->content->end() ||
658                 (_last_video_time && time < *_last_video_time)) {
659                 return;
660         }
661
662         /* Fill gaps that we discover now that we have some video which needs to be emitted */
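        /* e.g. (hypothetical 25 fps DCP): if _last_video_time is 4.00s and this frame lands at 4.20s,
           the loop below re-emits this piece's previous frame (or black) at 4.00, 4.04, ... 4.16
           before the new frame goes out at 4.20.
        */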
663
664         if (_last_video_time) {
665                 /* XXX: this may not work for 3D */
666                 DCPTime fill_from = max (*_last_video_time, piece->content->position());
667                 for (DCPTime j = fill_from; j < time; j += one_video_frame()) {
668                         LastVideoMap::const_iterator k = _last_video.find (wp);
669                         if (k != _last_video.end ()) {
670                                 emit_video (k->second, j);
671                         } else {
672                                 emit_video (black_player_video_frame(), j);
673                         }
674                 }
675         }
676
677         _last_video[wp].reset (
678                 new PlayerVideo (
679                         video.image,
680                         piece->content->video->crop (),
681                         piece->content->video->fade (video.frame),
682                         piece->content->video->scale().size (
683                                 piece->content->video, _video_container_size, _film->frame_size ()
684                                 ),
685                         _video_container_size,
686                         video.eyes,
687                         video.part,
688                         piece->content->video->colour_conversion ()
689                         )
690                 );
691
692         DCPTime t = time;
693         for (int i = 0; i < frc.repeat; ++i) {
694                 emit_video (_last_video[wp], t);
695                 t += one_video_frame ();
696         }
697 }
698
699 void
700 Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_audio)
701 {
702         DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);
703
704         shared_ptr<Piece> piece = wp.lock ();
705         if (!piece) {
706                 return;
707         }
708
709         shared_ptr<AudioContent> content = piece->content->audio;
710         DCPOMATIC_ASSERT (content);
711
712         /* Compute time in the DCP */
713         DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame);
714         /* And the end of this block in the DCP */
715         DCPTime end = time + DCPTime::from_frames(content_audio.audio->frames(), content->resampled_frame_rate());
716
717         /* Remove anything that comes before the start or after the end of the content */
718         if (time < piece->content->position()) {
719                 pair<shared_ptr<AudioBuffers>, DCPTime> cut = discard_audio (content_audio.audio, time, piece->content->position());
720                 if (!cut.first) {
721                         /* This audio is entirely discarded */
722                         return;
723                 }
724                 content_audio.audio = cut.first;
725                 time = cut.second;
726         } else if (time > piece->content->end()) {
727                 /* Discard it all */
728                 return;
729         } else if (end > piece->content->end()) {
730                 Frame const remaining_frames = DCPTime(piece->content->end() - time).frames_round(_film->audio_frame_rate());
731                 if (remaining_frames == 0) {
732                         return;
733                 }
734                 shared_ptr<AudioBuffers> cut (new AudioBuffers (content_audio.audio->channels(), remaining_frames));
735                 cut->copy_from (content_audio.audio.get(), remaining_frames, 0, 0);
736                 content_audio.audio = cut;
737         }
738
739         DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);
740
741         /* Gain */
742
743         if (content->gain() != 0) {
744                 shared_ptr<AudioBuffers> gain (new AudioBuffers (content_audio.audio));
745                 gain->apply_gain (content->gain ());
746                 content_audio.audio = gain;
747         }
748
749         /* Remap */
750
751         content_audio.audio = remap (content_audio.audio, _film->audio_channels(), stream->mapping());
752
753         /* Process */
754
755         if (_audio_processor) {
756                 content_audio.audio = _audio_processor->run (content_audio.audio, _film->audio_channels ());
757         }
758
759         /* Push */
760
761         _audio_merger.push (content_audio.audio, time);
762         DCPOMATIC_ASSERT (_stream_states.find (stream) != _stream_states.end ());
763         _stream_states[stream].last_push_end = time + DCPTime::from_frames (content_audio.audio->frames(), _film->audio_frame_rate());
764 }
765
766 void
767 Player::image_subtitle_start (weak_ptr<Piece> wp, ContentImageSubtitle subtitle)
768 {
769         shared_ptr<Piece> piece = wp.lock ();
770         if (!piece) {
771                 return;
772         }
773
774         /* Apply content's subtitle offsets */
775         subtitle.sub.rectangle.x += piece->content->subtitle->x_offset ();
776         subtitle.sub.rectangle.y += piece->content->subtitle->y_offset ();
777
778         /* Apply content's subtitle scale */
779         subtitle.sub.rectangle.width *= piece->content->subtitle->x_scale ();
780         subtitle.sub.rectangle.height *= piece->content->subtitle->y_scale ();
781
782         /* Apply a corrective translation to keep the subtitle centred after that scale */
783         subtitle.sub.rectangle.x -= subtitle.sub.rectangle.width * (piece->content->subtitle->x_scale() - 1);
784         subtitle.sub.rectangle.y -= subtitle.sub.rectangle.height * (piece->content->subtitle->y_scale() - 1);
785
786         PlayerSubtitles ps;
787         ps.image.push_back (subtitle.sub);
788         DCPTime from (content_time_to_dcp (piece, subtitle.from()));
789
790         _active_subtitles.add_from (wp, ps, from);
791 }
792
793 void
794 Player::text_subtitle_start (weak_ptr<Piece> wp, ContentTextSubtitle subtitle)
795 {
796         shared_ptr<Piece> piece = wp.lock ();
797         if (!piece) {
798                 return;
799         }
800
801         PlayerSubtitles ps;
802         DCPTime const from (content_time_to_dcp (piece, subtitle.from()));
803
804         BOOST_FOREACH (dcp::SubtitleString s, subtitle.subs) {
805                 s.set_h_position (s.h_position() + piece->content->subtitle->x_offset ());
806                 s.set_v_position (s.v_position() + piece->content->subtitle->y_offset ());
807                 float const xs = piece->content->subtitle->x_scale();
808                 float const ys = piece->content->subtitle->y_scale();
809                 float size = s.size();
810
811                 /* Adjust size to express the common part of the scaling;
812                    e.g. if xs = ys = 0.5 we scale size by 0.5 (the expression below is equivalent to size *= max (xs, ys)).
813                 */
814                 if (xs > 1e-5 && ys > 1e-5) {
815                         size *= 1 / min (1 / xs, 1 / ys);
816                 }
817                 s.set_size (size);
818
819                 /* Then express aspect ratio changes */
820                 if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) {
821                         s.set_aspect_adjust (xs / ys);
822                 }
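                /* e.g. (hypothetical scales) xs = 1.2, ys = 0.8: size is multiplied by 1.2 above and
                   aspect_adjust is set to 1.2 / 0.8 = 1.5 to express the remaining difference between
                   the two scales.
                */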
823
824                 s.set_in (dcp::Time(from.seconds(), 1000));
825                 ps.text.push_back (SubtitleString (s, piece->content->subtitle->outline_width()));
826                 ps.add_fonts (piece->content->subtitle->fonts ());
827         }
828
829         _active_subtitles.add_from (wp, ps, from);
830 }
831
832 void
833 Player::subtitle_stop (weak_ptr<Piece> wp, ContentTime to)
834 {
835         if (!_active_subtitles.have (wp)) {
836                 return;
837         }
838
839         shared_ptr<Piece> piece = wp.lock ();
840         if (!piece) {
841                 return;
842         }
843
844         DCPTime const dcp_to = content_time_to_dcp (piece, to);
845
846         pair<PlayerSubtitles, DCPTime> from = _active_subtitles.add_to (wp, dcp_to);
847
848         if (piece->content->subtitle->use() && !_always_burn_subtitles && !piece->content->subtitle->burn()) {
849                 Subtitle (from.first, DCPTimePeriod (from.second, dcp_to));
850         }
851 }
852
853 void
854 Player::seek (DCPTime time, bool accurate)
855 {
856         if (!_have_valid_pieces) {
857                 setup_pieces ();
858         }
859
860         if (_audio_processor) {
861                 _audio_processor->flush ();
862         }
863
864         _audio_merger.clear ();
865         _active_subtitles.clear ();
866
867         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
868                 if (time < i->content->position()) {
869                         /* Before; seek to 0 */
870                         i->decoder->seek (ContentTime(), accurate);
871                         i->done = false;
872                 } else if (i->content->position() <= time && time < i->content->end()) {
873                         /* During; seek to position */
874                         i->decoder->seek (dcp_to_content_time (i, time), accurate);
875                         i->done = false;
876                 } else {
877                         /* After; this piece is done */
878                         i->done = true;
879                 }
880         }
881
882         if (accurate) {
883                 _last_video_time = time;
884                 _last_audio_time = time;
885         } else {
886                 _last_video_time = optional<DCPTime>();
887                 _last_audio_time = optional<DCPTime>();
888         }
889
890         _black.set_position (time);
891         _silent.set_position (time);
892
893         _last_video.clear ();
894 }
895
896 void
897 Player::emit_video (shared_ptr<PlayerVideo> pv, DCPTime time)
898 {
899         optional<PositionImage> subtitles = subtitles_for_frame (time);
900         if (subtitles) {
901                 pv->set_subtitle (subtitles.get ());
902         }
903
904         Video (pv, time);
905
906         if (pv->eyes() == EYES_BOTH || pv->eyes() == EYES_RIGHT) {
907                 _last_video_time = time + one_video_frame();
908                 _active_subtitles.clear_before (time);
909         }
910 }
911
912 void
913 Player::emit_audio (shared_ptr<AudioBuffers> data, DCPTime time)
914 {
915         Audio (data, time);
916         _last_audio_time = time + DCPTime::from_frames (data->frames(), _film->audio_frame_rate());
917 }
918
919 void
920 Player::fill_audio (DCPTimePeriod period)
921 {
922         if (period.from == period.to) {
923                 return;
924         }
925
926         DCPOMATIC_ASSERT (period.from < period.to);
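        /* e.g. (hypothetical 48 kHz film) a 1.3s gap is emitted by the loop below as silent blocks
           of 0.5s (24000 frames), 0.5s and 0.3s (14400 frames).
        */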
927
928         DCPTime t = period.from;
929         while (t < period.to) {
930                 DCPTime block = min (DCPTime::from_seconds (0.5), period.to - t);
931                 Frame const samples = block.frames_round(_film->audio_frame_rate());
932                 if (samples) {
933                         shared_ptr<AudioBuffers> silence (new AudioBuffers (_film->audio_channels(), samples));
934                         silence->make_silent ();
935                         emit_audio (silence, t);
936                 }
937                 t += block;
938         }
939 }
940
941 DCPTime
942 Player::one_video_frame () const
943 {
944         return DCPTime::from_frames (1, _film->video_frame_rate ());
945 }
946
947 pair<shared_ptr<AudioBuffers>, DCPTime>
948 Player::discard_audio (shared_ptr<const AudioBuffers> audio, DCPTime time, DCPTime discard_to) const
949 {
950         DCPTime const discard_time = discard_to - time;
951         Frame const discard_frames = discard_time.frames_round(_film->audio_frame_rate());
952         Frame remaining_frames = audio->frames() - discard_frames;
953         if (remaining_frames <= 0) {
954                 return make_pair(shared_ptr<AudioBuffers>(), DCPTime());
955         }
956         shared_ptr<AudioBuffers> cut (new AudioBuffers (audio->channels(), remaining_frames));
957         cut->copy_from (audio.get(), remaining_frames, discard_frames, 0);
958         return make_pair(cut, time + discard_time);
959 }
960
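/* Editorial sketch (not from the original source; assumes each JPEG2000 reduction level halves the
   decoded width and height): a hypothetical caller holding a shared_ptr<Player> could switch the
   preview to quarter-size decoding and back with

       player->set_dcp_decode_reduction (2);                      // 1/4 width and height
       player->set_dcp_decode_reduction (boost::optional<int>()); // full resolution again

   A call that changes the value marks the pieces invalid and emits Changed (false), so viewers
   re-fetch their video.
*/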
961 void
962 Player::set_dcp_decode_reduction (optional<int> reduction)
963 {
964         if (reduction == _dcp_decode_reduction) {
965                 return;
966         }
967
968         _dcp_decode_reduction = reduction;
969         _have_valid_pieces = false;
970         Changed (false);
971 }
972 }