Remove Film dependency from Empty.
[dcpomatic.git] / src / lib / player.cc
1 /*
2     Copyright (C) 2013-2017 Carl Hetherington <cth@carlh.net>
3
4     This file is part of DCP-o-matic.
5
6     DCP-o-matic is free software; you can redistribute it and/or modify
7     it under the terms of the GNU General Public License as published by
8     the Free Software Foundation; either version 2 of the License, or
9     (at your option) any later version.
10
11     DCP-o-matic is distributed in the hope that it will be useful,
12     but WITHOUT ANY WARRANTY; without even the implied warranty of
13     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14     GNU General Public License for more details.
15
16     You should have received a copy of the GNU General Public License
17     along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.
18
19 */
20
21 #include "player.h"
22 #include "film.h"
23 #include "audio_buffers.h"
24 #include "content_audio.h"
25 #include "dcp_content.h"
26 #include "job.h"
27 #include "image.h"
28 #include "raw_image_proxy.h"
29 #include "ratio.h"
30 #include "log.h"
31 #include "render_subtitles.h"
32 #include "config.h"
33 #include "content_video.h"
34 #include "player_video.h"
35 #include "frame_rate_change.h"
36 #include "audio_processor.h"
37 #include "playlist.h"
38 #include "referenced_reel_asset.h"
39 #include "decoder_factory.h"
40 #include "decoder.h"
41 #include "video_decoder.h"
42 #include "audio_decoder.h"
43 #include "subtitle_content.h"
44 #include "subtitle_decoder.h"
45 #include "ffmpeg_content.h"
46 #include "audio_content.h"
47 #include "content_subtitle.h"
48 #include "dcp_decoder.h"
49 #include "image_decoder.h"
50 #include "compose.hpp"
51 #include <dcp/reel.h>
52 #include <dcp/reel_sound_asset.h>
53 #include <dcp/reel_subtitle_asset.h>
54 #include <dcp/reel_picture_asset.h>
55 #include <boost/foreach.hpp>
56 #include <stdint.h>
57 #include <algorithm>
58 #include <iostream>
59
60 #include "i18n.h"
61
62 #define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);
63
64 using std::list;
65 using std::cout;
66 using std::min;
67 using std::max;
68 using std::min;
69 using std::vector;
70 using std::pair;
71 using std::map;
72 using std::make_pair;
73 using std::copy;
74 using boost::shared_ptr;
75 using boost::weak_ptr;
76 using boost::dynamic_pointer_cast;
77 using boost::optional;
78 using boost::scoped_ptr;
79
/** Construct a Player to play the given Film/Playlist combination.
 *  @param film Film whose global parameters (frame size, rates, length) we obey.
 *  @param playlist Playlist giving the content to play.
 */
Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
        : _film (film)
        , _playlist (playlist)
        , _have_valid_pieces (false)
        , _ignore_video (false)
        , _ignore_subtitle (false)
        , _always_burn_subtitles (false)
        , _fast (false)
        , _play_referenced (false)
        , _audio_merger (_film->audio_frame_rate())
{
        /* Keep ourselves up to date with changes to the film and its playlist;
           the connections are stored so they are dropped when we are destroyed.
        */
        _film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
        _playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
        _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::playlist_content_changed, this, _1, _2, _3));
        set_video_container_size (_film->frame_size ());

        /* Pick up the film's audio processor (if any) as if it had just changed */
        film_changed (Film::AUDIO_PROCESSOR);

        /* Start at the beginning with an accurate seek */
        seek (DCPTime (), true);
}
100
/** Rebuild _pieces from the playlist's current content, wiring each decoder's
 *  output signals to our handlers, and reset the black/silent "filler" tracking
 *  and the last emission times.  Called lazily whenever _have_valid_pieces is false.
 */
void
Player::setup_pieces ()
{
        _pieces.clear ();

        BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {

                /* Skip content whose files are missing */
                if (!i->paths_valid ()) {
                        continue;
                }

                shared_ptr<Decoder> decoder = decoder_factory (i, _film->log());
                FrameRateChange frc (i->active_video_frame_rate(), _film->video_frame_rate());

                if (!decoder) {
                        /* Not something that we can decode; e.g. Atmos content */
                        continue;
                }

                /* Apply the player-wide ignore flags to this decoder */
                if (decoder->video && _ignore_video) {
                        decoder->video->set_ignore ();
                }

                if (decoder->subtitle && _ignore_subtitle) {
                        decoder->subtitle->set_ignore ();
                }

                shared_ptr<DCPDecoder> dcp = dynamic_pointer_cast<DCPDecoder> (decoder);
                if (dcp && _play_referenced) {
                        dcp->set_decode_referenced ();
                }

                shared_ptr<Piece> piece (new Piece (i, decoder, frc));
                _pieces.push_back (piece);

                /* Route decoder output to our handlers; weak_ptr so a dropped
                   Piece does not keep itself alive through the connection.
                */
                if (decoder->video) {
                        decoder->video->Data.connect (bind (&Player::video, this, weak_ptr<Piece> (piece), _1));
                }

                if (decoder->audio) {
                        decoder->audio->Data.connect (bind (&Player::audio, this, weak_ptr<Piece> (piece), _1, _2));
                }

                if (decoder->subtitle) {
                        decoder->subtitle->ImageStart.connect (bind (&Player::image_subtitle_start, this, weak_ptr<Piece> (piece), _1));
                        decoder->subtitle->TextStart.connect (bind (&Player::text_subtitle_start, this, weak_ptr<Piece> (piece), _1));
                        decoder->subtitle->Stop.connect (bind (&Player::subtitle_stop, this, weak_ptr<Piece> (piece), _1));
                }
        }

        /* Record, per audio stream, which piece it belongs to and where its
           pushed audio currently ends (initially the content's position).
        */
        BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
                if (i->content->audio) {
                        BOOST_FOREACH (AudioStreamPtr j, i->content->audio->streams()) {
                                _stream_states[j] = StreamState (i, i->content->position ());
                        }
                }
        }

        /* Work out the periods for which we must generate black video / silent audio */
        _black = Empty (_film->content(), _film->length(), bind(&Content::video, _1));
        _silent = Empty (_film->content(), _film->length(), bind(&Content::audio, _1));

        _last_video_time = DCPTime ();
        _last_audio_time = DCPTime ();
        _have_valid_pieces = true;
}
166
167 void
168 Player::playlist_content_changed (weak_ptr<Content> w, int property, bool frequent)
169 {
170         shared_ptr<Content> c = w.lock ();
171         if (!c) {
172                 return;
173         }
174
175         if (
176                 property == ContentProperty::POSITION ||
177                 property == ContentProperty::LENGTH ||
178                 property == ContentProperty::TRIM_START ||
179                 property == ContentProperty::TRIM_END ||
180                 property == ContentProperty::PATH ||
181                 property == VideoContentProperty::FRAME_TYPE ||
182                 property == DCPContentProperty::NEEDS_ASSETS ||
183                 property == DCPContentProperty::NEEDS_KDM ||
184                 property == SubtitleContentProperty::COLOUR ||
185                 property == SubtitleContentProperty::OUTLINE ||
186                 property == SubtitleContentProperty::SHADOW ||
187                 property == SubtitleContentProperty::EFFECT_COLOUR ||
188                 property == FFmpegContentProperty::SUBTITLE_STREAM ||
189                 property == VideoContentProperty::COLOUR_CONVERSION
190                 ) {
191
192                 _have_valid_pieces = false;
193                 Changed (frequent);
194
195         } else if (
196                 property == SubtitleContentProperty::LINE_SPACING ||
197                 property == SubtitleContentProperty::OUTLINE_WIDTH ||
198                 property == SubtitleContentProperty::Y_SCALE ||
199                 property == SubtitleContentProperty::FADE_IN ||
200                 property == SubtitleContentProperty::FADE_OUT ||
201                 property == ContentProperty::VIDEO_FRAME_RATE ||
202                 property == SubtitleContentProperty::USE ||
203                 property == SubtitleContentProperty::X_OFFSET ||
204                 property == SubtitleContentProperty::Y_OFFSET ||
205                 property == SubtitleContentProperty::X_SCALE ||
206                 property == SubtitleContentProperty::FONTS ||
207                 property == VideoContentProperty::CROP ||
208                 property == VideoContentProperty::SCALE ||
209                 property == VideoContentProperty::FADE_IN ||
210                 property == VideoContentProperty::FADE_OUT
211                 ) {
212
213                 Changed (frequent);
214         }
215 }
216
217 void
218 Player::set_video_container_size (dcp::Size s)
219 {
220         if (s == _video_container_size) {
221                 return;
222         }
223
224         _video_container_size = s;
225
226         _black_image.reset (new Image (AV_PIX_FMT_RGB24, _video_container_size, true));
227         _black_image->make_black ();
228
229         Changed (false);
230 }
231
/** Handler for a wholesale change to the playlist: our pieces must be
 *  rebuilt before the next pass/seek, and our output has changed.
 */
void
Player::playlist_changed ()
{
        _have_valid_pieces = false;
        Changed (false);
}
238
239 void
240 Player::film_changed (Film::Property p)
241 {
242         /* Here we should notice Film properties that affect our output, and
243            alert listeners that our output now would be different to how it was
244            last time we were run.
245         */
246
247         if (p == Film::CONTAINER) {
248                 Changed (false);
249         } else if (p == Film::VIDEO_FRAME_RATE) {
250                 /* Pieces contain a FrameRateChange which contains the DCP frame rate,
251                    so we need new pieces here.
252                 */
253                 _have_valid_pieces = false;
254                 Changed (false);
255         } else if (p == Film::AUDIO_PROCESSOR) {
256                 if (_film->audio_processor ()) {
257                         _audio_processor = _film->audio_processor()->clone (_film->audio_frame_rate ());
258                 }
259         }
260 }
261
262 list<PositionImage>
263 Player::transform_image_subtitles (list<ImageSubtitle> subs) const
264 {
265         list<PositionImage> all;
266
267         for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
268                 if (!i->image) {
269                         continue;
270                 }
271
272                 /* We will scale the subtitle up to fit _video_container_size */
273                 dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);
274
275                 /* Then we need a corrective translation, consisting of two parts:
276                  *
277                  * 1.  that which is the result of the scaling of the subtitle by _video_container_size; this will be
278                  *     rect.x * _video_container_size.width and rect.y * _video_container_size.height.
279                  *
280                  * 2.  that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
281                  *     (width_before_subtitle_scale * (1 - subtitle_x_scale) / 2) and
282                  *     (height_before_subtitle_scale * (1 - subtitle_y_scale) / 2).
283                  *
284                  * Combining these two translations gives these expressions.
285                  */
286
287                 all.push_back (
288                         PositionImage (
289                                 i->image->scale (
290                                         scaled_size,
291                                         dcp::YUV_TO_RGB_REC601,
292                                         i->image->pixel_format (),
293                                         true,
294                                         _fast
295                                         ),
296                                 Position<int> (
297                                         lrint (_video_container_size.width * i->rectangle.x),
298                                         lrint (_video_container_size.height * i->rectangle.y)
299                                         )
300                                 )
301                         );
302         }
303
304         return all;
305 }
306
307 shared_ptr<PlayerVideo>
308 Player::black_player_video_frame () const
309 {
310         return shared_ptr<PlayerVideo> (
311                 new PlayerVideo (
312                         shared_ptr<const ImageProxy> (new RawImageProxy (_black_image)),
313                         Crop (),
314                         optional<double> (),
315                         _video_container_size,
316                         _video_container_size,
317                         EYES_BOTH,
318                         PART_WHOLE,
319                         PresetColourConversion::all().front().conversion
320                 )
321         );
322 }
323
/** Convert a DCP time to a video frame index within a piece of content.
 *  @param piece Piece concerned.
 *  @param t DCP time.
 *  @return Frame index at the content's video rate, accounting for trim and skip/repeat.
 */
Frame
Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
{
        DCPTime s = t - piece->content->position ();
        /* Clamp to the content's trimmed length, then shift by its start trim */
        s = min (piece->content->length_after_trim(), s);
        s = max (DCPTime(), s + DCPTime (piece->content->trim_start(), piece->frc));

        /* It might seem more logical here to convert s to a ContentTime (using the FrameRateChange)
           then convert that ContentTime to frames at the content's rate.  However this fails for
           situations like content at 29.9978733fps, DCP at 30fps.  The accuracy of the Time type is not
           enough to distinguish between the two with low values of time (e.g. 3200 in Time units).

           Instead we convert the DCPTime using the DCP video rate then account for any skip/repeat.
        */
        return s.frames_floor (piece->frc.dcp) / piece->frc.factor ();
}
340
/** Convert a content video frame index to a DCP time; inverse of dcp_to_content_video.
 *  @param piece Piece concerned.
 *  @param f Frame index at the content's video rate.
 *  @return DCP time, clamped so it is never negative.
 */
DCPTime
Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
{
        /* See comment in dcp_to_content_video */
        DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime (piece->content->trim_start (), piece->frc);
        return max (DCPTime (), d + piece->content->position ());
}
348
/** Convert a DCP time to an audio frame index within a piece of content,
 *  at the film's (resampled) audio rate.
 *  @param piece Piece concerned.
 *  @param t DCP time.
 *  @return Audio frame index, never negative.
 */
Frame
Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
{
        DCPTime s = t - piece->content->position ();
        s = min (piece->content->length_after_trim(), s);
        /* See notes in dcp_to_content_video */
        return max (DCPTime (), DCPTime (piece->content->trim_start (), piece->frc) + s).frames_floor (_film->audio_frame_rate ());
}
357
358 DCPTime
359 Player::resampled_audio_to_dcp (shared_ptr<const Piece> piece, Frame f) const
360 {
361         /* See comment in dcp_to_content_video */
362         return DCPTime::from_frames (f, _film->audio_frame_rate())
363                 - DCPTime (piece->content->trim_start(), piece->frc)
364                 + piece->content->position();
365 }
366
/** Convert a DCP time to a ContentTime within a piece, accounting for
 *  position and start trim.
 *  @param piece Piece concerned.
 *  @param t DCP time.
 *  @return Content time, never negative.
 */
ContentTime
Player::dcp_to_content_time (shared_ptr<const Piece> piece, DCPTime t) const
{
        DCPTime s = t - piece->content->position ();
        s = min (piece->content->length_after_trim(), s);
        return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
}
374
375 DCPTime
376 Player::content_time_to_dcp (shared_ptr<const Piece> piece, ContentTime t) const
377 {
378         return max (DCPTime (), DCPTime (t - piece->content->trim_start(), piece->frc) + piece->content->position());
379 }
380
381 list<shared_ptr<Font> >
382 Player::get_subtitle_fonts ()
383 {
384         if (!_have_valid_pieces) {
385                 setup_pieces ();
386         }
387
388         list<shared_ptr<Font> > fonts;
389         BOOST_FOREACH (shared_ptr<Piece>& p, _pieces) {
390                 if (p->content->subtitle) {
391                         /* XXX: things may go wrong if there are duplicate font IDs
392                            with different font files.
393                         */
394                         list<shared_ptr<Font> > f = p->content->subtitle->fonts ();
395                         copy (f.begin(), f.end(), back_inserter (fonts));
396                 }
397         }
398
399         return fonts;
400 }
401
/** Set this player never to produce any video data.
 *  NOTE(review): this only affects decoders created by the next setup_pieces();
 *  existing pieces are not invalidated here.
 */
void
Player::set_ignore_video ()
{
        _ignore_video = true;
}
408
/** Set this player never to produce any subtitle data.
 *  NOTE(review): as with set_ignore_video, this only affects decoders created
 *  by the next setup_pieces().
 */
void
Player::set_ignore_subtitle ()
{
        _ignore_subtitle = true;
}
414
/** Set whether or not this player should always burn text subtitles into the image,
 *  regardless of the content settings.
 *  @param burn true to always burn subtitles, false to obey content settings.
 *
 *  The flag is consulted by subtitles_for_frame() when it asks for burnt subtitles.
 */
void
Player::set_always_burn_subtitles (bool burn)
{
        _always_burn_subtitles = burn;
}
424
/** Prefer speed over quality (e.g. when scaling subtitle images); pieces are
 *  invalidated so the flag takes effect on the next pass/seek.
 */
void
Player::set_fast ()
{
        _fast = true;
        _have_valid_pieces = false;
}
431
/** Ask DCP decoders to decode even assets which are marked as referenced
 *  (see DCPDecoder::set_decode_referenced in setup_pieces); pieces are
 *  invalidated so the flag takes effect on the next pass/seek.
 */
void
Player::set_play_referenced ()
{
        _play_referenced = true;
        _have_valid_pieces = false;
}
438
439 list<ReferencedReelAsset>
440 Player::get_reel_assets ()
441 {
442         list<ReferencedReelAsset> a;
443
444         BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
445                 shared_ptr<DCPContent> j = dynamic_pointer_cast<DCPContent> (i);
446                 if (!j) {
447                         continue;
448                 }
449
450                 scoped_ptr<DCPDecoder> decoder;
451                 try {
452                         decoder.reset (new DCPDecoder (j, _film->log()));
453                 } catch (...) {
454                         return a;
455                 }
456
457                 int64_t offset = 0;
458                 BOOST_FOREACH (shared_ptr<dcp::Reel> k, decoder->reels()) {
459
460                         DCPOMATIC_ASSERT (j->video_frame_rate ());
461                         double const cfr = j->video_frame_rate().get();
462                         Frame const trim_start = j->trim_start().frames_round (cfr);
463                         Frame const trim_end = j->trim_end().frames_round (cfr);
464                         int const ffr = _film->video_frame_rate ();
465
466                         DCPTime const from = i->position() + DCPTime::from_frames (offset, _film->video_frame_rate());
467                         if (j->reference_video ()) {
468                                 shared_ptr<dcp::ReelAsset> ra = k->main_picture ();
469                                 DCPOMATIC_ASSERT (ra);
470                                 ra->set_entry_point (ra->entry_point() + trim_start);
471                                 ra->set_duration (ra->duration() - trim_start - trim_end);
472                                 a.push_back (
473                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
474                                         );
475                         }
476
477                         if (j->reference_audio ()) {
478                                 shared_ptr<dcp::ReelAsset> ra = k->main_sound ();
479                                 DCPOMATIC_ASSERT (ra);
480                                 ra->set_entry_point (ra->entry_point() + trim_start);
481                                 ra->set_duration (ra->duration() - trim_start - trim_end);
482                                 a.push_back (
483                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
484                                         );
485                         }
486
487                         if (j->reference_subtitle ()) {
488                                 shared_ptr<dcp::ReelAsset> ra = k->main_subtitle ();
489                                 DCPOMATIC_ASSERT (ra);
490                                 ra->set_entry_point (ra->entry_point() + trim_start);
491                                 ra->set_duration (ra->duration() - trim_start - trim_end);
492                                 a.push_back (
493                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
494                                         );
495                         }
496
497                         /* Assume that main picture duration is the length of the reel */
498                         offset += k->main_picture()->duration ();
499                 }
500         }
501
502         return a;
503 }
504
/** Run one step of playback: make whichever of our sources (a piece of content,
 *  the black filler or the silent filler) is farthest behind emit some data,
 *  then push out any audio which is now complete.
 *  @return true if playback is finished.
 */
bool
Player::pass ()
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        if (_playlist->length() == DCPTime()) {
                /* Special case of an empty Film; just give one black frame */
                emit_video (black_player_video_frame(), DCPTime());
                return true;
        }

        /* Find the decoder or empty which is farthest behind where we are and make it emit some data */

        shared_ptr<Piece> earliest_content;
        optional<DCPTime> earliest_time;

        BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
                if (!i->done) {
                        DCPTime const t = content_time_to_dcp (i, i->decoder->position());
                        /* Given two choices at the same time, pick the one with a subtitle so we see it before
                           the video.
                        */
                        if (!earliest_time || t < *earliest_time || (t == *earliest_time && i->decoder->subtitle)) {
                                earliest_time = t;
                                earliest_content = i;
                        }
                }
        }

        bool done = false;

        /* What kind of thing we will emit on this pass */
        enum {
                NONE,
                CONTENT,
                BLACK,
                SILENT
        } which = NONE;

        if (earliest_content) {
                which = CONTENT;
        }

        /* Black/silent filler takes precedence if it is further behind than the content */
        if (!_black.done() && (!earliest_time || _black.position() < *earliest_time)) {
                earliest_time = _black.position ();
                which = BLACK;
        }

        if (!_silent.done() && (!earliest_time || _silent.position() < *earliest_time)) {
                earliest_time = _silent.position ();
                which = SILENT;
        }

        switch (which) {
        case CONTENT:
                earliest_content->done = earliest_content->decoder->pass ();
                break;
        case BLACK:
                /* Emit one black frame and advance */
                emit_video (black_player_video_frame(), _black.position());
                _black.set_position (_black.position() + one_video_frame());
                break;
        case SILENT:
        {
                /* Fill at most one video frame's worth of silence */
                DCPTimePeriod period (_silent.period_at_position());
                if (period.duration() > one_video_frame()) {
                        period.to = period.from + one_video_frame();
                }
                fill_audio (period);
                _silent.set_position (period.to);
                break;
        }
        case NONE:
                done = true;
                break;
        }

        /* Emit any audio that is ready */

        /* We can only pull audio up to the earliest point that any live stream has
           pushed to, otherwise later pushes could land before already-emitted audio.
        */
        DCPTime pull_to = _film->length ();
        for (map<AudioStreamPtr, StreamState>::const_iterator i = _stream_states.begin(); i != _stream_states.end(); ++i) {
                if (!i->second.piece->done && i->second.last_push_end < pull_to) {
                        pull_to = i->second.last_push_end;
                }
        }

        list<pair<shared_ptr<AudioBuffers>, DCPTime> > audio = _audio_merger.pull (pull_to);
        for (list<pair<shared_ptr<AudioBuffers>, DCPTime> >::iterator i = audio.begin(); i != audio.end(); ++i) {
                if (_last_audio_time && i->second < *_last_audio_time) {
                        /* There has been an accurate seek and we have received some audio before the seek time;
                           discard it.
                        */
                        pair<shared_ptr<AudioBuffers>, DCPTime> cut = discard_audio (i->first, i->second, *_last_audio_time);
                        if (!cut.first) {
                                continue;
                        }
                        *i = cut;
                }

                emit_audio (i->first, i->second);
        }

        return done;
}
609
610 optional<PositionImage>
611 Player::subtitles_for_frame (DCPTime time) const
612 {
613         list<PositionImage> subtitles;
614
615         BOOST_FOREACH (PlayerSubtitles i, _active_subtitles.get_burnt (time, _always_burn_subtitles)) {
616
617                 /* Image subtitles */
618                 list<PositionImage> c = transform_image_subtitles (i.image);
619                 copy (c.begin(), c.end(), back_inserter (subtitles));
620
621                 /* Text subtitles (rendered to an image) */
622                 if (!i.text.empty ()) {
623                         list<PositionImage> s = render_subtitles (i.text, i.fonts, _video_container_size, time);
624                         copy (s.begin(), s.end(), back_inserter (subtitles));
625                 }
626         }
627
628         if (subtitles.empty ()) {
629                 return optional<PositionImage> ();
630         }
631
632         return merge (subtitles);
633 }
634
/** Handler for video data emitted by a piece's decoder.  Converts content
 *  frames to DCP time, discards out-of-range frames, fills any gap since the
 *  last emitted video, and emits the frame (repeated if the frame-rate change
 *  requires it).
 *  @param wp Piece the video came from.
 *  @param video Video data.
 */
void
Player::video (weak_ptr<Piece> wp, ContentVideo video)
{
        shared_ptr<Piece> piece = wp.lock ();
        if (!piece) {
                return;
        }

        FrameRateChange frc(piece->content->active_video_frame_rate(), _film->video_frame_rate());
        /* When skipping, drop every other content frame */
        if (frc.skip && (video.frame % 2) == 1) {
                return;
        }

        /* Time of the first frame we will emit */
        DCPTime const time = content_video_to_dcp (piece, video.frame);

        /* Discard if it's outside the content's period or if it's before the last accurate seek */
        if (
                time < piece->content->position() ||
                time >= piece->content->end() ||
                (_last_video_time && time < *_last_video_time)) {
                return;
        }

        /* Fill gaps that we discover now that we have some video which needs to be emitted */

        if (_last_video_time) {
                /* XXX: this may not work for 3D */
                DCPTime fill_from = max (*_last_video_time, piece->content->position());
                for (DCPTime j = fill_from; j < time; j += one_video_frame()) {
                        /* Repeat this piece's last frame if we have one, otherwise black */
                        LastVideoMap::const_iterator k = _last_video.find (wp);
                        if (k != _last_video.end ()) {
                                emit_video (k->second, j);
                        } else {
                                emit_video (black_player_video_frame(), j);
                        }
                }
        }

        /* Remember this frame so it can be used to fill future gaps */
        _last_video[wp].reset (
                new PlayerVideo (
                        video.image,
                        piece->content->video->crop (),
                        piece->content->video->fade (video.frame),
                        piece->content->video->scale().size (
                                piece->content->video, _video_container_size, _film->frame_size ()
                                ),
                        _video_container_size,
                        video.eyes,
                        video.part,
                        piece->content->video->colour_conversion ()
                        )
                );

        /* Emit the frame, repeating it if the rate change requires (e.g. 24fps content in a 48fps DCP) */
        DCPTime t = time;
        for (int i = 0; i < frc.repeat; ++i) {
                emit_video (_last_video[wp], t);
                t += one_video_frame ();
        }
}
695
/** Handler for audio data emitted by a piece's decoder.  Trims the block to the
 *  content's period, applies gain, remapping and any audio processor, then
 *  pushes it to the merger.
 *  @param wp Piece the audio came from.
 *  @param stream Stream the audio belongs to.
 *  @param content_audio Audio data, with its frame index at the resampled rate.
 */
void
Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_audio)
{
        DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);

        shared_ptr<Piece> piece = wp.lock ();
        if (!piece) {
                return;
        }

        shared_ptr<AudioContent> content = piece->content->audio;
        DCPOMATIC_ASSERT (content);

        /* Compute time in the DCP */
        DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame);
        /* And the end of this block in the DCP */
        DCPTime end = time + DCPTime::from_frames(content_audio.audio->frames(), content->resampled_frame_rate());

        /* Remove anything that comes before the start or after the end of the content */
        if (time < piece->content->position()) {
                pair<shared_ptr<AudioBuffers>, DCPTime> cut = discard_audio (content_audio.audio, time, piece->content->position());
                if (!cut.first) {
                        /* This audio is entirely discarded */
                        return;
                }
                content_audio.audio = cut.first;
                time = cut.second;
        } else if (time > piece->content->end()) {
                /* Discard it all */
                return;
        } else if (end > piece->content->end()) {
                /* The block overlaps the end of the content: keep only the part inside */
                Frame const remaining_frames = DCPTime(piece->content->end() - time).frames_round(_film->audio_frame_rate());
                if (remaining_frames == 0) {
                        return;
                }
                shared_ptr<AudioBuffers> cut (new AudioBuffers (content_audio.audio->channels(), remaining_frames));
                cut->copy_from (content_audio.audio.get(), remaining_frames, 0, 0);
                content_audio.audio = cut;
        }

        DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);

        /* Gain */

        if (content->gain() != 0) {
                /* Copy so we don't modify the decoder's buffers */
                shared_ptr<AudioBuffers> gain (new AudioBuffers (content_audio.audio));
                gain->apply_gain (content->gain ());
                content_audio.audio = gain;
        }

        /* Remap */

        content_audio.audio = remap (content_audio.audio, _film->audio_channels(), stream->mapping());

        /* Process */

        if (_audio_processor) {
                content_audio.audio = _audio_processor->run (content_audio.audio, _film->audio_channels ());
        }

        /* Push */

        _audio_merger.push (content_audio.audio, time);
        DCPOMATIC_ASSERT (_stream_states.find (stream) != _stream_states.end ());
        /* Record how far this stream has now pushed, so pass() knows what it may pull */
        _stream_states[stream].last_push_end = time + DCPTime::from_frames (content_audio.audio->frames(), _film->audio_frame_rate());
}
762
/** Handler for the start of an image subtitle from a piece's decoder.
 *  Applies the content's offsets and scales and records the subtitle as active.
 *  @param wp Piece the subtitle came from.
 *  @param subtitle Subtitle data, with rectangle in fractional coordinates.
 */
void
Player::image_subtitle_start (weak_ptr<Piece> wp, ContentImageSubtitle subtitle)
{
        shared_ptr<Piece> piece = wp.lock ();
        if (!piece) {
                return;
        }

        /* Apply content's subtitle offsets */
        subtitle.sub.rectangle.x += piece->content->subtitle->x_offset ();
        subtitle.sub.rectangle.y += piece->content->subtitle->y_offset ();

        /* Apply content's subtitle scale */
        subtitle.sub.rectangle.width *= piece->content->subtitle->x_scale ();
        subtitle.sub.rectangle.height *= piece->content->subtitle->y_scale ();

        /* Apply a corrective translation to keep the subtitle centred after that scale
           (note: uses the already-scaled width/height, so this must stay after the scale)
        */
        subtitle.sub.rectangle.x -= subtitle.sub.rectangle.width * (piece->content->subtitle->x_scale() - 1);
        subtitle.sub.rectangle.y -= subtitle.sub.rectangle.height * (piece->content->subtitle->y_scale() - 1);

        PlayerSubtitles ps;
        ps.image.push_back (subtitle.sub);
        DCPTime from (content_time_to_dcp (piece, subtitle.from()));

        _active_subtitles.add_from (wp, ps, from);
}
789
790 void
791 Player::text_subtitle_start (weak_ptr<Piece> wp, ContentTextSubtitle subtitle)
792 {
793         shared_ptr<Piece> piece = wp.lock ();
794         if (!piece) {
795                 return;
796         }
797
798         PlayerSubtitles ps;
799         DCPTime const from (content_time_to_dcp (piece, subtitle.from()));
800
801         BOOST_FOREACH (dcp::SubtitleString s, subtitle.subs) {
802                 s.set_h_position (s.h_position() + piece->content->subtitle->x_offset ());
803                 s.set_v_position (s.v_position() + piece->content->subtitle->y_offset ());
804                 float const xs = piece->content->subtitle->x_scale();
805                 float const ys = piece->content->subtitle->y_scale();
806                 float size = s.size();
807
808                 /* Adjust size to express the common part of the scaling;
809                    e.g. if xs = ys = 0.5 we scale size by 2.
810                 */
811                 if (xs > 1e-5 && ys > 1e-5) {
812                         size *= 1 / min (1 / xs, 1 / ys);
813                 }
814                 s.set_size (size);
815
816                 /* Then express aspect ratio changes */
817                 if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) {
818                         s.set_aspect_adjust (xs / ys);
819                 }
820
821                 s.set_in (dcp::Time(from.seconds(), 1000));
822                 ps.text.push_back (SubtitleString (s, piece->content->subtitle->outline_width()));
823                 ps.add_fonts (piece->content->subtitle->fonts ());
824         }
825
826         _active_subtitles.add_from (wp, ps, from);
827 }
828
829 void
830 Player::subtitle_stop (weak_ptr<Piece> wp, ContentTime to)
831 {
832         if (!_active_subtitles.have (wp)) {
833                 return;
834         }
835
836         shared_ptr<Piece> piece = wp.lock ();
837         if (!piece) {
838                 return;
839         }
840
841         DCPTime const dcp_to = content_time_to_dcp (piece, to);
842
843         pair<PlayerSubtitles, DCPTime> from = _active_subtitles.add_to (wp, dcp_to);
844
845         if (piece->content->subtitle->use() && !_always_burn_subtitles && !piece->content->subtitle->burn()) {
846                 Subtitle (from.first, DCPTimePeriod (from.second, dcp_to));
847         }
848 }
849
850 void
851 Player::seek (DCPTime time, bool accurate)
852 {
853         if (_audio_processor) {
854                 _audio_processor->flush ();
855         }
856
857         _audio_merger.clear ();
858         _active_subtitles.clear ();
859
860         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
861                 if (time < i->content->position()) {
862                         /* Before; seek to 0 */
863                         i->decoder->seek (ContentTime(), accurate);
864                         i->done = false;
865                 } else if (i->content->position() <= time && time < i->content->end()) {
866                         /* During; seek to position */
867                         i->decoder->seek (dcp_to_content_time (i, time), accurate);
868                         i->done = false;
869                 } else {
870                         /* After; this piece is done */
871                         i->done = true;
872                 }
873         }
874
875         if (accurate) {
876                 _last_video_time = time;
877                 _last_audio_time = time;
878         } else {
879                 _last_video_time = optional<DCPTime>();
880                 _last_audio_time = optional<DCPTime>();
881         }
882
883         _black.set_position (time);
884         _silent.set_position (time);
885
886         _last_video.clear ();
887 }
888
889 void
890 Player::emit_video (shared_ptr<PlayerVideo> pv, DCPTime time)
891 {
892         optional<PositionImage> subtitles = subtitles_for_frame (time);
893         if (subtitles) {
894                 pv->set_subtitle (subtitles.get ());
895         }
896
897         Video (pv, time);
898
899         if (pv->eyes() == EYES_BOTH || pv->eyes() == EYES_RIGHT) {
900                 _last_video_time = time + one_video_frame();
901                 _active_subtitles.clear_before (time);
902         }
903 }
904
905 void
906 Player::emit_audio (shared_ptr<AudioBuffers> data, DCPTime time)
907 {
908         Audio (data, time);
909         _last_audio_time = time + DCPTime::from_frames (data->frames(), _film->audio_frame_rate());
910 }
911
912 void
913 Player::fill_audio (DCPTimePeriod period)
914 {
915         if (period.from == period.to) {
916                 return;
917         }
918
919         DCPOMATIC_ASSERT (period.from < period.to);
920
921         DCPTime t = period.from;
922         while (t < period.to) {
923                 DCPTime block = min (DCPTime::from_seconds (0.5), period.to - t);
924                 Frame const samples = block.frames_round(_film->audio_frame_rate());
925                 if (samples) {
926                         shared_ptr<AudioBuffers> silence (new AudioBuffers (_film->audio_channels(), samples));
927                         silence->make_silent ();
928                         emit_audio (silence, t);
929                 }
930                 t += block;
931         }
932 }
933
934 DCPTime
935 Player::one_video_frame () const
936 {
937         return DCPTime::from_frames (1, _film->video_frame_rate ());
938 }
939
940 pair<shared_ptr<AudioBuffers>, DCPTime>
941 Player::discard_audio (shared_ptr<const AudioBuffers> audio, DCPTime time, DCPTime discard_to) const
942 {
943         DCPTime const discard_time = discard_to - time;
944         Frame const discard_frames = discard_time.frames_round(_film->audio_frame_rate());
945         Frame remaining_frames = audio->frames() - discard_frames;
946         if (remaining_frames <= 0) {
947                 return make_pair(shared_ptr<AudioBuffers>(), DCPTime());
948         }
949         shared_ptr<AudioBuffers> cut (new AudioBuffers (audio->channels(), remaining_frames));
950         cut->copy_from (audio.get(), remaining_frames, discard_frames, 0);
951         return make_pair(cut, time + discard_time);
952 }