Fixes for silence in projects, various cleanups.
[dcpomatic.git] / src / lib / player.cc
1 /*
2     Copyright (C) 2013-2017 Carl Hetherington <cth@carlh.net>
3
4     This file is part of DCP-o-matic.
5
6     DCP-o-matic is free software; you can redistribute it and/or modify
7     it under the terms of the GNU General Public License as published by
8     the Free Software Foundation; either version 2 of the License, or
9     (at your option) any later version.
10
11     DCP-o-matic is distributed in the hope that it will be useful,
12     but WITHOUT ANY WARRANTY; without even the implied warranty of
13     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14     GNU General Public License for more details.
15
16     You should have received a copy of the GNU General Public License
17     along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.
18
19 */
20
21 #include "player.h"
22 #include "film.h"
23 #include "audio_buffers.h"
24 #include "content_audio.h"
25 #include "dcp_content.h"
26 #include "job.h"
27 #include "image.h"
28 #include "raw_image_proxy.h"
29 #include "ratio.h"
30 #include "log.h"
31 #include "render_subtitles.h"
32 #include "config.h"
33 #include "content_video.h"
34 #include "player_video.h"
35 #include "frame_rate_change.h"
36 #include "audio_processor.h"
37 #include "playlist.h"
38 #include "referenced_reel_asset.h"
39 #include "decoder_factory.h"
40 #include "decoder.h"
41 #include "video_decoder.h"
42 #include "audio_decoder.h"
43 #include "subtitle_content.h"
44 #include "subtitle_decoder.h"
45 #include "ffmpeg_content.h"
46 #include "audio_content.h"
47 #include "content_subtitle.h"
48 #include "dcp_decoder.h"
49 #include "image_decoder.h"
50 #include "compose.hpp"
51 #include <dcp/reel.h>
52 #include <dcp/reel_sound_asset.h>
53 #include <dcp/reel_subtitle_asset.h>
54 #include <dcp/reel_picture_asset.h>
55 #include <boost/foreach.hpp>
56 #include <stdint.h>
57 #include <algorithm>
58 #include <iostream>
59
60 #include "i18n.h"
61
62 #define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);
63
64 using std::list;
65 using std::cout;
66 using std::min;
67 using std::max;
68 using std::min;
69 using std::vector;
70 using std::pair;
71 using std::map;
72 using std::make_pair;
73 using std::copy;
74 using boost::shared_ptr;
75 using boost::weak_ptr;
76 using boost::dynamic_pointer_cast;
77 using boost::optional;
78 using boost::scoped_ptr;
79
/** Construct a Player for a given film and playlist.
 *
 *  Connects to change signals on the film and playlist, sizes the video
 *  container to the film's frame size, picks up the film's audio processor
 *  and performs an initial accurate seek to time zero.
 */
Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
	: _film (film)
	, _playlist (playlist)
	, _have_valid_pieces (false)
	, _ignore_video (false)
	, _ignore_audio (false)
	, _always_burn_subtitles (false)
	, _fast (false)
	, _play_referenced (false)
	, _audio_merger (_film->audio_frame_rate())
{
	_film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
	_playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
	_playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::playlist_content_changed, this, _1, _2, _3));
	set_video_container_size (_film->frame_size ());

	/* Pick up the film's audio processor (if any) before the first pass */
	film_changed (Film::AUDIO_PROCESSOR);

	seek (DCPTime (), true);
}
100
/** Rebuild _pieces from the playlist: one Piece per piece of content that we
 *  can decode, with each decoder's outputs connected to our handlers.  Also
 *  rebuilds the per-stream audio state, the "empty" (black/silent) period
 *  trackers and resets the last-video/last-audio times.
 */
void
Player::setup_pieces ()
{
	_pieces.clear ();

	BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {

		if (!i->paths_valid ()) {
			/* Content files are missing; skip this content */
			continue;
		}

		shared_ptr<Decoder> decoder = decoder_factory (i, _film->log());
		FrameRateChange frc (i->active_video_frame_rate(), _film->video_frame_rate());

		if (!decoder) {
			/* Not something that we can decode; e.g. Atmos content */
			continue;
		}

		if (decoder->video && _ignore_video) {
			decoder->video->set_ignore ();
		}

		if (decoder->audio && _ignore_audio) {
			decoder->audio->set_ignore ();
		}

		/* If we are playing referenced DCP content directly it must still be decoded */
		shared_ptr<DCPDecoder> dcp = dynamic_pointer_cast<DCPDecoder> (decoder);
		if (dcp && _play_referenced) {
			dcp->set_decode_referenced ();
		}

		shared_ptr<Piece> piece (new Piece (i, decoder, frc));
		_pieces.push_back (piece);

		/* Connect the decoder's outputs to our handlers; the piece is passed
		   as a weak_ptr so the connections do not keep it alive.
		*/
		if (decoder->video) {
			decoder->video->Data.connect (bind (&Player::video, this, weak_ptr<Piece> (piece), _1));
		}

		if (decoder->audio) {
			decoder->audio->Data.connect (bind (&Player::audio, this, weak_ptr<Piece> (piece), _1, _2));
		}

		if (decoder->subtitle) {
			decoder->subtitle->ImageStart.connect (bind (&Player::image_subtitle_start, this, weak_ptr<Piece> (piece), _1));
			decoder->subtitle->TextStart.connect (bind (&Player::text_subtitle_start, this, weak_ptr<Piece> (piece), _1));
			decoder->subtitle->Stop.connect (bind (&Player::subtitle_stop, this, weak_ptr<Piece> (piece), _1));
		}
	}

	/* Initial state for each audio stream: nothing pushed yet, so the
	   last push "end" is the stream's content position.
	*/
	BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
		if (i->content->audio) {
			BOOST_FOREACH (AudioStreamPtr j, i->content->audio->streams()) {
				_stream_states[j] = StreamState (i, i->content->position ());
			}
		}
	}

	/* Periods with no video and no audio, to be filled with black and silence by pass() */
	_black = Empty (_playlist, bind(&Content::video, _1));
	_silent = Empty (_playlist, bind(&Content::audio, _1));

	_last_video_time = DCPTime ();
	_last_audio_time = DCPTime ();
	_have_valid_pieces = true;
}
166
/** Handler for a change to one piece of content in the playlist.
 *  @param w Content that changed.
 *  @param property Property that changed.
 *  @param frequent true if this change is likely to happen frequently (e.g. during a drag).
 */
void
Player::playlist_content_changed (weak_ptr<Content> w, int property, bool frequent)
{
	shared_ptr<Content> c = w.lock ();
	if (!c) {
		return;
	}

	/* These properties invalidate our Pieces: they must be rebuilt before
	   the next pass, as well as our output having changed.
	*/
	if (
		property == ContentProperty::POSITION ||
		property == ContentProperty::LENGTH ||
		property == ContentProperty::TRIM_START ||
		property == ContentProperty::TRIM_END ||
		property == ContentProperty::PATH ||
		property == VideoContentProperty::FRAME_TYPE ||
		property == DCPContentProperty::NEEDS_ASSETS ||
		property == DCPContentProperty::NEEDS_KDM ||
		property == SubtitleContentProperty::COLOUR ||
		property == SubtitleContentProperty::OUTLINE ||
		property == SubtitleContentProperty::SHADOW ||
		property == SubtitleContentProperty::EFFECT_COLOUR ||
		property == FFmpegContentProperty::SUBTITLE_STREAM ||
		property == VideoContentProperty::COLOUR_CONVERSION
		) {

		_have_valid_pieces = false;
		Changed (frequent);

	/* These only change how we present the content; the existing Pieces stay valid */
	} else if (
		property == SubtitleContentProperty::LINE_SPACING ||
		property == SubtitleContentProperty::OUTLINE_WIDTH ||
		property == SubtitleContentProperty::Y_SCALE ||
		property == SubtitleContentProperty::FADE_IN ||
		property == SubtitleContentProperty::FADE_OUT ||
		property == ContentProperty::VIDEO_FRAME_RATE ||
		property == SubtitleContentProperty::USE ||
		property == SubtitleContentProperty::X_OFFSET ||
		property == SubtitleContentProperty::Y_OFFSET ||
		property == SubtitleContentProperty::X_SCALE ||
		property == SubtitleContentProperty::FONTS ||
		property == VideoContentProperty::CROP ||
		property == VideoContentProperty::SCALE ||
		property == VideoContentProperty::FADE_IN ||
		property == VideoContentProperty::FADE_OUT
		) {

		Changed (frequent);
	}
}
216
217 void
218 Player::set_video_container_size (dcp::Size s)
219 {
220         if (s == _video_container_size) {
221                 return;
222         }
223
224         _video_container_size = s;
225
226         _black_image.reset (new Image (AV_PIX_FMT_RGB24, _video_container_size, true));
227         _black_image->make_black ();
228
229         Changed (false);
230 }
231
/** Handler for a wholesale change to the playlist: our pieces must be
 *  rebuilt before the next pass, and our output may now be different.
 */
void
Player::playlist_changed ()
{
	_have_valid_pieces = false;
	Changed (false);
}
238
239 void
240 Player::film_changed (Film::Property p)
241 {
242         /* Here we should notice Film properties that affect our output, and
243            alert listeners that our output now would be different to how it was
244            last time we were run.
245         */
246
247         if (p == Film::CONTAINER) {
248                 Changed (false);
249         } else if (p == Film::VIDEO_FRAME_RATE) {
250                 /* Pieces contain a FrameRateChange which contains the DCP frame rate,
251                    so we need new pieces here.
252                 */
253                 _have_valid_pieces = false;
254                 Changed (false);
255         } else if (p == Film::AUDIO_PROCESSOR) {
256                 if (_film->audio_processor ()) {
257                         _audio_processor = _film->audio_processor()->clone (_film->audio_frame_rate ());
258                 }
259         }
260 }
261
262 list<PositionImage>
263 Player::transform_image_subtitles (list<ImageSubtitle> subs) const
264 {
265         list<PositionImage> all;
266
267         for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
268                 if (!i->image) {
269                         continue;
270                 }
271
272                 /* We will scale the subtitle up to fit _video_container_size */
273                 dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);
274
275                 /* Then we need a corrective translation, consisting of two parts:
276                  *
277                  * 1.  that which is the result of the scaling of the subtitle by _video_container_size; this will be
278                  *     rect.x * _video_container_size.width and rect.y * _video_container_size.height.
279                  *
280                  * 2.  that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
281                  *     (width_before_subtitle_scale * (1 - subtitle_x_scale) / 2) and
282                  *     (height_before_subtitle_scale * (1 - subtitle_y_scale) / 2).
283                  *
284                  * Combining these two translations gives these expressions.
285                  */
286
287                 all.push_back (
288                         PositionImage (
289                                 i->image->scale (
290                                         scaled_size,
291                                         dcp::YUV_TO_RGB_REC601,
292                                         i->image->pixel_format (),
293                                         true,
294                                         _fast
295                                         ),
296                                 Position<int> (
297                                         lrint (_video_container_size.width * i->rectangle.x),
298                                         lrint (_video_container_size.height * i->rectangle.y)
299                                         )
300                                 )
301                         );
302         }
303
304         return all;
305 }
306
307 shared_ptr<PlayerVideo>
308 Player::black_player_video_frame () const
309 {
310         return shared_ptr<PlayerVideo> (
311                 new PlayerVideo (
312                         shared_ptr<const ImageProxy> (new RawImageProxy (_black_image)),
313                         Crop (),
314                         optional<double> (),
315                         _video_container_size,
316                         _video_container_size,
317                         EYES_BOTH,
318                         PART_WHOLE,
319                         PresetColourConversion::all().front().conversion
320                 )
321         );
322 }
323
/** Convert a DCP time to a video frame index within a piece of content.
 *  @param piece Piece of content.
 *  @param t DCP time.
 */
Frame
Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
{
	/* Time relative to the start of the piece, clamped to its trimmed length */
	DCPTime s = t - piece->content->position ();
	s = min (piece->content->length_after_trim(), s);
	/* Shift by the start trim, never going below zero */
	s = max (DCPTime(), s + DCPTime (piece->content->trim_start(), piece->frc));

	/* It might seem more logical here to convert s to a ContentTime (using the FrameRateChange)
	   then convert that ContentTime to frames at the content's rate.  However this fails for
	   situations like content at 29.9978733fps, DCP at 30fps.  The accuracy of the Time type is not
	   enough to distinguish between the two with low values of time (e.g. 3200 in Time units).

	   Instead we convert the DCPTime using the DCP video rate then account for any skip/repeat.
	*/
	return s.frames_floor (piece->frc.dcp) / piece->frc.factor ();
}
340
341 DCPTime
342 Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
343 {
344         /* See comment in dcp_to_content_video */
345         DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime (piece->content->trim_start (), piece->frc);
346         return max (DCPTime (), d + piece->content->position ());
347 }
348
349 Frame
350 Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
351 {
352         DCPTime s = t - piece->content->position ();
353         s = min (piece->content->length_after_trim(), s);
354         /* See notes in dcp_to_content_video */
355         return max (DCPTime (), DCPTime (piece->content->trim_start (), piece->frc) + s).frames_floor (_film->audio_frame_rate ());
356 }
357
358 DCPTime
359 Player::resampled_audio_to_dcp (shared_ptr<const Piece> piece, Frame f) const
360 {
361         /* See comment in dcp_to_content_video */
362         return DCPTime::from_frames (f, _film->audio_frame_rate())
363                 - DCPTime (piece->content->trim_start(), piece->frc)
364                 + piece->content->position();
365 }
366
367 ContentTime
368 Player::dcp_to_content_time (shared_ptr<const Piece> piece, DCPTime t) const
369 {
370         DCPTime s = t - piece->content->position ();
371         s = min (piece->content->length_after_trim(), s);
372         return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
373 }
374
375 DCPTime
376 Player::content_time_to_dcp (shared_ptr<const Piece> piece, ContentTime t) const
377 {
378         return max (DCPTime (), DCPTime (t - piece->content->trim_start(), piece->frc) + piece->content->position());
379 }
380
381 list<shared_ptr<Font> >
382 Player::get_subtitle_fonts ()
383 {
384         if (!_have_valid_pieces) {
385                 setup_pieces ();
386         }
387
388         list<shared_ptr<Font> > fonts;
389         BOOST_FOREACH (shared_ptr<Piece>& p, _pieces) {
390                 if (p->content->subtitle) {
391                         /* XXX: things may go wrong if there are duplicate font IDs
392                            with different font files.
393                         */
394                         list<shared_ptr<Font> > f = p->content->subtitle->fonts ();
395                         copy (f.begin(), f.end(), back_inserter (fonts));
396                 }
397         }
398
399         return fonts;
400 }
401
/** Set this player never to produce any video data.
 *  Takes effect when pieces are next built in setup_pieces(); note that
 *  this does not itself invalidate existing pieces.
 */
void
Player::set_ignore_video ()
{
	_ignore_video = true;
}
408
/** Set whether or not this player should always burn text subtitles into the image,
 *  regardless of the content settings.
 *  @param burn true to always burn subtitles, false to obey content settings.
 */
void
Player::set_always_burn_subtitles (bool burn)
{
	_always_burn_subtitles = burn;
}
418
/** Put the player into "fast" mode; presumably this trades quality for speed
 *  (e.g. faster subtitle image scaling — see transform_image_subtitles).
 *  Pieces are invalidated so that the setting is picked up on the next pass.
 */
void
Player::set_fast ()
{
	_fast = true;
	_have_valid_pieces = false;
}
425
/** Make the player decode and play content that is referenced from DCPs
 *  (see DCPDecoder::set_decode_referenced in setup_pieces).  Pieces are
 *  invalidated so that the setting is picked up on the next pass.
 */
void
Player::set_play_referenced ()
{
	_play_referenced = true;
	_have_valid_pieces = false;
}
432
433 list<ReferencedReelAsset>
434 Player::get_reel_assets ()
435 {
436         list<ReferencedReelAsset> a;
437
438         BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
439                 shared_ptr<DCPContent> j = dynamic_pointer_cast<DCPContent> (i);
440                 if (!j) {
441                         continue;
442                 }
443
444                 scoped_ptr<DCPDecoder> decoder;
445                 try {
446                         decoder.reset (new DCPDecoder (j, _film->log()));
447                 } catch (...) {
448                         return a;
449                 }
450
451                 int64_t offset = 0;
452                 BOOST_FOREACH (shared_ptr<dcp::Reel> k, decoder->reels()) {
453
454                         DCPOMATIC_ASSERT (j->video_frame_rate ());
455                         double const cfr = j->video_frame_rate().get();
456                         Frame const trim_start = j->trim_start().frames_round (cfr);
457                         Frame const trim_end = j->trim_end().frames_round (cfr);
458                         int const ffr = _film->video_frame_rate ();
459
460                         DCPTime const from = i->position() + DCPTime::from_frames (offset, _film->video_frame_rate());
461                         if (j->reference_video ()) {
462                                 shared_ptr<dcp::ReelAsset> ra = k->main_picture ();
463                                 DCPOMATIC_ASSERT (ra);
464                                 ra->set_entry_point (ra->entry_point() + trim_start);
465                                 ra->set_duration (ra->duration() - trim_start - trim_end);
466                                 a.push_back (
467                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
468                                         );
469                         }
470
471                         if (j->reference_audio ()) {
472                                 shared_ptr<dcp::ReelAsset> ra = k->main_sound ();
473                                 DCPOMATIC_ASSERT (ra);
474                                 ra->set_entry_point (ra->entry_point() + trim_start);
475                                 ra->set_duration (ra->duration() - trim_start - trim_end);
476                                 a.push_back (
477                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
478                                         );
479                         }
480
481                         if (j->reference_subtitle ()) {
482                                 shared_ptr<dcp::ReelAsset> ra = k->main_subtitle ();
483                                 DCPOMATIC_ASSERT (ra);
484                                 ra->set_entry_point (ra->entry_point() + trim_start);
485                                 ra->set_duration (ra->duration() - trim_start - trim_end);
486                                 a.push_back (
487                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
488                                         );
489                         }
490
491                         /* Assume that main picture duration is the length of the reel */
492                         offset += k->main_picture()->duration ();
493                 }
494         }
495
496         return a;
497 }
498
/** Run the player for one "step": make the decoder (or black/silence filler)
 *  which is farthest behind emit some data, then emit any audio which is
 *  now complete.
 *  @return true if there is nothing left to do, i.e. playback is finished.
 */
bool
Player::pass ()
{
	if (!_have_valid_pieces) {
		setup_pieces ();
	}

	/* Find the decoder or empty which is farthest behind where we are and make it emit some data */

	shared_ptr<Piece> earliest;
	DCPTime earliest_content;

	BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
		if (!i->done) {
			DCPTime const t = content_time_to_dcp (i, i->decoder->position());
			if (!earliest || t < earliest_content) {
				earliest_content = t;
				earliest = i;
			}
		}
	}

	bool done = false;

	if (!_black.done() && (!earliest ||_black.position() < earliest_content)) {
		/* There is some black that must be emitted */
		emit_video (black_player_video_frame(), _black.position());
		_black.set_position (_black.position() + one_video_frame());
	} else if (!_silent.done() && (!earliest || _silent.position() < earliest_content)) {
		/* There is some silence that must be emitted; do it at most one
		   video frame's worth at a time.
		*/
		DCPTimePeriod period (_silent.period_at_position());
		if (period.duration() > one_video_frame()) {
			period.to = period.from + one_video_frame();
		}
		fill_audio (period);
		_silent.set_position (period.to);
	} else if (_playlist->length() == DCPTime()) {
		/* Special case of an empty Film; just give one black frame */
		emit_video (black_player_video_frame(), DCPTime());
	} else if (earliest) {
		earliest->done = earliest->decoder->pass ();
	} else {
		done = true;
	}

	/* Emit any audio that is ready */

	/* We can safely pull audio up to the earliest point that any unfinished
	   stream has been filled to (or the whole playlist length if every
	   stream is done).
	*/
	DCPTime pull_to = _playlist->length ();
	for (map<AudioStreamPtr, StreamState>::const_iterator i = _stream_states.begin(); i != _stream_states.end(); ++i) {
		if (!i->second.piece->done && i->second.last_push_end < pull_to) {
			pull_to = i->second.last_push_end;
		}
	}

	list<pair<shared_ptr<AudioBuffers>, DCPTime> > audio = _audio_merger.pull (pull_to);
	for (list<pair<shared_ptr<AudioBuffers>, DCPTime> >::iterator i = audio.begin(); i != audio.end(); ++i) {
		if (_last_audio_time && i->second < *_last_audio_time) {
			/* There has been an accurate seek and we have received some audio before the seek time;
			   discard it.
			*/
			pair<shared_ptr<AudioBuffers>, DCPTime> cut = discard_audio (i->first, i->second, *_last_audio_time);
			if (!cut.first) {
				continue;
			}
			*i = cut;
		}

		emit_audio (i->first, i->second);
	}

	return done;
}
571
572 optional<PositionImage>
573 Player::subtitles_for_frame (DCPTime time) const
574 {
575         list<PositionImage> subtitles;
576
577         BOOST_FOREACH (PlayerSubtitles i, _active_subtitles.get_burnt (time, _always_burn_subtitles)) {
578
579                 /* Image subtitles */
580                 list<PositionImage> c = transform_image_subtitles (i.image);
581                 copy (c.begin(), c.end(), back_inserter (subtitles));
582
583                 /* Text subtitles (rendered to an image) */
584                 if (!i.text.empty ()) {
585                         list<PositionImage> s = render_subtitles (i.text, i.fonts, _video_container_size, time);
586                         copy (s.begin(), s.end(), back_inserter (subtitles));
587                 }
588         }
589
590         if (subtitles.empty ()) {
591                 return optional<PositionImage> ();
592         }
593
594         return merge (subtitles);
595 }
596
597 void
598 Player::video (weak_ptr<Piece> wp, ContentVideo video)
599 {
600         shared_ptr<Piece> piece = wp.lock ();
601         if (!piece) {
602                 return;
603         }
604
605         FrameRateChange frc(piece->content->active_video_frame_rate(), _film->video_frame_rate());
606         if (frc.skip && (video.frame % 2) == 1) {
607                 return;
608         }
609
610         /* Time and period of the frame we will emit */
611         DCPTime const time = content_video_to_dcp (piece, video.frame);
612         DCPTimePeriod const period (time, time + one_video_frame());
613
614         /* Fill gaps that we discover now that we have some video which needs to be emitted */
615
616         if (_last_video_time) {
617                 /* XXX: this may not work for 3D */
618                 DCPTime fill_from = max (*_last_video_time, piece->content->position());
619                 for (DCPTime j = fill_from; j < time; j += one_video_frame()) {
620                         LastVideoMap::const_iterator k = _last_video.find (wp);
621                         if (k != _last_video.end ()) {
622                                 emit_video (k->second, j);
623                         } else {
624                                 emit_video (black_player_video_frame(), j);
625                         }
626                 }
627         }
628
629         /* Discard if it's outside the content's period or if it's before the last accurate seek */
630         if (
631                 time < piece->content->position() ||
632                 time >= piece->content->end() ||
633                 (_last_video_time && time < *_last_video_time)) {
634                 return;
635         }
636
637         _last_video[wp].reset (
638                 new PlayerVideo (
639                         video.image,
640                         piece->content->video->crop (),
641                         piece->content->video->fade (video.frame),
642                         piece->content->video->scale().size (
643                                 piece->content->video, _video_container_size, _film->frame_size ()
644                                 ),
645                         _video_container_size,
646                         video.eyes,
647                         video.part,
648                         piece->content->video->colour_conversion ()
649                         )
650                 );
651
652         emit_video (_last_video[wp], time);
653 }
654
/** Handler for audio data arriving from a decoder.
 *  @param wp Piece that the data comes from.
 *  @param stream Audio stream within the piece.
 *  @param content_audio Audio data, with its frame index within the content.
 */
void
Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_audio)
{
	DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);

	shared_ptr<Piece> piece = wp.lock ();
	if (!piece) {
		return;
	}

	shared_ptr<AudioContent> content = piece->content->audio;
	DCPOMATIC_ASSERT (content);

	/* Compute time in the DCP */
	DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame);
	/* And the end of this block in the DCP */
	DCPTime end = time + DCPTime::from_frames(content_audio.audio->frames(), content->resampled_frame_rate());

	/* Remove anything that comes before the start or after the end of the content */
	if (time < piece->content->position()) {
		pair<shared_ptr<AudioBuffers>, DCPTime> cut = discard_audio (content_audio.audio, time, piece->content->position());
		if (!cut.first) {
			/* This audio is entirely discarded */
			return;
		}
		content_audio.audio = cut.first;
		time = cut.second;
	} else if (time > piece->content->end()) {
		/* Discard it all */
		return;
	} else if (end > piece->content->end()) {
		/* Trim off the part that overlaps the end of the content */
		Frame const remaining_frames = DCPTime(piece->content->end() - time).frames_round(_film->audio_frame_rate());
		if (remaining_frames == 0) {
			return;
		}
		shared_ptr<AudioBuffers> cut (new AudioBuffers (content_audio.audio->channels(), remaining_frames));
		cut->copy_from (content_audio.audio.get(), remaining_frames, 0, 0);
		content_audio.audio = cut;
	}

	DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);

	/* Gain */

	if (content->gain() != 0) {
		shared_ptr<AudioBuffers> gain (new AudioBuffers (content_audio.audio));
		gain->apply_gain (content->gain ());
		content_audio.audio = gain;
	}

	/* Remap this stream's channels into the film's channel layout */

	content_audio.audio = remap (content_audio.audio, _film->audio_channels(), stream->mapping());

	/* Process with the film's audio processor, if there is one */

	if (_audio_processor) {
		content_audio.audio = _audio_processor->run (content_audio.audio, _film->audio_channels ());
	}

	/* Push to the merger and note how far this stream has now been filled;
	   pass() uses last_push_end to decide how much audio can safely be pulled.
	*/

	_audio_merger.push (content_audio.audio, time);
	DCPOMATIC_ASSERT (_stream_states.find (stream) != _stream_states.end ());
	_stream_states[stream].last_push_end = time + DCPTime::from_frames (content_audio.audio->frames(), _film->audio_frame_rate());
}
721
/** Handler for the start of an image subtitle from a decoder.
 *  @param wp Piece that the subtitle comes from.
 *  @param subtitle Subtitle, with its rectangle expressed as proportions of
 *  the video (see transform_image_subtitles).
 */
void
Player::image_subtitle_start (weak_ptr<Piece> wp, ContentImageSubtitle subtitle)
{
	shared_ptr<Piece> piece = wp.lock ();
	if (!piece) {
		return;
	}

	/* Apply content's subtitle offsets */
	subtitle.sub.rectangle.x += piece->content->subtitle->x_offset ();
	subtitle.sub.rectangle.y += piece->content->subtitle->y_offset ();

	/* Apply content's subtitle scale */
	subtitle.sub.rectangle.width *= piece->content->subtitle->x_scale ();
	subtitle.sub.rectangle.height *= piece->content->subtitle->y_scale ();

	/* Apply a corrective translation to keep the subtitle centred after that scale */
	subtitle.sub.rectangle.x -= subtitle.sub.rectangle.width * (piece->content->subtitle->x_scale() - 1);
	subtitle.sub.rectangle.y -= subtitle.sub.rectangle.height * (piece->content->subtitle->y_scale() - 1);

	PlayerSubtitles ps;
	ps.image.push_back (subtitle.sub);
	DCPTime from (content_time_to_dcp (piece, subtitle.from()));

	/* The subtitle is now "active" until a matching subtitle_stop() */
	_active_subtitles.add_from (wp, ps, from);
}
748
/** Handler for the start of a text subtitle from a decoder.
 *  @param wp Piece that the subtitle comes from.
 *  @param subtitle Subtitle as a set of dcp::SubtitleStrings.
 */
void
Player::text_subtitle_start (weak_ptr<Piece> wp, ContentTextSubtitle subtitle)
{
	shared_ptr<Piece> piece = wp.lock ();
	if (!piece) {
		return;
	}

	PlayerSubtitles ps;
	DCPTime const from (content_time_to_dcp (piece, subtitle.from()));

	BOOST_FOREACH (dcp::SubtitleString s, subtitle.subs) {
		/* Apply content's subtitle offsets */
		s.set_h_position (s.h_position() + piece->content->subtitle->x_offset ());
		s.set_v_position (s.v_position() + piece->content->subtitle->y_offset ());
		float const xs = piece->content->subtitle->x_scale();
		float const ys = piece->content->subtitle->y_scale();
		float size = s.size();

		/* Adjust size to express the common part of the scaling;
		   e.g. if xs = ys = 0.5 we scale size by 2.
		*/
		if (xs > 1e-5 && ys > 1e-5) {
			/* i.e. scale size by max (xs, ys); written this way deliberately */
			size *= 1 / min (1 / xs, 1 / ys);
		}
		s.set_size (size);

		/* Then express aspect ratio changes */
		if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) {
			s.set_aspect_adjust (xs / ys);
		}

		/* NOTE(review): 1000 appears to be the dcp::Time editable rate
		   (i.e. millisecond accuracy) — confirm against libdcp.
		*/
		s.set_in (dcp::Time(from.seconds(), 1000));
		ps.text.push_back (SubtitleString (s, piece->content->subtitle->outline_width()));
		ps.add_fonts (piece->content->subtitle->fonts ());
	}

	/* The subtitle is now "active" until a matching subtitle_stop() */
	_active_subtitles.add_from (wp, ps, from);
}
787
788 void
789 Player::subtitle_stop (weak_ptr<Piece> wp, ContentTime to)
790 {
791         if (!_active_subtitles.have (wp)) {
792                 return;
793         }
794
795         shared_ptr<Piece> piece = wp.lock ();
796         if (!piece) {
797                 return;
798         }
799
800         DCPTime const dcp_to = content_time_to_dcp (piece, to);
801
802         pair<PlayerSubtitles, DCPTime> from = _active_subtitles.add_to (wp, dcp_to);
803
804         if (piece->content->subtitle->use() && !_always_burn_subtitles && !piece->content->subtitle->burn()) {
805                 Subtitle (from.first, DCPTimePeriod (from.second, dcp_to));
806         }
807 }
808
809 void
810 Player::seek (DCPTime time, bool accurate)
811 {
812         if (_audio_processor) {
813                 _audio_processor->flush ();
814         }
815
816         _audio_merger.clear ();
817         _active_subtitles.clear ();
818
819         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
820                 if (time < i->content->position()) {
821                         /* Before; seek to 0 */
822                         i->decoder->seek (ContentTime(), accurate);
823                         i->done = false;
824                 } else if (i->content->position() <= time && time < i->content->end()) {
825                         /* During; seek to position */
826                         i->decoder->seek (dcp_to_content_time (i, time), accurate);
827                         i->done = false;
828                 } else {
829                         /* After; this piece is done */
830                         i->done = true;
831                 }
832         }
833
834         if (accurate) {
835                 _last_video_time = time;
836                 _last_audio_time = time;
837         } else {
838                 _last_video_time = optional<DCPTime>();
839                 _last_audio_time = optional<DCPTime>();
840         }
841
842         _black.set_position (time);
843         _silent.set_position (time);
844
845         _last_video.clear ();
846 }
847
848 void
849 Player::emit_video (shared_ptr<PlayerVideo> pv, DCPTime time)
850 {
851         optional<PositionImage> subtitles = subtitles_for_frame (time);
852         if (subtitles) {
853                 pv->set_subtitle (subtitles.get ());
854         }
855
856         Video (pv, time);
857
858         if (pv->eyes() == EYES_BOTH || pv->eyes() == EYES_RIGHT) {
859                 _last_video_time = time + one_video_frame();
860                 _active_subtitles.clear_before (time);
861         }
862 }
863
864 void
865 Player::emit_audio (shared_ptr<AudioBuffers> data, DCPTime time)
866 {
867         Audio (data, time);
868         _last_audio_time = time + DCPTime::from_frames (data->frames(), _film->audio_frame_rate());
869 }
870
871 void
872 Player::fill_audio (DCPTimePeriod period)
873 {
874         if (period.from == period.to) {
875                 return;
876         }
877
878         DCPOMATIC_ASSERT (period.from < period.to);
879
880         DCPTime t = period.from;
881         while (t < period.to) {
882                 DCPTime block = min (DCPTime::from_seconds (0.5), period.to - t);
883                 Frame const samples = block.frames_round(_film->audio_frame_rate());
884                 if (samples) {
885                         shared_ptr<AudioBuffers> silence (new AudioBuffers (_film->audio_channels(), samples));
886                         silence->make_silent ();
887                         emit_audio (silence, t);
888                 }
889                 t += block;
890         }
891 }
892
893 DCPTime
894 Player::one_video_frame () const
895 {
896         return DCPTime::from_frames (1, _film->video_frame_rate ());
897 }
898
899 pair<shared_ptr<AudioBuffers>, DCPTime>
900 Player::discard_audio (shared_ptr<const AudioBuffers> audio, DCPTime time, DCPTime discard_to) const
901 {
902         DCPTime const discard_time = discard_to - time;
903         Frame const discard_frames = discard_time.frames_round(_film->audio_frame_rate());
904         Frame remaining_frames = audio->frames() - discard_frames;
905         if (remaining_frames <= 0) {
906                 return make_pair(shared_ptr<AudioBuffers>(), DCPTime());
907         }
908         shared_ptr<AudioBuffers> cut (new AudioBuffers (audio->channels(), remaining_frames));
909         cut->copy_from (audio.get(), remaining_frames, discard_frames, 0);
910         return make_pair(cut, time + discard_time);
911 }