Remove unused Player::_ignore_audio. Ignore position
[dcpomatic.git] / src / lib / player.cc
1 /*
2     Copyright (C) 2013-2017 Carl Hetherington <cth@carlh.net>
3
4     This file is part of DCP-o-matic.
5
6     DCP-o-matic is free software; you can redistribute it and/or modify
7     it under the terms of the GNU General Public License as published by
8     the Free Software Foundation; either version 2 of the License, or
9     (at your option) any later version.
10
11     DCP-o-matic is distributed in the hope that it will be useful,
12     but WITHOUT ANY WARRANTY; without even the implied warranty of
13     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14     GNU General Public License for more details.
15
16     You should have received a copy of the GNU General Public License
17     along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.
18
19 */
20
21 #include "player.h"
22 #include "film.h"
23 #include "audio_buffers.h"
24 #include "content_audio.h"
25 #include "dcp_content.h"
26 #include "job.h"
27 #include "image.h"
28 #include "raw_image_proxy.h"
29 #include "ratio.h"
30 #include "log.h"
31 #include "render_subtitles.h"
32 #include "config.h"
33 #include "content_video.h"
34 #include "player_video.h"
35 #include "frame_rate_change.h"
36 #include "audio_processor.h"
37 #include "playlist.h"
38 #include "referenced_reel_asset.h"
39 #include "decoder_factory.h"
40 #include "decoder.h"
41 #include "video_decoder.h"
42 #include "audio_decoder.h"
43 #include "subtitle_content.h"
44 #include "subtitle_decoder.h"
45 #include "ffmpeg_content.h"
46 #include "audio_content.h"
47 #include "content_subtitle.h"
48 #include "dcp_decoder.h"
49 #include "image_decoder.h"
50 #include "compose.hpp"
51 #include <dcp/reel.h>
52 #include <dcp/reel_sound_asset.h>
53 #include <dcp/reel_subtitle_asset.h>
54 #include <dcp/reel_picture_asset.h>
55 #include <boost/foreach.hpp>
56 #include <stdint.h>
57 #include <algorithm>
58 #include <iostream>
59
60 #include "i18n.h"
61
62 #define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);
63
64 using std::list;
65 using std::cout;
66 using std::min;
67 using std::max;
69 using std::vector;
70 using std::pair;
71 using std::map;
72 using std::make_pair;
73 using std::copy;
74 using boost::shared_ptr;
75 using boost::weak_ptr;
76 using boost::dynamic_pointer_cast;
77 using boost::optional;
78 using boost::scoped_ptr;
79
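/** Construct a Player for a Film and Playlist.  The constructor sets the video
 *  container size from the film, picks up any audio processor that the film uses
 *  and performs an accurate seek to time zero, ready for the first pass().
 */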
80 Player::Player (shared_ptr<const Film> film, shared_ptr<const Playlist> playlist)
81         : _film (film)
82         , _playlist (playlist)
83         , _have_valid_pieces (false)
84         , _ignore_video (false)
85         , _ignore_subtitle (false)
86         , _always_burn_subtitles (false)
87         , _fast (false)
88         , _play_referenced (false)
89         , _audio_merger (_film->audio_frame_rate())
90 {
91         _film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
92         _playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
93         _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::playlist_content_changed, this, _1, _2, _3));
94         set_video_container_size (_film->frame_size ());
95
96         film_changed (Film::AUDIO_PROCESSOR);
97
98         seek (DCPTime (), true);
99 }
100
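/** Rebuild _pieces from the playlist: create a decoder for each piece of content whose
 *  paths are valid, connect its video/audio/subtitle signals to this Player, record the
 *  starting state of each audio stream, and reset the black/silence fillers and the
 *  last emitted video/audio times.
 */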
101 void
102 Player::setup_pieces ()
103 {
104         _pieces.clear ();
105
106         BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
107
108                 if (!i->paths_valid ()) {
109                         continue;
110                 }
111
112                 shared_ptr<Decoder> decoder = decoder_factory (i, _film->log());
113                 FrameRateChange frc (i->active_video_frame_rate(), _film->video_frame_rate());
114
115                 if (!decoder) {
116                         /* Not something that we can decode; e.g. Atmos content */
117                         continue;
118                 }
119
120                 if (decoder->video && _ignore_video) {
121                         decoder->video->set_ignore ();
122                 }
123
124                 if (decoder->subtitle && _ignore_subtitle) {
125                         decoder->subtitle->set_ignore ();
126                 }
127
128                 shared_ptr<DCPDecoder> dcp = dynamic_pointer_cast<DCPDecoder> (decoder);
129                 if (dcp && _play_referenced) {
130                         dcp->set_decode_referenced ();
131                 }
132
133                 shared_ptr<Piece> piece (new Piece (i, decoder, frc));
134                 _pieces.push_back (piece);
135
136                 if (decoder->video) {
137                         decoder->video->Data.connect (bind (&Player::video, this, weak_ptr<Piece> (piece), _1));
138                 }
139
140                 if (decoder->audio) {
141                         decoder->audio->Data.connect (bind (&Player::audio, this, weak_ptr<Piece> (piece), _1, _2));
142                 }
143
144                 if (decoder->subtitle) {
145                         decoder->subtitle->ImageStart.connect (bind (&Player::image_subtitle_start, this, weak_ptr<Piece> (piece), _1));
146                         decoder->subtitle->TextStart.connect (bind (&Player::text_subtitle_start, this, weak_ptr<Piece> (piece), _1));
147                         decoder->subtitle->Stop.connect (bind (&Player::subtitle_stop, this, weak_ptr<Piece> (piece), _1));
148                 }
149         }
150
151         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
152                 if (i->content->audio) {
153                         BOOST_FOREACH (AudioStreamPtr j, i->content->audio->streams()) {
154                                 _stream_states[j] = StreamState (i, i->content->position ());
155                         }
156                 }
157         }
158
159         _black = Empty (_playlist, bind(&Content::video, _1));
160         _silent = Empty (_playlist, bind(&Content::audio, _1));
161
162         _last_video_time = DCPTime ();
163         _last_audio_time = DCPTime ();
164         _have_valid_pieces = true;
165 }
166
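/** Handle a change to a property of some content in the playlist.  Properties which
 *  affect the structure of the pieces (e.g. position, trim, paths) invalidate _pieces
 *  so that they are rebuilt on the next use; the others only require listeners to be
 *  told that our output has changed.
 */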
167 void
168 Player::playlist_content_changed (weak_ptr<Content> w, int property, bool frequent)
169 {
170         shared_ptr<Content> c = w.lock ();
171         if (!c) {
172                 return;
173         }
174
175         if (
176                 property == ContentProperty::POSITION ||
177                 property == ContentProperty::LENGTH ||
178                 property == ContentProperty::TRIM_START ||
179                 property == ContentProperty::TRIM_END ||
180                 property == ContentProperty::PATH ||
181                 property == VideoContentProperty::FRAME_TYPE ||
182                 property == DCPContentProperty::NEEDS_ASSETS ||
183                 property == DCPContentProperty::NEEDS_KDM ||
184                 property == SubtitleContentProperty::COLOUR ||
185                 property == SubtitleContentProperty::OUTLINE ||
186                 property == SubtitleContentProperty::SHADOW ||
187                 property == SubtitleContentProperty::EFFECT_COLOUR ||
188                 property == FFmpegContentProperty::SUBTITLE_STREAM ||
189                 property == VideoContentProperty::COLOUR_CONVERSION
190                 ) {
191
192                 _have_valid_pieces = false;
193                 Changed (frequent);
194
195         } else if (
196                 property == SubtitleContentProperty::LINE_SPACING ||
197                 property == SubtitleContentProperty::OUTLINE_WIDTH ||
198                 property == SubtitleContentProperty::Y_SCALE ||
199                 property == SubtitleContentProperty::FADE_IN ||
200                 property == SubtitleContentProperty::FADE_OUT ||
201                 property == ContentProperty::VIDEO_FRAME_RATE ||
202                 property == SubtitleContentProperty::USE ||
203                 property == SubtitleContentProperty::X_OFFSET ||
204                 property == SubtitleContentProperty::Y_OFFSET ||
205                 property == SubtitleContentProperty::X_SCALE ||
206                 property == SubtitleContentProperty::FONTS ||
207                 property == VideoContentProperty::CROP ||
208                 property == VideoContentProperty::SCALE ||
209                 property == VideoContentProperty::FADE_IN ||
210                 property == VideoContentProperty::FADE_OUT
211                 ) {
212
213                 Changed (frequent);
214         }
215 }
216
217 void
218 Player::set_video_container_size (dcp::Size s)
219 {
220         if (s == _video_container_size) {
221                 return;
222         }
223
224         _video_container_size = s;
225
226         _black_image.reset (new Image (AV_PIX_FMT_RGB24, _video_container_size, true));
227         _black_image->make_black ();
228
229         Changed (false);
230 }
231
232 void
233 Player::playlist_changed ()
234 {
235         _have_valid_pieces = false;
236         Changed (false);
237 }
238
239 void
240 Player::film_changed (Film::Property p)
241 {
242         /* Here we should notice Film properties that affect our output, and
243            alert listeners that our output would now be different from how it was
244            the last time we were run.
245         */
246
247         if (p == Film::CONTAINER) {
248                 Changed (false);
249         } else if (p == Film::VIDEO_FRAME_RATE) {
250                 /* Pieces contain a FrameRateChange which contains the DCP frame rate,
251                    so we need new pieces here.
252                 */
253                 _have_valid_pieces = false;
254                 Changed (false);
255         } else if (p == Film::AUDIO_PROCESSOR) {
256                 if (_film->audio_processor ()) {
257                         _audio_processor = _film->audio_processor()->clone (_film->audio_frame_rate ());
258                 }
259         }
260 }
261
262 list<PositionImage>
263 Player::transform_image_subtitles (list<ImageSubtitle> subs) const
264 {
265         list<PositionImage> all;
266
267         for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
268                 if (!i->image) {
269                         continue;
270                 }
271
272                 /* We will scale the subtitle up to fit _video_container_size */
273                 dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);
274
275                 /* Then we need a corrective translation, consisting of two parts:
276                  *
277                  * 1.  that which is the result of the scaling of the subtitle by _video_container_size; this will be
278                  *     rect.x * _video_container_size.width and rect.y * _video_container_size.height.
279                  *
280                  * 2.  that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
281                  *     (width_before_subtitle_scale * (1 - subtitle_x_scale) / 2) and
282                  *     (height_before_subtitle_scale * (1 - subtitle_y_scale) / 2).
283                  *
284                  * Combining these two translations gives these expressions.
285                  */
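                /* For example (hypothetical numbers): with _video_container_size 1998x1080
                   and rectangle (x=0.1, y=0.2, width=0.5, height=0.3) the subtitle image is
                   scaled to 999x324 and positioned at (200, 216).
                */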
286
287                 all.push_back (
288                         PositionImage (
289                                 i->image->scale (
290                                         scaled_size,
291                                         dcp::YUV_TO_RGB_REC601,
292                                         i->image->pixel_format (),
293                                         true,
294                                         _fast
295                                         ),
296                                 Position<int> (
297                                         lrint (_video_container_size.width * i->rectangle.x),
298                                         lrint (_video_container_size.height * i->rectangle.y)
299                                         )
300                                 )
301                         );
302         }
303
304         return all;
305 }
306
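/** @return A black frame, sized to the video container, suitable for filling gaps
 *  in the playlist or padding out an empty film.
 */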
307 shared_ptr<PlayerVideo>
308 Player::black_player_video_frame () const
309 {
310         return shared_ptr<PlayerVideo> (
311                 new PlayerVideo (
312                         shared_ptr<const ImageProxy> (new RawImageProxy (_black_image)),
313                         Crop (),
314                         optional<double> (),
315                         _video_container_size,
316                         _video_container_size,
317                         EYES_BOTH,
318                         PART_WHOLE,
319                         PresetColourConversion::all().front().conversion
320                 )
321         );
322 }
323
324 Frame
325 Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
326 {
327         DCPTime s = t - piece->content->position ();
328         s = min (piece->content->length_after_trim(), s);
329         s = max (DCPTime(), s + DCPTime (piece->content->trim_start(), piece->frc));
330
331         /* It might seem more logical here to convert s to a ContentTime (using the FrameRateChange)
332            then convert that ContentTime to frames at the content's rate.  However this fails for
333            situations like content at 29.9978733fps, DCP at 30fps.  The accuracy of the Time type is not
334            enough to distinguish between the two with low values of time (e.g. 3200 in Time units).
335
336            Instead we convert the DCPTime using the DCP video rate then account for any skip/repeat.
337         */
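        /* A sketch of the arithmetic, assuming FrameRateChange::factor() is the repeat
           count (or 0.5 when skipping) and ignoring trim: 25fps content in a 50fps DCP
           gives factor() == 2, so DCP frame 100 maps back to content frame 50.
        */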
338         return s.frames_floor (piece->frc.dcp) / piece->frc.factor ();
339 }
340
341 DCPTime
342 Player::content_video_to_dcp (shared_ptr<const Piece> piece, Frame f) const
343 {
344         /* See comment in dcp_to_content_video */
345         DCPTime const d = DCPTime::from_frames (f * piece->frc.factor(), piece->frc.dcp) - DCPTime (piece->content->trim_start (), piece->frc);
346         return max (DCPTime (), d + piece->content->position ());
347 }
348
349 Frame
350 Player::dcp_to_resampled_audio (shared_ptr<const Piece> piece, DCPTime t) const
351 {
352         DCPTime s = t - piece->content->position ();
353         s = min (piece->content->length_after_trim(), s);
354         /* See notes in dcp_to_content_video */
355         return max (DCPTime (), DCPTime (piece->content->trim_start (), piece->frc) + s).frames_floor (_film->audio_frame_rate ());
356 }
357
358 DCPTime
359 Player::resampled_audio_to_dcp (shared_ptr<const Piece> piece, Frame f) const
360 {
361         /* See comment in dcp_to_content_video */
362         return DCPTime::from_frames (f, _film->audio_frame_rate())
363                 - DCPTime (piece->content->trim_start(), piece->frc)
364                 + piece->content->position();
365 }
366
367 ContentTime
368 Player::dcp_to_content_time (shared_ptr<const Piece> piece, DCPTime t) const
369 {
370         DCPTime s = t - piece->content->position ();
371         s = min (piece->content->length_after_trim(), s);
372         return max (ContentTime (), ContentTime (s, piece->frc) + piece->content->trim_start());
373 }
374
375 DCPTime
376 Player::content_time_to_dcp (shared_ptr<const Piece> piece, ContentTime t) const
377 {
378         return max (DCPTime (), DCPTime (t - piece->content->trim_start(), piece->frc) + piece->content->position());
379 }
380
381 list<shared_ptr<Font> >
382 Player::get_subtitle_fonts ()
383 {
384         if (!_have_valid_pieces) {
385                 setup_pieces ();
386         }
387
388         list<shared_ptr<Font> > fonts;
389         BOOST_FOREACH (shared_ptr<Piece>& p, _pieces) {
390                 if (p->content->subtitle) {
391                         /* XXX: things may go wrong if there are duplicate font IDs
392                            with different font files.
393                         */
394                         list<shared_ptr<Font> > f = p->content->subtitle->fonts ();
395                         copy (f.begin(), f.end(), back_inserter (fonts));
396                 }
397         }
398
399         return fonts;
400 }
401
402 /** Set this player never to produce any video data */
403 void
404 Player::set_ignore_video ()
405 {
406         _ignore_video = true;
407 }
408
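/** Set this player never to produce any subtitle data */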
409 void
410 Player::set_ignore_subtitle ()
411 {
412         _ignore_subtitle = true;
413 }
414
415 /** Set whether or not this player should always burn text subtitles into the image,
416  *  regardless of the content settings.
417  *  @param burn true to always burn subtitles, false to obey content settings.
418  */
419 void
420 Player::set_always_burn_subtitles (bool burn)
421 {
422         _always_burn_subtitles = burn;
423 }
424
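/** Set this player to use quicker, lower-quality processing (e.g. when scaling images),
 *  typically for preview rather than for a final encode.
 */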
425 void
426 Player::set_fast ()
427 {
428         _fast = true;
429         _have_valid_pieces = false;
430 }
431
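/** Set this player to decode and play content which would otherwise be referenced
 *  directly from an existing DCP (and so normally skipped).
 */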
432 void
433 Player::set_play_referenced ()
434 {
435         _play_referenced = true;
436         _have_valid_pieces = false;
437 }
438
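/** @return Details of the reel assets (picture, sound, subtitle) from any DCP content
 *  which this film refers to directly rather than re-encoding, along with the period
 *  that each occupies in the film.
 */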
439 list<ReferencedReelAsset>
440 Player::get_reel_assets ()
441 {
442         list<ReferencedReelAsset> a;
443
444         BOOST_FOREACH (shared_ptr<Content> i, _playlist->content ()) {
445                 shared_ptr<DCPContent> j = dynamic_pointer_cast<DCPContent> (i);
446                 if (!j) {
447                         continue;
448                 }
449
450                 scoped_ptr<DCPDecoder> decoder;
451                 try {
452                         decoder.reset (new DCPDecoder (j, _film->log()));
453                 } catch (...) {
454                         return a;
455                 }
456
457                 int64_t offset = 0;
458                 BOOST_FOREACH (shared_ptr<dcp::Reel> k, decoder->reels()) {
459
460                         DCPOMATIC_ASSERT (j->video_frame_rate ());
461                         double const cfr = j->video_frame_rate().get();
462                         Frame const trim_start = j->trim_start().frames_round (cfr);
463                         Frame const trim_end = j->trim_end().frames_round (cfr);
464                         int const ffr = _film->video_frame_rate ();
465
466                         DCPTime const from = i->position() + DCPTime::from_frames (offset, _film->video_frame_rate());
467                         if (j->reference_video ()) {
468                                 shared_ptr<dcp::ReelAsset> ra = k->main_picture ();
469                                 DCPOMATIC_ASSERT (ra);
470                                 ra->set_entry_point (ra->entry_point() + trim_start);
471                                 ra->set_duration (ra->duration() - trim_start - trim_end);
472                                 a.push_back (
473                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
474                                         );
475                         }
476
477                         if (j->reference_audio ()) {
478                                 shared_ptr<dcp::ReelAsset> ra = k->main_sound ();
479                                 DCPOMATIC_ASSERT (ra);
480                                 ra->set_entry_point (ra->entry_point() + trim_start);
481                                 ra->set_duration (ra->duration() - trim_start - trim_end);
482                                 a.push_back (
483                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
484                                         );
485                         }
486
487                         if (j->reference_subtitle ()) {
488                                 shared_ptr<dcp::ReelAsset> ra = k->main_subtitle ();
489                                 DCPOMATIC_ASSERT (ra);
490                                 ra->set_entry_point (ra->entry_point() + trim_start);
491                                 ra->set_duration (ra->duration() - trim_start - trim_end);
492                                 a.push_back (
493                                         ReferencedReelAsset (ra, DCPTimePeriod (from, from + DCPTime::from_frames (ra->duration(), ffr)))
494                                         );
495                         }
496
497                         /* Assume that main picture duration is the length of the reel */
498                         offset += k->main_picture()->duration ();
499                 }
500         }
501
502         return a;
503 }
504
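/** Make the piece (or the black/silence filler) which is furthest behind emit some data,
 *  then emit any audio which the merger has ready.
 *  @return true when there is nothing more to emit.
 *
 *  A minimal usage sketch, assuming the caller handles the emitted data via this
 *  player's Video and Audio signals (handle_video and handle_audio are hypothetical):
 *
 *      shared_ptr<Player> player (new Player (film, film->playlist ()));
 *      player->Video.connect (bind (&handle_video, _1, _2));
 *      player->Audio.connect (bind (&handle_audio, _1, _2));
 *      while (!player->pass ()) {}
 */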
505 bool
506 Player::pass ()
507 {
508         if (!_have_valid_pieces) {
509                 setup_pieces ();
510         }
511
512         /* Find whichever of the decoders and the black/silence fillers is furthest behind where we are, and make it emit some data */
513
514         shared_ptr<Piece> earliest;
515         DCPTime earliest_content;
516
517         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
518                 if (!i->done) {
519                         DCPTime const t = content_time_to_dcp (i, i->decoder->position());
520                         /* Given two choices at the same time, pick the one with a subtitle so that it is decoded before
521                            the video frame it will be burnt into.
522                         */
523                         if (!earliest || t < earliest_content || (t == earliest_content && i->decoder->subtitle)) {
524                                 earliest_content = t;
525                                 earliest = i;
526                         }
527                 }
528         }
529
530         bool done = false;
531
532         if (!_black.done() && (!earliest || _black.position() < earliest_content)) {
533                 /* There is some black that must be emitted */
534                 emit_video (black_player_video_frame(), _black.position());
535                 _black.set_position (_black.position() + one_video_frame());
536         } else if (!_silent.done() && (!earliest || _silent.position() < earliest_content)) {
537                 /* There is some silence that must be emitted */
538                 DCPTimePeriod period (_silent.period_at_position());
539                 if (period.duration() > one_video_frame()) {
540                         period.to = period.from + one_video_frame();
541                 }
542                 fill_audio (period);
543                 _silent.set_position (period.to);
544         } else if (_playlist->length() == DCPTime()) {
545                 /* Special case of an empty Film; just give one black frame */
546                 emit_video (black_player_video_frame(), DCPTime());
547         } else if (earliest) {
548                 earliest->done = earliest->decoder->pass ();
549         } else {
550                 done = true;
551         }
552
553         /* Emit any audio that is ready */
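        /* We can only pull audio up to the earliest point that every active stream has
           been pushed to, otherwise a later push from a slower stream could overlap
           audio that has already been emitted.
        */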
554
555         DCPTime pull_to = _playlist->length ();
556         for (map<AudioStreamPtr, StreamState>::const_iterator i = _stream_states.begin(); i != _stream_states.end(); ++i) {
557                 if (!i->second.piece->done && i->second.last_push_end < pull_to) {
558                         pull_to = i->second.last_push_end;
559                 }
560         }
561
562         list<pair<shared_ptr<AudioBuffers>, DCPTime> > audio = _audio_merger.pull (pull_to);
563         for (list<pair<shared_ptr<AudioBuffers>, DCPTime> >::iterator i = audio.begin(); i != audio.end(); ++i) {
564                 if (_last_audio_time && i->second < *_last_audio_time) {
565                         /* There has been an accurate seek and we have received some audio before the seek time;
566                            discard it.
567                         */
568                         pair<shared_ptr<AudioBuffers>, DCPTime> cut = discard_audio (i->first, i->second, *_last_audio_time);
569                         if (!cut.first) {
570                                 continue;
571                         }
572                         *i = cut;
573                 }
574
575                 emit_audio (i->first, i->second);
576         }
577
578         return done;
579 }
580
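/** @return All subtitles (image, and text rendered to images) which should be burnt
 *  into the frame at the given time, merged into a single positioned image, if any.
 */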
581 optional<PositionImage>
582 Player::subtitles_for_frame (DCPTime time) const
583 {
584         list<PositionImage> subtitles;
585
586         BOOST_FOREACH (PlayerSubtitles i, _active_subtitles.get_burnt (time, _always_burn_subtitles)) {
587
588                 /* Image subtitles */
589                 list<PositionImage> c = transform_image_subtitles (i.image);
590                 copy (c.begin(), c.end(), back_inserter (subtitles));
591
592                 /* Text subtitles (rendered to an image) */
593                 if (!i.text.empty ()) {
594                         list<PositionImage> s = render_subtitles (i.text, i.fonts, _video_container_size, time);
595                         copy (s.begin(), s.end(), back_inserter (subtitles));
596                 }
597         }
598
599         if (subtitles.empty ()) {
600                 return optional<PositionImage> ();
601         }
602
603         return merge (subtitles);
604 }
605
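/** Handle a video frame arriving from a decoder: drop skipped frames, fill any gap
 *  since the last emitted video with repeated or black frames, then (unless the frame
 *  falls outside the content's period or before an accurate seek point) emit it at its
 *  DCP time.
 */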
606 void
607 Player::video (weak_ptr<Piece> wp, ContentVideo video)
608 {
609         shared_ptr<Piece> piece = wp.lock ();
610         if (!piece) {
611                 return;
612         }
613
614         FrameRateChange frc(piece->content->active_video_frame_rate(), _film->video_frame_rate());
615         if (frc.skip && (video.frame % 2) == 1) {
616                 return;
617         }
618
619         /* Time and period of the frame we will emit */
620         DCPTime const time = content_video_to_dcp (piece, video.frame);
621         DCPTimePeriod const period (time, time + one_video_frame());
622
623         /* Fill gaps that we discover now that we have some video which needs to be emitted */
624
625         if (_last_video_time) {
626                 /* XXX: this may not work for 3D */
627                 DCPTime fill_from = max (*_last_video_time, piece->content->position());
628                 for (DCPTime j = fill_from; j < time; j += one_video_frame()) {
629                         LastVideoMap::const_iterator k = _last_video.find (wp);
630                         if (k != _last_video.end ()) {
631                                 emit_video (k->second, j);
632                         } else {
633                                 emit_video (black_player_video_frame(), j);
634                         }
635                 }
636         }
637
638         /* Discard if it's outside the content's period or if it's before the last accurate seek */
639         if (
640                 time < piece->content->position() ||
641                 time >= piece->content->end() ||
642                 (_last_video_time && time < *_last_video_time)) {
643                 return;
644         }
645
646         _last_video[wp].reset (
647                 new PlayerVideo (
648                         video.image,
649                         piece->content->video->crop (),
650                         piece->content->video->fade (video.frame),
651                         piece->content->video->scale().size (
652                                 piece->content->video, _video_container_size, _film->frame_size ()
653                                 ),
654                         _video_container_size,
655                         video.eyes,
656                         video.part,
657                         piece->content->video->colour_conversion ()
658                         )
659                 );
660
661         emit_video (_last_video[wp], time);
662 }
663
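/** Handle a block of audio arriving from a decoder: trim it to the content's period,
 *  apply the content's gain, remap it to the film's channels, run any audio processor
 *  and push the result into the merger.
 */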
664 void
665 Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_audio)
666 {
667         DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);
668
669         shared_ptr<Piece> piece = wp.lock ();
670         if (!piece) {
671                 return;
672         }
673
674         shared_ptr<AudioContent> content = piece->content->audio;
675         DCPOMATIC_ASSERT (content);
676
677         /* Compute time in the DCP */
678         DCPTime time = resampled_audio_to_dcp (piece, content_audio.frame);
679         /* And the end of this block in the DCP */
680         DCPTime end = time + DCPTime::from_frames(content_audio.audio->frames(), content->resampled_frame_rate());
681
682         /* Remove anything that comes before the start or after the end of the content */
683         if (time < piece->content->position()) {
684                 pair<shared_ptr<AudioBuffers>, DCPTime> cut = discard_audio (content_audio.audio, time, piece->content->position());
685                 if (!cut.first) {
686                         /* This audio is entirely discarded */
687                         return;
688                 }
689                 content_audio.audio = cut.first;
690                 time = cut.second;
691         } else if (time > piece->content->end()) {
692                 /* Discard it all */
693                 return;
694         } else if (end > piece->content->end()) {
695                 Frame const remaining_frames = DCPTime(piece->content->end() - time).frames_round(_film->audio_frame_rate());
696                 if (remaining_frames == 0) {
697                         return;
698                 }
699                 shared_ptr<AudioBuffers> cut (new AudioBuffers (content_audio.audio->channels(), remaining_frames));
700                 cut->copy_from (content_audio.audio.get(), remaining_frames, 0, 0);
701                 content_audio.audio = cut;
702         }
703
704         DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);
705
706         /* Gain */
707
708         if (content->gain() != 0) {
709                 shared_ptr<AudioBuffers> gain (new AudioBuffers (content_audio.audio));
710                 gain->apply_gain (content->gain ());
711                 content_audio.audio = gain;
712         }
713
714         /* Remap */
715
716         content_audio.audio = remap (content_audio.audio, _film->audio_channels(), stream->mapping());
717
718         /* Process */
719
720         if (_audio_processor) {
721                 content_audio.audio = _audio_processor->run (content_audio.audio, _film->audio_channels ());
722         }
723
724         /* Push */
725
726         _audio_merger.push (content_audio.audio, time);
727         DCPOMATIC_ASSERT (_stream_states.find (stream) != _stream_states.end ());
728         _stream_states[stream].last_push_end = time + DCPTime::from_frames (content_audio.audio->frames(), _film->audio_frame_rate());
729 }
730
731 void
732 Player::image_subtitle_start (weak_ptr<Piece> wp, ContentImageSubtitle subtitle)
733 {
734         shared_ptr<Piece> piece = wp.lock ();
735         if (!piece) {
736                 return;
737         }
738
739         /* Apply content's subtitle offsets */
740         subtitle.sub.rectangle.x += piece->content->subtitle->x_offset ();
741         subtitle.sub.rectangle.y += piece->content->subtitle->y_offset ();
742
743         /* Apply content's subtitle scale */
744         subtitle.sub.rectangle.width *= piece->content->subtitle->x_scale ();
745         subtitle.sub.rectangle.height *= piece->content->subtitle->y_scale ();
746
747         /* Apply a corrective translation to keep the subtitle centred after that scale */
748         subtitle.sub.rectangle.x -= subtitle.sub.rectangle.width * (piece->content->subtitle->x_scale() - 1);
749         subtitle.sub.rectangle.y -= subtitle.sub.rectangle.height * (piece->content->subtitle->y_scale() - 1);
750
751         PlayerSubtitles ps;
752         ps.image.push_back (subtitle.sub);
753         DCPTime from (content_time_to_dcp (piece, subtitle.from()));
754
755         _active_subtitles.add_from (wp, ps, from);
756 }
757
758 void
759 Player::text_subtitle_start (weak_ptr<Piece> wp, ContentTextSubtitle subtitle)
760 {
761         shared_ptr<Piece> piece = wp.lock ();
762         if (!piece) {
763                 return;
764         }
765
766         PlayerSubtitles ps;
767         DCPTime const from (content_time_to_dcp (piece, subtitle.from()));
768
769         BOOST_FOREACH (dcp::SubtitleString s, subtitle.subs) {
770                 s.set_h_position (s.h_position() + piece->content->subtitle->x_offset ());
771                 s.set_v_position (s.v_position() + piece->content->subtitle->y_offset ());
772                 float const xs = piece->content->subtitle->x_scale();
773                 float const ys = piece->content->subtitle->y_scale();
774                 float size = s.size();
775
776                 /* Adjust size to express the common part of the scaling;
777                    e.g. if xs = ys = 0.5 we scale size by 0.5 (the expression below evaluates to max (xs, ys)).
778                 */
779                 if (xs > 1e-5 && ys > 1e-5) {
780                         size *= 1 / min (1 / xs, 1 / ys);
781                 }
782                 s.set_size (size);
783
784                 /* Then express aspect ratio changes */
785                 if (fabs (1.0 - xs / ys) > dcp::ASPECT_ADJUST_EPSILON) {
786                         s.set_aspect_adjust (xs / ys);
787                 }
788
789                 s.set_in (dcp::Time(from.seconds(), 1000));
790                 ps.text.push_back (SubtitleString (s, piece->content->subtitle->outline_width()));
791                 ps.add_fonts (piece->content->subtitle->fonts ());
792         }
793
794         _active_subtitles.add_from (wp, ps, from);
795 }
796
797 void
798 Player::subtitle_stop (weak_ptr<Piece> wp, ContentTime to)
799 {
800         if (!_active_subtitles.have (wp)) {
801                 return;
802         }
803
804         shared_ptr<Piece> piece = wp.lock ();
805         if (!piece) {
806                 return;
807         }
808
809         DCPTime const dcp_to = content_time_to_dcp (piece, to);
810
811         pair<PlayerSubtitles, DCPTime> from = _active_subtitles.add_to (wp, dcp_to);
812
813         if (piece->content->subtitle->use() && !_always_burn_subtitles && !piece->content->subtitle->burn()) {
814                 Subtitle (from.first, DCPTimePeriod (from.second, dcp_to));
815         }
816 }
817
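/** Seek so that the next pass() emits data from the given time.  If accurate is true,
 *  nothing before that time will subsequently be emitted; an inaccurate seek may cause
 *  some earlier data to be produced.
 */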
818 void
819 Player::seek (DCPTime time, bool accurate)
820 {
821         if (_audio_processor) {
822                 _audio_processor->flush ();
823         }
824
825         _audio_merger.clear ();
826         _active_subtitles.clear ();
827
828         BOOST_FOREACH (shared_ptr<Piece> i, _pieces) {
829                 if (time < i->content->position()) {
830                         /* Before; seek to 0 */
831                         i->decoder->seek (ContentTime(), accurate);
832                         i->done = false;
833                 } else if (i->content->position() <= time && time < i->content->end()) {
834                         /* During; seek to position */
835                         i->decoder->seek (dcp_to_content_time (i, time), accurate);
836                         i->done = false;
837                 } else {
838                         /* After; this piece is done */
839                         i->done = true;
840                 }
841         }
842
843         if (accurate) {
844                 _last_video_time = time;
845                 _last_audio_time = time;
846         } else {
847                 _last_video_time = optional<DCPTime>();
848                 _last_audio_time = optional<DCPTime>();
849         }
850
851         _black.set_position (time);
852         _silent.set_position (time);
853
854         _last_video.clear ();
855 }
856
857 void
858 Player::emit_video (shared_ptr<PlayerVideo> pv, DCPTime time)
859 {
860         optional<PositionImage> subtitles = subtitles_for_frame (time);
861         if (subtitles) {
862                 pv->set_subtitle (subtitles.get ());
863         }
864
865         Video (pv, time);
866
867         if (pv->eyes() == EYES_BOTH || pv->eyes() == EYES_RIGHT) {
868                 _last_video_time = time + one_video_frame();
869                 _active_subtitles.clear_before (time);
870         }
871 }
872
873 void
874 Player::emit_audio (shared_ptr<AudioBuffers> data, DCPTime time)
875 {
876         Audio (data, time);
877         _last_audio_time = time + DCPTime::from_frames (data->frames(), _film->audio_frame_rate());
878 }
879
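/** Emit silence to cover the given period, in blocks of up to half a second */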
880 void
881 Player::fill_audio (DCPTimePeriod period)
882 {
883         if (period.from == period.to) {
884                 return;
885         }
886
887         DCPOMATIC_ASSERT (period.from < period.to);
888
889         DCPTime t = period.from;
890         while (t < period.to) {
891                 DCPTime block = min (DCPTime::from_seconds (0.5), period.to - t);
892                 Frame const samples = block.frames_round(_film->audio_frame_rate());
893                 if (samples) {
894                         shared_ptr<AudioBuffers> silence (new AudioBuffers (_film->audio_channels(), samples));
895                         silence->make_silent ();
896                         emit_audio (silence, t);
897                 }
898                 t += block;
899         }
900 }
901
902 DCPTime
903 Player::one_video_frame () const
904 {
905         return DCPTime::from_frames (1, _film->video_frame_rate ());
906 }
907
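/** Discard the part of some audio which falls before discard_to, given that the audio
 *  starts at time.  @return The remaining audio and its new start time, or a null
 *  pointer if nothing remains.
 */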
908 pair<shared_ptr<AudioBuffers>, DCPTime>
909 Player::discard_audio (shared_ptr<const AudioBuffers> audio, DCPTime time, DCPTime discard_to) const
910 {
911         DCPTime const discard_time = discard_to - time;
912         Frame const discard_frames = discard_time.frames_round(_film->audio_frame_rate());
913         Frame remaining_frames = audio->frames() - discard_frames;
914         if (remaining_frames <= 0) {
915                 return make_pair(shared_ptr<AudioBuffers>(), DCPTime());
916         }
917         shared_ptr<AudioBuffers> cut (new AudioBuffers (audio->channels(), remaining_frames));
918         cut->copy_from (audio.get(), remaining_frames, discard_frames, 0);
919         return make_pair(cut, time + discard_time);
920 }