/*
    Copyright (C) 2013-2014 Carl Hetherington <cth@carlh.net>

    This program is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program; if not, write to the Free Software
    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.

*/
19
20 #include <stdint.h>
21 #include <algorithm>
22 #include "player.h"
23 #include "film.h"
24 #include "ffmpeg_decoder.h"
25 #include "audio_buffers.h"
26 #include "ffmpeg_content.h"
27 #include "image_decoder.h"
28 #include "image_content.h"
29 #include "sndfile_decoder.h"
30 #include "sndfile_content.h"
31 #include "subtitle_content.h"
32 #include "subrip_decoder.h"
33 #include "subrip_content.h"
34 #include "dcp_content.h"
35 #include "playlist.h"
36 #include "job.h"
37 #include "image.h"
38 #include "raw_image_proxy.h"
39 #include "ratio.h"
40 #include "log.h"
41 #include "scaler.h"
42 #include "render_subtitles.h"
43 #include "config.h"
44 #include "content_video.h"
45 #include "player_video.h"
46 #include "frame_rate_change.h"
47 #include "dcp_content.h"
48 #include "dcp_decoder.h"
49 #include "dcp_subtitle_content.h"
50 #include "dcp_subtitle_decoder.h"
51
#define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), Log::TYPE_GENERAL);

using std::list;
using std::cout;
using std::min;
using std::max;
using std::vector;
using std::pair;
using std::map;
using std::make_pair;
using boost::shared_ptr;
using boost::weak_ptr;
using boost::dynamic_pointer_cast;
using boost::optional;

Player::Player (shared_ptr<const Film> f, shared_ptr<const Playlist> p)
        : _film (f)
        , _playlist (p)
        , _have_valid_pieces (false)
        , _approximate_size (false)
{
        _playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
        _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::content_changed, this, _1, _2, _3));
        _film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
        set_video_container_size (_film->frame_size ());
}

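/** Rebuild _pieces: one Piece for each piece of content whose paths are valid,
 *  each paired with a suitable decoder and the FrameRateChange that maps its
 *  frames onto the film's video frame rate.
 */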
void
Player::setup_pieces ()
{
        list<shared_ptr<Piece> > old_pieces = _pieces;
        _pieces.clear ();

        ContentList content = _playlist->content ();

        for (ContentList::iterator i = content.begin(); i != content.end(); ++i) {

                if (!(*i)->paths_valid ()) {
                        continue;
                }

                shared_ptr<Decoder> decoder;
                optional<FrameRateChange> frc;

                /* Work out a FrameRateChange for the best overlap video for this content, in case we need it below */
                DCPTime best_overlap_t;
                shared_ptr<VideoContent> best_overlap;
                for (ContentList::iterator j = content.begin(); j != content.end(); ++j) {
                        shared_ptr<VideoContent> vc = dynamic_pointer_cast<VideoContent> (*j);
                        if (!vc) {
                                continue;
                        }

                        DCPTime const overlap = min (vc->end(), (*i)->end()) - max (vc->position(), (*i)->position());
                        if (overlap > best_overlap_t) {
                                best_overlap = vc;
                                best_overlap_t = overlap;
                        }
                }

                optional<FrameRateChange> best_overlap_frc;
                if (best_overlap) {
                        best_overlap_frc = FrameRateChange (best_overlap->video_frame_rate(), _film->video_frame_rate ());
                } else {
                        /* No video overlap; e.g. if the DCP is just audio */
                        best_overlap_frc = FrameRateChange (_film->video_frame_rate(), _film->video_frame_rate ());
                }

                /* FFmpeg */
                shared_ptr<const FFmpegContent> fc = dynamic_pointer_cast<const FFmpegContent> (*i);
                if (fc) {
                        decoder.reset (new FFmpegDecoder (fc, _film->log()));
                        frc = FrameRateChange (fc->video_frame_rate(), _film->video_frame_rate());
                }

                shared_ptr<const DCPContent> dc = dynamic_pointer_cast<const DCPContent> (*i);
                if (dc) {
                        decoder.reset (new DCPDecoder (dc, _film->log ()));
                        frc = FrameRateChange (dc->video_frame_rate(), _film->video_frame_rate());
                }

                /* ImageContent */
                shared_ptr<const ImageContent> ic = dynamic_pointer_cast<const ImageContent> (*i);
                if (ic) {
                        /* See if we can re-use an old ImageDecoder */
                        for (list<shared_ptr<Piece> >::const_iterator j = old_pieces.begin(); j != old_pieces.end(); ++j) {
                                shared_ptr<ImageDecoder> imd = dynamic_pointer_cast<ImageDecoder> ((*j)->decoder);
                                if (imd && imd->content() == ic) {
                                        decoder = imd;
                                }
                        }

                        if (!decoder) {
                                decoder.reset (new ImageDecoder (ic));
                        }

                        frc = FrameRateChange (ic->video_frame_rate(), _film->video_frame_rate());
                }

                /* SndfileContent */
                shared_ptr<const SndfileContent> sc = dynamic_pointer_cast<const SndfileContent> (*i);
                if (sc) {
                        decoder.reset (new SndfileDecoder (sc));
                        frc = best_overlap_frc;
                }

                /* SubRipContent */
                shared_ptr<const SubRipContent> rc = dynamic_pointer_cast<const SubRipContent> (*i);
                if (rc) {
                        decoder.reset (new SubRipDecoder (rc));
                        frc = best_overlap_frc;
                }

                /* DCPSubtitleContent */
                shared_ptr<const DCPSubtitleContent> dsc = dynamic_pointer_cast<const DCPSubtitleContent> (*i);
                if (dsc) {
                        decoder.reset (new DCPSubtitleDecoder (dsc));
                        frc = best_overlap_frc;
                }

                _pieces.push_back (shared_ptr<Piece> (new Piece (*i, decoder, frc.get ())));
        }

        _have_valid_pieces = true;
}

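/** Handle a change to a piece of content.
 *  @param w Content that changed.
 *  @param property Property that changed.
 *  @param frequent Flag passed straight through to the Changed signal.
 */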
void
Player::content_changed (weak_ptr<Content> w, int property, bool frequent)
{
        shared_ptr<Content> c = w.lock ();
        if (!c) {
                return;
        }

        if (
                property == ContentProperty::POSITION ||
                property == ContentProperty::LENGTH ||
                property == ContentProperty::TRIM_START ||
                property == ContentProperty::TRIM_END ||
                property == ContentProperty::PATH ||
                property == VideoContentProperty::VIDEO_FRAME_TYPE ||
                property == DCPContentProperty::CAN_BE_PLAYED
                ) {

                _have_valid_pieces = false;
                Changed (frequent);

        } else if (
                property == SubtitleContentProperty::USE_SUBTITLES ||
                property == SubtitleContentProperty::SUBTITLE_X_OFFSET ||
                property == SubtitleContentProperty::SUBTITLE_Y_OFFSET ||
                property == SubtitleContentProperty::SUBTITLE_X_SCALE ||
                property == SubtitleContentProperty::SUBTITLE_Y_SCALE ||
                property == VideoContentProperty::VIDEO_CROP ||
                property == VideoContentProperty::VIDEO_SCALE ||
                property == VideoContentProperty::VIDEO_FRAME_RATE
                ) {

                Changed (frequent);
        }
}

/** Handle a change to the playlist: our pieces are now out of date and our output may have changed. */
void
Player::playlist_changed ()
{
        _have_valid_pieces = false;
        Changed (false);
}

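/** Set the size of the container into which video will be scaled, and rebuild
 *  the black frame that we use when there is no video.
 *  @param s New container size, in pixels.
 */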
void
Player::set_video_container_size (dcp::Size s)
{
        _video_container_size = s;

        _black_image.reset (new Image (PIX_FMT_RGB24, _video_container_size, true));
        _black_image->make_black ();
}

void
Player::film_changed (Film::Property p)
{
        /* Here we should notice Film properties that affect our output, and
           alert listeners that our output now would be different to how it was
           last time we were run.
        */

        if (p == Film::SCALER || p == Film::CONTAINER || p == Film::VIDEO_FRAME_RATE) {
                Changed (false);
        }
}

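/** Convert a list of ImageSubtitles, whose rectangles are expressed as proportions
 *  of the video container, into PositionImages scaled and positioned in pixels for
 *  _video_container_size.
 */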
list<PositionImage>
Player::transform_image_subtitles (list<ImageSubtitle> subs) const
{
        list<PositionImage> all;

        for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
                if (!i->image) {
                        continue;
                }

                /* We will scale the subtitle up to fit _video_container_size */
                dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);

                /* Then we need a corrective translation, consisting of two parts:
                 *
                 * 1.  that which is the result of the scaling of the subtitle by _video_container_size; this will be
                 *     rect.x * _video_container_size.width and rect.y * _video_container_size.height.
                 *
                 * 2.  that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
                 *     (width_before_subtitle_scale * (1 - subtitle_x_scale) / 2) and
                 *     (height_before_subtitle_scale * (1 - subtitle_y_scale) / 2).
                 *
                 * Combining these two translations gives these expressions.
                 */
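                /* For example (illustrative numbers only): with a 1998x1080 container and a
                 * subtitle rectangle of x = 0.1, y = 0.8, width = 0.8, height = 0.1, the image
                 * is scaled to roughly 1598x108 pixels and positioned at about (200, 864).
                 */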

                all.push_back (
                        PositionImage (
                                i->image->scale (
                                        scaled_size,
                                        Scaler::from_id ("bicubic"),
                                        i->image->pixel_format (),
                                        true
                                        ),
                                Position<int> (
                                        rint (_video_container_size.width * i->rectangle.x),
                                        rint (_video_container_size.height * i->rectangle.y)
                                        )
                                )
                        );
        }

        return all;
}

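/** Once this has been called, get_video() may return images whose size is only
 *  approximately correct (dimensions rounded to a multiple of 4 pixels).
 */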
void
Player::set_approximate_size ()
{
        _approximate_size = true;
}

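/** @param time Time of the frame.
 *  @return A black PlayerVideo filling the whole video container, for use when there is no video content at this time.
 */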
shared_ptr<PlayerVideo>
Player::black_player_video_frame (DCPTime time) const
{
        return shared_ptr<PlayerVideo> (
                new PlayerVideo (
                        shared_ptr<const ImageProxy> (new RawImageProxy (_black_image, _film->log ())),
                        time,
                        Crop (),
                        optional<float> (),
                        _video_container_size,
                        _video_container_size,
                        Scaler::from_id ("bicubic"),
                        EYES_BOTH,
                        PART_WHOLE,
                        Config::instance()->colour_conversions().front().conversion
                )
        );
}

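/* A minimal usage sketch (the caller's variable names here are illustrative only,
 * not part of this class): a consumer steps through the film one video frame at a
 * time, asking for the video and the matching period of audio.
 *
 *     shared_ptr<Player> player (new Player (film, film->playlist ()));
 *     DCPTime const frame = DCPTime::from_frames (1, film->video_frame_rate ());
 *     for (DCPTime t; t < film->length (); t += frame) {
 *             list<shared_ptr<PlayerVideo> > video = player->get_video (t, true);
 *             shared_ptr<AudioBuffers> audio = player->get_audio (t, frame, true);
 *             // ... encode or display the video and audio ...
 *     }
 */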
/** @return All PlayerVideos at the given time (there may be two frames for 3D) */
list<shared_ptr<PlayerVideo> >
Player::get_video (DCPTime time, bool accurate)
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        list<shared_ptr<Piece> > ov = overlaps<VideoContent> (
                time,
                time + DCPTime::from_frames (1, _film->video_frame_rate ())
                );

        list<shared_ptr<PlayerVideo> > pvf;

        if (ov.empty ()) {
                /* No video content at this time */
                pvf.push_back (black_player_video_frame (time));
        } else {
                /* Create a PlayerVideo from the content's video at this time */

                shared_ptr<Piece> piece = ov.back ();
                shared_ptr<VideoDecoder> decoder = dynamic_pointer_cast<VideoDecoder> (piece->decoder);
                assert (decoder);
                shared_ptr<VideoContent> content = dynamic_pointer_cast<VideoContent> (piece->content);
                assert (content);

                list<ContentVideo> content_video = decoder->get_video (dcp_to_content_video (piece, time), accurate);
                if (content_video.empty ()) {
                        pvf.push_back (black_player_video_frame (time));
                        return pvf;
                }

                dcp::Size image_size = content->scale().size (content, _video_container_size, _film->frame_size (), _approximate_size ? 4 : 1);
                if (_approximate_size) {
                        image_size.width &= ~3;
                        image_size.height &= ~3;
                }

                for (list<ContentVideo>::const_iterator i = content_video.begin(); i != content_video.end(); ++i) {
                        pvf.push_back (
                                shared_ptr<PlayerVideo> (
                                        new PlayerVideo (
                                                i->image,
                                                content_video_to_dcp (piece, i->frame),
                                                content->crop (),
                                                content->fade (i->frame),
                                                image_size,
                                                _video_container_size,
                                                _film->scaler(),
                                                i->eyes,
                                                i->part,
                                                content->colour_conversion ()
                                                )
                                        )
                                );
                }
        }

        /* Add subtitles (for possible burn-in) to whatever PlayerVideos we got */

        PlayerSubtitles ps = get_subtitles (time, DCPTime::from_frames (1, _film->video_frame_rate ()), false);

        list<PositionImage> sub_images;

        /* Image subtitles */
        list<PositionImage> c = transform_image_subtitles (ps.image);
        copy (c.begin(), c.end(), back_inserter (sub_images));

        /* Text subtitles (rendered to images) */
        sub_images.push_back (render_subtitles (ps.text, _video_container_size));

        if (!sub_images.empty ()) {
                for (list<shared_ptr<PlayerVideo> >::const_iterator i = pvf.begin(); i != pvf.end(); ++i) {
                        (*i)->set_subtitle (merge (sub_images));
                }
        }

        return pvf;
}

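/** Get a period of audio, mixed down to the film's channel layout, with each
 *  piece of audio content's gain, delay and channel mapping applied; silence is
 *  returned where there is no audio content.
 *  @param time Start of the period.
 *  @param length Length of the period.
 *  @param accurate Passed through to the audio decoders' get_audio calls.
 */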
shared_ptr<AudioBuffers>
Player::get_audio (DCPTime time, DCPTime length, bool accurate)
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        AudioFrame const length_frames = length.frames (_film->audio_frame_rate ());

        shared_ptr<AudioBuffers> audio (new AudioBuffers (_film->audio_channels(), length_frames));
        audio->make_silent ();

        list<shared_ptr<Piece> > ov = overlaps<AudioContent> (time, time + length);
        if (ov.empty ()) {
                return audio;
        }

        for (list<shared_ptr<Piece> >::iterator i = ov.begin(); i != ov.end(); ++i) {

                shared_ptr<AudioContent> content = dynamic_pointer_cast<AudioContent> ((*i)->content);
                assert (content);
                shared_ptr<AudioDecoder> decoder = dynamic_pointer_cast<AudioDecoder> ((*i)->decoder);
                assert (decoder);

                if (content->audio_frame_rate() == 0) {
                        /* This AudioContent has no audio (e.g. if it is an FFmpegContent with no
                         * audio stream).
                         */
                        continue;
                }

                /* The time that we should request from the content */
                DCPTime request = time - DCPTime::from_seconds (content->audio_delay() / 1000.0);
                DCPTime offset;
                if (request < DCPTime ()) {
                        /* We went off the start of the content, so we will need to offset
                           the stuff we get back.
                        */
                        offset = -request;
                        request = DCPTime ();
                }

                AudioFrame const content_frame = dcp_to_content_audio (*i, request);

                /* Audio from this piece's decoder (which might be more or less than what we asked for) */
                shared_ptr<ContentAudio> all = decoder->get_audio (content_frame, length_frames, accurate);

                /* Gain */
                if (content->audio_gain() != 0) {
                        shared_ptr<AudioBuffers> gain (new AudioBuffers (all->audio));
                        gain->apply_gain (content->audio_gain ());
                        all->audio = gain;
                }

                /* Remap channels */
                shared_ptr<AudioBuffers> dcp_mapped (new AudioBuffers (_film->audio_channels(), all->audio->frames()));
                dcp_mapped->make_silent ();
                AudioMapping map = content->audio_mapping ();
                for (int i = 0; i < map.content_channels(); ++i) {
                        for (int j = 0; j < _film->audio_channels(); ++j) {
                                if (map.get (i, static_cast<dcp::Channel> (j)) > 0) {
                                        dcp_mapped->accumulate_channel (
                                                all->audio.get(),
                                                i,
                                                j,
                                                map.get (i, static_cast<dcp::Channel> (j))
                                                );
                                }
                        }
                }

                all->audio = dcp_mapped;

                audio->accumulate_frames (
                        all->audio.get(),
                        content_frame - all->frame,
                        offset.frames (_film->audio_frame_rate()),
                        min (AudioFrame (all->audio->frames()), length_frames) - offset.frames (_film->audio_frame_rate ())
                        );
        }

        return audio;
}

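/** Convert a DCP time to a frame index in a piece of content's video, clamped to
 *  the trimmed extent of the content and scaled by its frame rate change.
 */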
VideoFrame
Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
{
        /* s is the offset of t from the start position of this content */
        DCPTime s = t - piece->content->position ();
        s = DCPTime (max (DCPTime::Type (0), s.get ()));
        s = DCPTime (min (piece->content->length_after_trim().get(), s.get()));

        /* Convert this to the content frame */
        return DCPTime (s + piece->content->trim_start()).frames (_film->video_frame_rate()) * piece->frc.factor ();
}

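/** Convert a frame index in a piece of content's video back to a DCP time; the
 *  inverse of dcp_to_content_video, clamped so that it is never negative.
 */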
DCPTime
Player::content_video_to_dcp (shared_ptr<const Piece> piece, VideoFrame f) const
{
        DCPTime t = DCPTime::from_frames (f / piece->frc.factor (), _film->video_frame_rate()) - piece->content->trim_start () + piece->content->position ();
        if (t < DCPTime ()) {
                t = DCPTime ();
        }

        return t;
}

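/** Convert a DCP time to a frame index in a piece of content's audio, clamped to
 *  the trimmed extent of the content.
 */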
AudioFrame
Player::dcp_to_content_audio (shared_ptr<const Piece> piece, DCPTime t) const
{
        /* s is the offset of t from the start position of this content */
        DCPTime s = t - piece->content->position ();
        s = DCPTime (max (DCPTime::Type (0), s.get ()));
        s = DCPTime (min (piece->content->length_after_trim().get(), s.get()));

        /* Convert this to the content frame */
        return DCPTime (s + piece->content->trim_start()).frames (_film->audio_frame_rate());
}

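/** Convert a DCP time to a ContentTime in a piece of content's subtitles, clamped
 *  to the trimmed extent of the content and taking its frame rate change into account.
 */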
ContentTime
Player::dcp_to_content_subtitle (shared_ptr<const Piece> piece, DCPTime t) const
{
        /* s is the offset of t from the start position of this content */
        DCPTime s = t - piece->content->position ();
        s = DCPTime (max (DCPTime::Type (0), s.get ()));
        s = DCPTime (min (piece->content->length_after_trim().get(), s.get()));

        return ContentTime (s + piece->content->trim_start(), piece->frc);
}

void
PlayerStatistics::dump (shared_ptr<Log> log) const
{
        log->log (String::compose ("Video: %1 good %2 skipped %3 black %4 repeat", video.good, video.skip, video.black, video.repeat), Log::TYPE_GENERAL);
        log->log (String::compose ("Audio: %1 good %2 skipped %3 silence", audio.good, audio.skip, audio.silence.seconds()), Log::TYPE_GENERAL);
}

PlayerStatistics const &
Player::statistics () const
{
        return _statistics;
}

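/** Get the subtitles that should be shown during a period, with each content's
 *  offsets and scales applied and with content whose "use subtitles" setting is
 *  off ignored.
 *  @param time Start of the period.
 *  @param length Length of the period.
 *  @param starting Passed through to the subtitle decoders' get_*_subtitles calls.
 */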
PlayerSubtitles
Player::get_subtitles (DCPTime time, DCPTime length, bool starting)
{
        list<shared_ptr<Piece> > subs = overlaps<SubtitleContent> (time, time + length);

        PlayerSubtitles ps (time, length);

        for (list<shared_ptr<Piece> >::const_iterator j = subs.begin(); j != subs.end(); ++j) {
                shared_ptr<SubtitleContent> subtitle_content = dynamic_pointer_cast<SubtitleContent> ((*j)->content);
                if (!subtitle_content->use_subtitles ()) {
                        continue;
                }

                shared_ptr<SubtitleDecoder> subtitle_decoder = dynamic_pointer_cast<SubtitleDecoder> ((*j)->decoder);
                ContentTime const from = dcp_to_content_subtitle (*j, time);
                /* XXX: this video_frame_rate() should be the rate that the subtitle content has been prepared for */
                ContentTime const to = from + ContentTime::from_frames (1, _film->video_frame_rate ());

                list<ContentImageSubtitle> image = subtitle_decoder->get_image_subtitles (ContentTimePeriod (from, to), starting);
                for (list<ContentImageSubtitle>::iterator i = image.begin(); i != image.end(); ++i) {

                        /* Apply content's subtitle offsets */
                        i->sub.rectangle.x += subtitle_content->subtitle_x_offset ();
                        i->sub.rectangle.y += subtitle_content->subtitle_y_offset ();

                        /* Apply content's subtitle scale */
                        i->sub.rectangle.width *= subtitle_content->subtitle_x_scale ();
                        i->sub.rectangle.height *= subtitle_content->subtitle_y_scale ();

                        /* Apply a corrective translation to keep the subtitle centred after that scale */
                        i->sub.rectangle.x -= i->sub.rectangle.width * (subtitle_content->subtitle_x_scale() - 1);
                        i->sub.rectangle.y -= i->sub.rectangle.height * (subtitle_content->subtitle_y_scale() - 1);

                        ps.image.push_back (i->sub);
                }

                list<ContentTextSubtitle> text = subtitle_decoder->get_text_subtitles (ContentTimePeriod (from, to), starting);
                for (list<ContentTextSubtitle>::const_iterator i = text.begin(); i != text.end(); ++i) {
                        copy (i->subs.begin(), i->subs.end(), back_inserter (ps.text));
                }
        }

        return ps;
}