[dcpomatic.git] / src / lib / player.cc
/*
    Copyright (C) 2013-2014 Carl Hetherington <cth@carlh.net>

    This program is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program; if not, write to the Free Software
    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.

*/

#include <stdint.h>
#include <algorithm>
#include <cassert>
#include <cmath>
#include "player.h"
#include "film.h"
#include "ffmpeg_decoder.h"
#include "audio_buffers.h"
#include "ffmpeg_content.h"
#include "image_decoder.h"
#include "image_content.h"
#include "sndfile_decoder.h"
#include "sndfile_content.h"
#include "subtitle_content.h"
#include "subrip_decoder.h"
#include "subrip_content.h"
#include "playlist.h"
#include "job.h"
#include "image.h"
#include "image_proxy.h"
#include "ratio.h"
#include "log.h"
#include "scaler.h"
#include "render_subtitles.h"
#include "config.h"
#include "content_video.h"
#include "player_video_frame.h"

#define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), Log::TYPE_GENERAL);

using std::list;
using std::cout;
using std::min;
using std::max;
using std::vector;
using std::pair;
using std::map;
using std::make_pair;
using boost::shared_ptr;
using boost::weak_ptr;
using boost::dynamic_pointer_cast;
using boost::optional;

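/* A Player produces a Film's video and audio on demand.  Rough usage sketch (the caller
 * shown here is illustrative only and not part of this file; `film' and `playlist' are
 * assumed to exist):
 *
 *     shared_ptr<Player> player (new Player (film, playlist));
 *     list<shared_ptr<PlayerVideoFrame> > video = player->get_video (DCPTime (), true);
 *     shared_ptr<AudioBuffers> audio = player->get_audio (DCPTime (), DCPTime::from_seconds (1), true);
 */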
Player::Player (shared_ptr<const Film> f, shared_ptr<const Playlist> p)
        : _film (f)
        , _playlist (p)
        , _have_valid_pieces (false)
        , _approximate_size (false)
        , _burn_subtitles (false)
{
        _playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
        _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::content_changed, this, _1, _2, _3));
        _film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
        set_video_container_size (_film->frame_size ());
}

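/** Rebuild _pieces from the playlist: each Piece pairs a piece of Content with a suitable
 *  Decoder and the FrameRateChange that should be applied to it.  Existing ImageDecoders
 *  are re-used where possible.
 */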
void
Player::setup_pieces ()
{
        list<shared_ptr<Piece> > old_pieces = _pieces;
        _pieces.clear ();

        ContentList content = _playlist->content ();

        for (ContentList::iterator i = content.begin(); i != content.end(); ++i) {

                if (!(*i)->paths_valid ()) {
                        continue;
                }

                shared_ptr<Decoder> decoder;
                optional<FrameRateChange> frc;

                /* Work out a FrameRateChange for the best overlap video for this content, in case we need it below */
                DCPTime best_overlap_t;
                shared_ptr<VideoContent> best_overlap;
                for (ContentList::iterator j = content.begin(); j != content.end(); ++j) {
                        shared_ptr<VideoContent> vc = dynamic_pointer_cast<VideoContent> (*j);
                        if (!vc) {
                                continue;
                        }

                        DCPTime const overlap = min (vc->end(), (*i)->end()) - max (vc->position(), (*i)->position());
                        if (overlap > best_overlap_t) {
                                best_overlap = vc;
                                best_overlap_t = overlap;
                        }
                }

                optional<FrameRateChange> best_overlap_frc;
                if (best_overlap) {
                        best_overlap_frc = FrameRateChange (best_overlap->video_frame_rate(), _film->video_frame_rate ());
                } else {
                        /* No video overlap; e.g. if the DCP is just audio */
                        best_overlap_frc = FrameRateChange (_film->video_frame_rate(), _film->video_frame_rate ());
                }

                /* FFmpeg */
                shared_ptr<const FFmpegContent> fc = dynamic_pointer_cast<const FFmpegContent> (*i);
                if (fc) {
                        decoder.reset (new FFmpegDecoder (fc, _film->log()));
                        frc = FrameRateChange (fc->video_frame_rate(), _film->video_frame_rate());
                }

                /* ImageContent */
                shared_ptr<const ImageContent> ic = dynamic_pointer_cast<const ImageContent> (*i);
                if (ic) {
                        /* See if we can re-use an old ImageDecoder */
                        for (list<shared_ptr<Piece> >::const_iterator j = old_pieces.begin(); j != old_pieces.end(); ++j) {
                                shared_ptr<ImageDecoder> imd = dynamic_pointer_cast<ImageDecoder> ((*j)->decoder);
                                if (imd && imd->content() == ic) {
                                        decoder = imd;
                                }
                        }

                        if (!decoder) {
                                decoder.reset (new ImageDecoder (ic));
                        }

                        frc = FrameRateChange (ic->video_frame_rate(), _film->video_frame_rate());
                }

                /* SndfileContent */
                shared_ptr<const SndfileContent> sc = dynamic_pointer_cast<const SndfileContent> (*i);
                if (sc) {
                        decoder.reset (new SndfileDecoder (sc));
                        frc = best_overlap_frc;
                }

                /* SubRipContent */
                shared_ptr<const SubRipContent> rc = dynamic_pointer_cast<const SubRipContent> (*i);
                if (rc) {
                        decoder.reset (new SubRipDecoder (rc));
                        frc = best_overlap_frc;
                }

                _pieces.push_back (shared_ptr<Piece> (new Piece (*i, decoder, frc.get ())));
        }

        _have_valid_pieces = true;
}

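/** Handle a change to a piece of content.  Changes that alter the structure of the pieces
 *  invalidate them; other changes just mean that our output will now be different.
 */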
void
Player::content_changed (weak_ptr<Content> w, int property, bool frequent)
{
        shared_ptr<Content> c = w.lock ();
        if (!c) {
                return;
        }

        if (
                property == ContentProperty::POSITION ||
                property == ContentProperty::LENGTH ||
                property == ContentProperty::TRIM_START ||
                property == ContentProperty::TRIM_END ||
                property == ContentProperty::PATH ||
                property == VideoContentProperty::VIDEO_FRAME_TYPE
                ) {

                _have_valid_pieces = false;
                Changed (frequent);

        } else if (
                property == SubtitleContentProperty::SUBTITLE_X_OFFSET ||
                property == SubtitleContentProperty::SUBTITLE_Y_OFFSET ||
                property == SubtitleContentProperty::SUBTITLE_SCALE ||
                property == VideoContentProperty::VIDEO_CROP ||
                property == VideoContentProperty::VIDEO_SCALE ||
                property == VideoContentProperty::VIDEO_FRAME_RATE
                ) {

                Changed (frequent);
        }
}

void
Player::playlist_changed ()
{
        _have_valid_pieces = false;
        Changed (false);
}

void
Player::set_video_container_size (dcp::Size s)
{
        _video_container_size = s;

        _black_image.reset (new Image (PIX_FMT_RGB24, _video_container_size, true));
        _black_image->make_black ();
}

void
Player::film_changed (Film::Property p)
{
        /* Here we should notice Film properties that affect our output, and
           alert listeners that our output now would be different to how it was
           last time we were run.
        */

        if (p == Film::SCALER || p == Film::WITH_SUBTITLES || p == Film::CONTAINER || p == Film::VIDEO_FRAME_RATE) {
                Changed (false);
        }
}

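/** Convert a list of image subtitles from a piece of content into images positioned and
 *  scaled for the video container.
 */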
list<PositionImage>
Player::process_content_image_subtitles (shared_ptr<SubtitleContent> content, list<shared_ptr<ContentImageSubtitle> > subs) const
{
        list<PositionImage> all;

        for (list<shared_ptr<ContentImageSubtitle> >::const_iterator i = subs.begin(); i != subs.end(); ++i) {
                if (!(*i)->image) {
                        continue;
                }

                dcpomatic::Rect<double> in_rect = (*i)->rectangle;
                dcp::Size scaled_size;

                in_rect.x += content->subtitle_x_offset ();
                in_rect.y += content->subtitle_y_offset ();

                /* We will scale the subtitle up to fit _video_container_size, and also by the additional subtitle_scale */
                scaled_size.width = in_rect.width * _video_container_size.width * content->subtitle_scale ();
                scaled_size.height = in_rect.height * _video_container_size.height * content->subtitle_scale ();

                /* Then we need a corrective translation, consisting of two parts:
                 *
                 * 1.  that which is the result of the scaling of the subtitle by _video_container_size; this will be
                 *     rect.x * _video_container_size.width and rect.y * _video_container_size.height.
                 *
                 * 2.  that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
                 *     (width_before_subtitle_scale * (1 - subtitle_scale) / 2) and
                 *     (height_before_subtitle_scale * (1 - subtitle_scale) / 2).
                 *
                 * Combining these two translations gives these expressions.
                 */
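                /* Worked example with illustrative numbers: for a container 1998 pixels wide,
                 * in_rect.x = 0.1, in_rect.width = 0.8 and subtitle_scale = 0.9, the x position
                 * is rint (1998 * (0.1 + 0.8 * (1 - 0.9) / 2)) = rint (1998 * 0.14) = 280.
                 */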

                all.push_back (
                        PositionImage (
                                (*i)->image->scale (
                                        scaled_size,
                                        Scaler::from_id ("bicubic"),
                                        (*i)->image->pixel_format (),
                                        true
                                        ),
                                Position<int> (
                                        rint (_video_container_size.width * (in_rect.x + (in_rect.width * (1 - content->subtitle_scale ()) / 2))),
                                        rint (_video_container_size.height * (in_rect.y + (in_rect.height * (1 - content->subtitle_scale ()) / 2)))
                                        )
                                )
                        );
        }

        return all;
}

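/** Render a list of text subtitles into images positioned for the video container. */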
list<PositionImage>
Player::process_content_text_subtitles (list<shared_ptr<ContentTextSubtitle> > sub) const
{
        list<PositionImage> all;
        for (list<shared_ptr<ContentTextSubtitle> >::const_iterator i = sub.begin(); i != sub.end(); ++i) {
                if (!(*i)->subs.empty ()) {
                        all.push_back (render_subtitles ((*i)->subs, _video_container_size));
                }
        }

        return all;
}

void
Player::set_approximate_size ()
{
        _approximate_size = true;
}

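/** @return A black frame, the size of the video container, for use when there is no video
 *  content at the requested time.
 */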
shared_ptr<PlayerVideoFrame>
Player::black_player_video_frame () const
{
        return shared_ptr<PlayerVideoFrame> (
                new PlayerVideoFrame (
                        shared_ptr<const ImageProxy> (new RawImageProxy (_black_image, _film->log ())),
                        Crop (),
                        _video_container_size,
                        _video_container_size,
                        Scaler::from_id ("bicubic"),
                        EYES_BOTH,
                        PART_WHOLE,
                        Config::instance()->colour_conversions().front().conversion
                )
        );
}

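/** Make a PlayerVideoFrame from a piece of content video, attaching any overlapping image
 *  subtitles (and text subtitles too, if they are being burnt in).
 */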
shared_ptr<PlayerVideoFrame>
Player::content_to_player_video_frame (
        shared_ptr<VideoContent> content,
        ContentVideo content_video,
        list<shared_ptr<Piece> > subs,
        DCPTime time,
        dcp::Size image_size) const
{
        shared_ptr<PlayerVideoFrame> pvf (
                new PlayerVideoFrame (
                        content_video.image,
                        content->crop (),
                        image_size,
                        _video_container_size,
                        _film->scaler(),
                        content_video.eyes,
                        content_video.part,
                        content->colour_conversion ()
                        )
                );

        /* Add subtitles */

        list<PositionImage> sub_images;

        for (list<shared_ptr<Piece> >::const_iterator i = subs.begin(); i != subs.end(); ++i) {
                shared_ptr<SubtitleDecoder> subtitle_decoder = dynamic_pointer_cast<SubtitleDecoder> ((*i)->decoder);
                shared_ptr<SubtitleContent> subtitle_content = dynamic_pointer_cast<SubtitleContent> ((*i)->content);
                ContentTime const from = dcp_to_content_subtitle (*i, time);
                ContentTime const to = from + ContentTime::from_frames (1, content->video_frame_rate ());

                list<shared_ptr<ContentImageSubtitle> > image_subtitles = subtitle_decoder->get_image_subtitles (ContentTimePeriod (from, to));
                if (!image_subtitles.empty ()) {
                        list<PositionImage> im = process_content_image_subtitles (
                                subtitle_content,
                                image_subtitles
                                );

                        copy (im.begin(), im.end(), back_inserter (sub_images));
                }

                if (_burn_subtitles) {
                        list<shared_ptr<ContentTextSubtitle> > text_subtitles = subtitle_decoder->get_text_subtitles (ContentTimePeriod (from, to));
                        if (!text_subtitles.empty ()) {
                                list<PositionImage> im = process_content_text_subtitles (text_subtitles);
                                copy (im.begin(), im.end(), back_inserter (sub_images));
                        }
                }
        }

        if (!sub_images.empty ()) {
                pvf->set_subtitle (merge (sub_images));
        }

        return pvf;
}

/** @return All PlayerVideoFrames at the given time (there may be two frames for 3D) */
list<shared_ptr<PlayerVideoFrame> >
Player::get_video (DCPTime time, bool accurate)
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        list<shared_ptr<Piece> > ov = overlaps<VideoContent> (
                time,
                time + DCPTime::from_frames (1, _film->video_frame_rate ())
                );

        list<shared_ptr<PlayerVideoFrame> > pvf;

        if (ov.empty ()) {
                /* No video content at this time */
                pvf.push_back (black_player_video_frame ());
                return pvf;
        }

        /* Create a PlayerVideoFrame from the content's video at this time */

        shared_ptr<Piece> piece = ov.back ();
        shared_ptr<VideoDecoder> decoder = dynamic_pointer_cast<VideoDecoder> (piece->decoder);
        assert (decoder);
        shared_ptr<VideoContent> content = dynamic_pointer_cast<VideoContent> (piece->content);
        assert (content);

        list<ContentVideo> content_video = decoder->get_video (dcp_to_content_video (piece, time), accurate);
        if (content_video.empty ()) {
                pvf.push_back (black_player_video_frame ());
                return pvf;
        }

        dcp::Size image_size = content->scale().size (content, _video_container_size, _film->frame_size ());
        if (_approximate_size) {
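                /* Round each dimension down to a multiple of 4 by clearing its bottom two
                   bits; an exact size is not needed when only an approximate one was asked for. */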
                image_size.width &= ~3;
                image_size.height &= ~3;
        }

        for (list<ContentVideo>::const_iterator i = content_video.begin(); i != content_video.end(); ++i) {
                list<shared_ptr<Piece> > subs = overlaps<SubtitleContent> (
                        time,
                        time + DCPTime::from_frames (1, _film->video_frame_rate ())
                        );

                pvf.push_back (content_to_player_video_frame (content, *i, subs, time, image_size));
        }

        return pvf;
}

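/** @return Audio for the given period, mixed down to the film's channel layout and padded
 *  with silence where no content overlaps the period.
 */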
shared_ptr<AudioBuffers>
Player::get_audio (DCPTime time, DCPTime length, bool accurate)
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        AudioFrame const length_frames = length.frames (_film->audio_frame_rate ());

        shared_ptr<AudioBuffers> audio (new AudioBuffers (_film->audio_channels(), length_frames));
        audio->make_silent ();

        list<shared_ptr<Piece> > ov = overlaps<AudioContent> (time, time + length);
        if (ov.empty ()) {
                return audio;
        }

        for (list<shared_ptr<Piece> >::iterator i = ov.begin(); i != ov.end(); ++i) {

                shared_ptr<AudioContent> content = dynamic_pointer_cast<AudioContent> ((*i)->content);
                assert (content);
                shared_ptr<AudioDecoder> decoder = dynamic_pointer_cast<AudioDecoder> ((*i)->decoder);
                assert (decoder);

                if (content->audio_frame_rate() == 0) {
                        /* This AudioContent has no audio (e.g. if it is an FFmpegContent with no
                         * audio stream).
                         */
                        continue;
                }

                /* The time that we should request from the content */
                DCPTime request = time - DCPTime::from_seconds (content->audio_delay() / 1000.0);
                DCPTime offset;
                if (request < DCPTime ()) {
                        /* We went off the start of the content, so we will need to offset
                           the stuff we get back.
                        */
                        offset = -request;
                        request = DCPTime ();
                }

                AudioFrame const content_frame = dcp_to_content_audio (*i, request);

                /* Audio from this piece's decoder (which might be more or less than what we asked for) */
                shared_ptr<ContentAudio> all = decoder->get_audio (content_frame, length_frames, accurate);

                /* Gain */
                if (content->audio_gain() != 0) {
                        shared_ptr<AudioBuffers> gain (new AudioBuffers (all->audio));
                        gain->apply_gain (content->audio_gain ());
                        all->audio = gain;
                }

                /* Remap channels */
                shared_ptr<AudioBuffers> dcp_mapped (new AudioBuffers (_film->audio_channels(), all->audio->frames()));
                dcp_mapped->make_silent ();
                AudioMapping map = content->audio_mapping ();
                for (int i = 0; i < map.content_channels(); ++i) {
                        for (int j = 0; j < _film->audio_channels(); ++j) {
                                if (map.get (i, static_cast<dcp::Channel> (j)) > 0) {
                                        dcp_mapped->accumulate_channel (
                                                all->audio.get(),
                                                i,
                                                j,
                                                map.get (i, static_cast<dcp::Channel> (j))
                                                );
                                }
                        }
                }

                all->audio = dcp_mapped;

                audio->accumulate_frames (
                        all->audio.get(),
                        content_frame - all->frame,
                        offset.frames (_film->audio_frame_rate()),
                        min (AudioFrame (all->audio->frames()), length_frames) - offset.frames (_film->audio_frame_rate ())
                        );
        }

        return audio;
}

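/** Convert a DCPTime to a frame index within a piece of video content, clamping to the
 *  content's trimmed length and allowing for its trim and frame rate change.
 */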
VideoFrame
Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
{
        /* s is the offset of t from the start position of this content */
        DCPTime s = t - piece->content->position ();
        s = DCPTime (max (int64_t (0), s.get ()));
        s = DCPTime (min (piece->content->length_after_trim().get(), s.get()));

        /* Convert this to the content frame */
        return DCPTime (s + piece->content->trim_start()).frames (_film->video_frame_rate()) * piece->frc.factor ();
}

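/** Convert a DCPTime to a frame index within a piece of audio content, clamping to the
 *  content's trimmed length.
 */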
AudioFrame
Player::dcp_to_content_audio (shared_ptr<const Piece> piece, DCPTime t) const
{
        /* s is the offset of t from the start position of this content */
        DCPTime s = t - piece->content->position ();
        s = DCPTime (max (int64_t (0), s.get ()));
        s = DCPTime (min (piece->content->length_after_trim().get(), s.get()));

        /* Convert this to the content frame */
        return DCPTime (s + piece->content->trim_start()).frames (_film->audio_frame_rate());
}

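/** Convert a DCPTime to a ContentTime within a piece of subtitle content, clamping to the
 *  content's trimmed length.
 */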
ContentTime
Player::dcp_to_content_subtitle (shared_ptr<const Piece> piece, DCPTime t) const
{
        /* s is the offset of t from the start position of this content */
        DCPTime s = t - piece->content->position ();
        s = DCPTime (max (int64_t (0), s.get ()));
        s = DCPTime (min (piece->content->length_after_trim().get(), s.get()));

        return ContentTime (s + piece->content->trim_start(), piece->frc);
}

void
PlayerStatistics::dump (shared_ptr<Log> log) const
{
        log->log (String::compose ("Video: %1 good %2 skipped %3 black %4 repeat", video.good, video.skip, video.black, video.repeat), Log::TYPE_GENERAL);
        log->log (String::compose ("Audio: %1 good %2 skipped %3 silence", audio.good, audio.skip, audio.silence.seconds()), Log::TYPE_GENERAL);
}

PlayerStatistics const &
Player::statistics () const
{
        return _statistics;
}