DCPVideoFrame -> DCPVideo and PlayerVideoFrame -> PlayerVideo.
[dcpomatic.git] src/lib/player.cc
/*
    Copyright (C) 2013-2014 Carl Hetherington <cth@carlh.net>

    This program is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program; if not, write to the Free Software
    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.

*/

#include <stdint.h>
#include <algorithm>
#include "player.h"
#include "film.h"
#include "ffmpeg_decoder.h"
#include "audio_buffers.h"
#include "ffmpeg_content.h"
#include "image_decoder.h"
#include "image_content.h"
#include "sndfile_decoder.h"
#include "sndfile_content.h"
#include "subtitle_content.h"
#include "subrip_decoder.h"
#include "subrip_content.h"
#include "playlist.h"
#include "job.h"
#include "image.h"
#include "image_proxy.h"
#include "ratio.h"
#include "log.h"
#include "scaler.h"
#include "render_subtitles.h"
#include "config.h"
#include "content_video.h"
#include "player_video.h"
#include "frame_rate_change.h"

#define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), Log::TYPE_GENERAL);

using std::list;
using std::cout;
using std::min;
using std::max;
using std::vector;
using std::pair;
using std::map;
using std::make_pair;
using boost::shared_ptr;
using boost::weak_ptr;
using boost::dynamic_pointer_cast;
using boost::optional;

Player::Player (shared_ptr<const Film> f, shared_ptr<const Playlist> p)
        : _film (f)
        , _playlist (p)
        , _have_valid_pieces (false)
        , _approximate_size (false)
{
        _playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
        _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::content_changed, this, _1, _2, _3));
        _film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
        set_video_container_size (_film->frame_size ());
}

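/** Recreate our list of Pieces: one for each valid piece of content in the playlist, paired with a decoder and a FrameRateChange. */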
void
Player::setup_pieces ()
{
        list<shared_ptr<Piece> > old_pieces = _pieces;
        _pieces.clear ();

        ContentList content = _playlist->content ();

        for (ContentList::iterator i = content.begin(); i != content.end(); ++i) {

                if (!(*i)->paths_valid ()) {
                        continue;
                }

                shared_ptr<Decoder> decoder;
                optional<FrameRateChange> frc;

                /* Work out a FrameRateChange for the best overlap video for this content, in case we need it below */
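                /* Two periods [a, b) and [c, d) overlap by min(b, d) - max(a, c); a positive result means they intersect. */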
                DCPTime best_overlap_t;
                shared_ptr<VideoContent> best_overlap;
                for (ContentList::iterator j = content.begin(); j != content.end(); ++j) {
                        shared_ptr<VideoContent> vc = dynamic_pointer_cast<VideoContent> (*j);
                        if (!vc) {
                                continue;
                        }

                        DCPTime const overlap = min (vc->end(), (*i)->end()) - max (vc->position(), (*i)->position());
                        if (overlap > best_overlap_t) {
                                best_overlap = vc;
                                best_overlap_t = overlap;
                        }
                }

                optional<FrameRateChange> best_overlap_frc;
                if (best_overlap) {
                        best_overlap_frc = FrameRateChange (best_overlap->video_frame_rate(), _film->video_frame_rate ());
                } else {
                        /* No video overlap; e.g. if the DCP is just audio */
                        best_overlap_frc = FrameRateChange (_film->video_frame_rate(), _film->video_frame_rate ());
                }

                /* FFmpeg */
                shared_ptr<const FFmpegContent> fc = dynamic_pointer_cast<const FFmpegContent> (*i);
                if (fc) {
                        decoder.reset (new FFmpegDecoder (fc, _film->log()));
                        frc = FrameRateChange (fc->video_frame_rate(), _film->video_frame_rate());
                }

                /* ImageContent */
                shared_ptr<const ImageContent> ic = dynamic_pointer_cast<const ImageContent> (*i);
                if (ic) {
                        /* See if we can re-use an old ImageDecoder */
                        for (list<shared_ptr<Piece> >::const_iterator j = old_pieces.begin(); j != old_pieces.end(); ++j) {
                                shared_ptr<ImageDecoder> imd = dynamic_pointer_cast<ImageDecoder> ((*j)->decoder);
                                if (imd && imd->content() == ic) {
                                        decoder = imd;
                                }
                        }

                        if (!decoder) {
                                decoder.reset (new ImageDecoder (ic));
                        }

                        frc = FrameRateChange (ic->video_frame_rate(), _film->video_frame_rate());
                }

                /* SndfileContent */
                shared_ptr<const SndfileContent> sc = dynamic_pointer_cast<const SndfileContent> (*i);
                if (sc) {
                        decoder.reset (new SndfileDecoder (sc));
                        frc = best_overlap_frc;
                }

                /* SubRipContent */
                shared_ptr<const SubRipContent> rc = dynamic_pointer_cast<const SubRipContent> (*i);
                if (rc) {
                        decoder.reset (new SubRipDecoder (rc));
                        frc = best_overlap_frc;
                }

                _pieces.push_back (shared_ptr<Piece> (new Piece (*i, decoder, frc.get ())));
        }

        _have_valid_pieces = true;
}

void
Player::content_changed (weak_ptr<Content> w, int property, bool frequent)
{
        shared_ptr<Content> c = w.lock ();
        if (!c) {
                return;
        }

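        /* The first group of properties changes the set-up of our pieces, so the decoders
           must be re-made; the second only changes how existing content is presented, so we
           just tell listeners that our output has changed.
        */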
        if (
                property == ContentProperty::POSITION ||
                property == ContentProperty::LENGTH ||
                property == ContentProperty::TRIM_START ||
                property == ContentProperty::TRIM_END ||
                property == ContentProperty::PATH ||
                property == VideoContentProperty::VIDEO_FRAME_TYPE
                ) {

                _have_valid_pieces = false;
                Changed (frequent);

        } else if (
                property == SubtitleContentProperty::SUBTITLE_USE ||
                property == SubtitleContentProperty::SUBTITLE_X_OFFSET ||
                property == SubtitleContentProperty::SUBTITLE_Y_OFFSET ||
                property == SubtitleContentProperty::SUBTITLE_SCALE ||
                property == VideoContentProperty::VIDEO_CROP ||
                property == VideoContentProperty::VIDEO_SCALE ||
                property == VideoContentProperty::VIDEO_FRAME_RATE
                ) {

                Changed (frequent);
        }
}

void
Player::playlist_changed ()
{
        _have_valid_pieces = false;
        Changed (false);
}

void
Player::set_video_container_size (dcp::Size s)
{
        _video_container_size = s;

        _black_image.reset (new Image (PIX_FMT_RGB24, _video_container_size, true));
        _black_image->make_black ();
}

void
Player::film_changed (Film::Property p)
{
        /* Here we should notice Film properties that affect our output, and
           alert listeners that our output now would be different to how it was
           last time we were run.
        */

        if (p == Film::SCALER || p == Film::CONTAINER || p == Film::VIDEO_FRAME_RATE) {
                Changed (false);
        }
}

list<PositionImage>
Player::process_content_image_subtitles (shared_ptr<SubtitleContent> content, list<shared_ptr<ContentImageSubtitle> > subs) const
{
        list<PositionImage> all;

        for (list<shared_ptr<ContentImageSubtitle> >::const_iterator i = subs.begin(); i != subs.end(); ++i) {
                if (!(*i)->image) {
                        continue;
                }

                dcpomatic::Rect<double> in_rect = (*i)->rectangle;
                dcp::Size scaled_size;

                in_rect.x += content->subtitle_x_offset ();
                in_rect.y += content->subtitle_y_offset ();

                /* We will scale the subtitle up to fit _video_container_size, and also by the additional subtitle_scale */
                scaled_size.width = in_rect.width * _video_container_size.width * content->subtitle_scale ();
                scaled_size.height = in_rect.height * _video_container_size.height * content->subtitle_scale ();

                /* Then we need a corrective translation, consisting of two parts:
                 *
                 * 1.  that which is the result of the scaling of the subtitle by _video_container_size; this will be
                 *     rect.x * _video_container_size.width and rect.y * _video_container_size.height.
                 *
                 * 2.  that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
                 *     (width_before_subtitle_scale * (1 - subtitle_scale) / 2) and
                 *     (height_before_subtitle_scale * (1 - subtitle_scale) / 2).
                 *
                 * Combining these two translations gives these expressions.
                 */
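                /* On the x axis, for example:
                 *
                 *    rect.x * _video_container_size.width
                 *        + (rect.width * _video_container_size.width) * (1 - subtitle_scale) / 2
                 *    = _video_container_size.width * (rect.x + rect.width * (1 - subtitle_scale) / 2)
                 *
                 * which is the x coordinate used for the Position below; y follows in the same way.
                 */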

                all.push_back (
                        PositionImage (
                                (*i)->image->scale (
                                        scaled_size,
                                        Scaler::from_id ("bicubic"),
                                        (*i)->image->pixel_format (),
                                        true
                                        ),
                                Position<int> (
                                        rint (_video_container_size.width * (in_rect.x + (in_rect.width * (1 - content->subtitle_scale ()) / 2))),
                                        rint (_video_container_size.height * (in_rect.y + (in_rect.height * (1 - content->subtitle_scale ()) / 2)))
                                        )
                                )
                        );
        }

        return all;
}

list<PositionImage>
Player::process_content_text_subtitles (list<shared_ptr<ContentTextSubtitle> > sub) const
{
        list<PositionImage> all;
        for (list<shared_ptr<ContentTextSubtitle> >::const_iterator i = sub.begin(); i != sub.end(); ++i) {
                if (!(*i)->subs.empty ()) {
                        all.push_back (render_subtitles ((*i)->subs, _video_container_size));
                }
        }

        return all;
}

void
Player::set_approximate_size ()
{
        _approximate_size = true;
}

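/** @return a black frame, the size of the video container, for use when there is no video content at the requested time */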
shared_ptr<PlayerVideo>
Player::black_player_video_frame () const
{
        return shared_ptr<PlayerVideo> (
                new PlayerVideo (
                        shared_ptr<const ImageProxy> (new RawImageProxy (_black_image, _film->log ())),
                        Crop (),
                        _video_container_size,
                        _video_container_size,
                        Scaler::from_id ("bicubic"),
                        EYES_BOTH,
                        PART_WHOLE,
                        Config::instance()->colour_conversions().front().conversion
                )
        );
}

/** @return All PlayerVideos at the given time (there may be two frames for 3D) */
list<shared_ptr<PlayerVideo> >
Player::get_video (DCPTime time, bool accurate)
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        list<shared_ptr<Piece> > ov = overlaps<VideoContent> (
                time,
                time + DCPTime::from_frames (1, _film->video_frame_rate ())
                );

        list<shared_ptr<PlayerVideo> > pvf;

        if (ov.empty ()) {
                /* No video content at this time */
                pvf.push_back (black_player_video_frame ());
        } else {
                /* Create a PlayerVideo from the content's video at this time */

                shared_ptr<Piece> piece = ov.back ();
                shared_ptr<VideoDecoder> decoder = dynamic_pointer_cast<VideoDecoder> (piece->decoder);
                assert (decoder);
                shared_ptr<VideoContent> content = dynamic_pointer_cast<VideoContent> (piece->content);
                assert (content);

                list<ContentVideo> content_video = decoder->get_video (dcp_to_content_video (piece, time), accurate);
                if (content_video.empty ()) {
                        pvf.push_back (black_player_video_frame ());
                        return pvf;
                }

                dcp::Size image_size = content->scale().size (content, _video_container_size, _film->frame_size ());
                if (_approximate_size) {
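                        /* Round each dimension down to a multiple of 4 by clearing its bottom two bits; presumably this gives a slightly smaller size that is more convenient to scale to. */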
                        image_size.width &= ~3;
                        image_size.height &= ~3;
                }

                for (list<ContentVideo>::const_iterator i = content_video.begin(); i != content_video.end(); ++i) {
                        pvf.push_back (
                                shared_ptr<PlayerVideo> (
                                        new PlayerVideo (
                                                i->image,
                                                content->crop (),
                                                image_size,
                                                _video_container_size,
                                                _film->scaler(),
                                                i->eyes,
                                                i->part,
                                                content->colour_conversion ()
                                                )
                                        )
                                );
                }
        }

        /* Add subtitles to whatever PlayerVideos we got */

        list<shared_ptr<Piece> > subs = overlaps<SubtitleContent> (
                time,
                time + DCPTime::from_frames (1, _film->video_frame_rate ())
                );

        list<PositionImage> sub_images;

        for (list<shared_ptr<Piece> >::const_iterator j = subs.begin(); j != subs.end(); ++j) {
                shared_ptr<SubtitleContent> subtitle_content = dynamic_pointer_cast<SubtitleContent> ((*j)->content);
                if (!subtitle_content->subtitle_use ()) {
                        continue;
                }

                shared_ptr<SubtitleDecoder> subtitle_decoder = dynamic_pointer_cast<SubtitleDecoder> ((*j)->decoder);
                ContentTime const from = dcp_to_content_subtitle (*j, time);
                /* XXX: this video_frame_rate() should be the rate that the subtitle content has been prepared for */
                ContentTime const to = from + ContentTime::from_frames (1, _film->video_frame_rate ());

                list<shared_ptr<ContentImageSubtitle> > image_subtitles = subtitle_decoder->get_image_subtitles (ContentTimePeriod (from, to));
                if (!image_subtitles.empty ()) {
                        list<PositionImage> im = process_content_image_subtitles (
                                subtitle_content,
                                image_subtitles
                                );

                        copy (im.begin(), im.end(), back_inserter (sub_images));
                }

                list<shared_ptr<ContentTextSubtitle> > text_subtitles = subtitle_decoder->get_text_subtitles (ContentTimePeriod (from, to));
                if (!text_subtitles.empty ()) {
                        list<PositionImage> im = process_content_text_subtitles (text_subtitles);
                        copy (im.begin(), im.end(), back_inserter (sub_images));
                }
        }

        if (!sub_images.empty ()) {
                for (list<shared_ptr<PlayerVideo> >::const_iterator i = pvf.begin(); i != pvf.end(); ++i) {
                        (*i)->set_subtitle (merge (sub_images));
                }
        }

        return pvf;
}

shared_ptr<AudioBuffers>
Player::get_audio (DCPTime time, DCPTime length, bool accurate)
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        AudioFrame const length_frames = length.frames (_film->audio_frame_rate ());

        shared_ptr<AudioBuffers> audio (new AudioBuffers (_film->audio_channels(), length_frames));
        audio->make_silent ();

        list<shared_ptr<Piece> > ov = overlaps<AudioContent> (time, time + length);
        if (ov.empty ()) {
                return audio;
        }

        for (list<shared_ptr<Piece> >::iterator i = ov.begin(); i != ov.end(); ++i) {

                shared_ptr<AudioContent> content = dynamic_pointer_cast<AudioContent> ((*i)->content);
                assert (content);
                shared_ptr<AudioDecoder> decoder = dynamic_pointer_cast<AudioDecoder> ((*i)->decoder);
                assert (decoder);

                if (content->audio_frame_rate() == 0) {
                        /* This AudioContent has no audio (e.g. if it is an FFmpegContent with no
                         * audio stream).
                         */
                        continue;
                }

                /* The time that we should request from the content */
                DCPTime request = time - DCPTime::from_seconds (content->audio_delay() / 1000.0);
                DCPTime offset;
                if (request < DCPTime ()) {
                        /* We went off the start of the content, so we will need to offset
                           the stuff we get back.
                        */
                        offset = -request;
                        request = DCPTime ();
                }

                AudioFrame const content_frame = dcp_to_content_audio (*i, request);

                /* Audio from this piece's decoder (which might be more or less than what we asked for) */
                shared_ptr<ContentAudio> all = decoder->get_audio (content_frame, length_frames, accurate);

                /* Gain */
                if (content->audio_gain() != 0) {
                        shared_ptr<AudioBuffers> gain (new AudioBuffers (all->audio));
                        gain->apply_gain (content->audio_gain ());
                        all->audio = gain;
                }

                /* Remap channels */
                shared_ptr<AudioBuffers> dcp_mapped (new AudioBuffers (_film->audio_channels(), all->audio->frames()));
                dcp_mapped->make_silent ();
                AudioMapping map = content->audio_mapping ();
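                /* Accumulate each content channel into each DCP channel, weighted by the gain in the mapping; channel pairs with zero gain are skipped. */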
                for (int i = 0; i < map.content_channels(); ++i) {
                        for (int j = 0; j < _film->audio_channels(); ++j) {
                                if (map.get (i, static_cast<dcp::Channel> (j)) > 0) {
                                        dcp_mapped->accumulate_channel (
                                                all->audio.get(),
                                                i,
                                                j,
                                                map.get (i, static_cast<dcp::Channel> (j))
                                                );
                                }
                        }
                }

                all->audio = dcp_mapped;

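                /* Mix this piece's audio into the output; the frame count is clamped to the smaller of what the decoder returned and what was requested, less any start offset. */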
                audio->accumulate_frames (
                        all->audio.get(),
                        content_frame - all->frame,
                        offset.frames (_film->audio_frame_rate()),
                        min (AudioFrame (all->audio->frames()), length_frames) - offset.frames (_film->audio_frame_rate ())
                        );
        }

        return audio;
}

VideoFrame
Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
{
        /* s is the offset of t from the start position of this content */
        DCPTime s = t - piece->content->position ();
        s = DCPTime (max (DCPTime::Type (0), s.get ()));
        s = DCPTime (min (piece->content->length_after_trim().get(), s.get()));

        /* Convert this to the content frame */
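        /* Multiplying by frc.factor() converts a frame count at the DCP rate into one at the content's own rate (inferred from the conversion being done here). */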
        return DCPTime (s + piece->content->trim_start()).frames (_film->video_frame_rate()) * piece->frc.factor ();
}

AudioFrame
Player::dcp_to_content_audio (shared_ptr<const Piece> piece, DCPTime t) const
{
        /* s is the offset of t from the start position of this content */
        DCPTime s = t - piece->content->position ();
        s = DCPTime (max (DCPTime::Type (0), s.get ()));
        s = DCPTime (min (piece->content->length_after_trim().get(), s.get()));

        /* Convert this to the content frame */
        return DCPTime (s + piece->content->trim_start()).frames (_film->audio_frame_rate());
}

ContentTime
Player::dcp_to_content_subtitle (shared_ptr<const Piece> piece, DCPTime t) const
{
        /* s is the offset of t from the start position of this content */
        DCPTime s = t - piece->content->position ();
        s = DCPTime (max (DCPTime::Type (0), s.get ()));
        s = DCPTime (min (piece->content->length_after_trim().get(), s.get()));

        return ContentTime (s + piece->content->trim_start(), piece->frc);
}

void
PlayerStatistics::dump (shared_ptr<Log> log) const
{
        log->log (String::compose ("Video: %1 good %2 skipped %3 black %4 repeat", video.good, video.skip, video.black, video.repeat), Log::TYPE_GENERAL);
        log->log (String::compose ("Audio: %1 good %2 skipped %3 silence", audio.good, audio.skip, audio.silence.seconds()), Log::TYPE_GENERAL);
}

PlayerStatistics const &
Player::statistics () const
{
        return _statistics;
}