Put times in subtitle view.
[dcpomatic.git] src/lib/player.cc
/*
    Copyright (C) 2013-2014 Carl Hetherington <cth@carlh.net>

    This program is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program; if not, write to the Free Software
    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.

*/

#include <stdint.h>
#include <algorithm>
#include <cassert>
#include "player.h"
#include "film.h"
#include "ffmpeg_decoder.h"
#include "audio_buffers.h"
#include "ffmpeg_content.h"
#include "image_decoder.h"
#include "image_content.h"
#include "sndfile_decoder.h"
#include "sndfile_content.h"
#include "subtitle_content.h"
#include "subrip_decoder.h"
#include "subrip_content.h"
#include "playlist.h"
#include "job.h"
#include "image.h"
#include "image_proxy.h"
#include "ratio.h"
#include "log.h"
#include "scaler.h"
#include "render_subtitles.h"
#include "config.h"
#include "content_video.h"
#include "player_video_frame.h"
#include "frame_rate_change.h"

#define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), Log::TYPE_GENERAL);

using std::list;
using std::cout;
using std::min;
using std::max;
using std::vector;
using std::pair;
using std::map;
using std::make_pair;
using boost::shared_ptr;
using boost::weak_ptr;
using boost::dynamic_pointer_cast;
using boost::optional;

Player::Player (shared_ptr<const Film> f, shared_ptr<const Playlist> p)
        : _film (f)
        , _playlist (p)
        , _have_valid_pieces (false)
        , _approximate_size (false)
{
        _playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
        _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::content_changed, this, _1, _2, _3));
        _film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
        set_video_container_size (_film->frame_size ());
}

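/** Rebuild our list of Pieces: one for each piece of content in the playlist,
 *  pairing the content with a suitable decoder and with the FrameRateChange
 *  needed to map its frames onto the film's video frame rate.
 */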
void
Player::setup_pieces ()
{
        list<shared_ptr<Piece> > old_pieces = _pieces;
        _pieces.clear ();

        ContentList content = _playlist->content ();

        for (ContentList::iterator i = content.begin(); i != content.end(); ++i) {

                if (!(*i)->paths_valid ()) {
                        continue;
                }

                shared_ptr<Decoder> decoder;
                optional<FrameRateChange> frc;

                /* Work out a FrameRateChange for the best overlap video for this content, in case we need it below */
                DCPTime best_overlap_t;
                shared_ptr<VideoContent> best_overlap;
                for (ContentList::iterator j = content.begin(); j != content.end(); ++j) {
                        shared_ptr<VideoContent> vc = dynamic_pointer_cast<VideoContent> (*j);
                        if (!vc) {
                                continue;
                        }

                        DCPTime const overlap = min (vc->end(), (*i)->end()) - max (vc->position(), (*i)->position());
                        if (overlap > best_overlap_t) {
                                best_overlap = vc;
                                best_overlap_t = overlap;
                        }
                }

                optional<FrameRateChange> best_overlap_frc;
                if (best_overlap) {
                        best_overlap_frc = FrameRateChange (best_overlap->video_frame_rate(), _film->video_frame_rate ());
                } else {
                        /* No video overlap; e.g. if the DCP is just audio */
                        best_overlap_frc = FrameRateChange (_film->video_frame_rate(), _film->video_frame_rate ());
                }

                /* FFmpeg */
                shared_ptr<const FFmpegContent> fc = dynamic_pointer_cast<const FFmpegContent> (*i);
                if (fc) {
                        decoder.reset (new FFmpegDecoder (fc, _film->log()));
                        frc = FrameRateChange (fc->video_frame_rate(), _film->video_frame_rate());
                }

                /* ImageContent */
                shared_ptr<const ImageContent> ic = dynamic_pointer_cast<const ImageContent> (*i);
                if (ic) {
                        /* See if we can re-use an old ImageDecoder */
                        for (list<shared_ptr<Piece> >::const_iterator j = old_pieces.begin(); j != old_pieces.end(); ++j) {
                                shared_ptr<ImageDecoder> imd = dynamic_pointer_cast<ImageDecoder> ((*j)->decoder);
                                if (imd && imd->content() == ic) {
                                        decoder = imd;
                                }
                        }

                        if (!decoder) {
                                decoder.reset (new ImageDecoder (ic));
                        }

                        frc = FrameRateChange (ic->video_frame_rate(), _film->video_frame_rate());
                }

                /* SndfileContent */
                shared_ptr<const SndfileContent> sc = dynamic_pointer_cast<const SndfileContent> (*i);
                if (sc) {
                        decoder.reset (new SndfileDecoder (sc));
                        frc = best_overlap_frc;
                }

                /* SubRipContent */
                shared_ptr<const SubRipContent> rc = dynamic_pointer_cast<const SubRipContent> (*i);
                if (rc) {
                        decoder.reset (new SubRipDecoder (rc));
                        frc = best_overlap_frc;
                }

                _pieces.push_back (shared_ptr<Piece> (new Piece (*i, decoder, frc.get ())));
        }

        _have_valid_pieces = true;
}

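/** Called when some property of a piece of content has changed; decide whether the
 *  change invalidates our Pieces (so they must be rebuilt) or whether it is enough
 *  just to tell listeners that our output has changed.
 */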
void
Player::content_changed (weak_ptr<Content> w, int property, bool frequent)
{
        shared_ptr<Content> c = w.lock ();
        if (!c) {
                return;
        }

        if (
                property == ContentProperty::POSITION ||
                property == ContentProperty::LENGTH ||
                property == ContentProperty::TRIM_START ||
                property == ContentProperty::TRIM_END ||
                property == ContentProperty::PATH ||
                property == VideoContentProperty::VIDEO_FRAME_TYPE
                ) {

                _have_valid_pieces = false;
                Changed (frequent);

        } else if (
                property == SubtitleContentProperty::SUBTITLE_X_OFFSET ||
                property == SubtitleContentProperty::SUBTITLE_Y_OFFSET ||
                property == SubtitleContentProperty::SUBTITLE_SCALE ||
                property == VideoContentProperty::VIDEO_CROP ||
                property == VideoContentProperty::VIDEO_SCALE ||
                property == VideoContentProperty::VIDEO_FRAME_RATE
                ) {

                Changed (frequent);
        }
}

/** Called when the playlist itself has changed; our Pieces are no longer valid. */
void
Player::playlist_changed ()
{
        _have_valid_pieces = false;
        Changed (false);
}

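/** Set the size of the container into which video will be scaled, and rebuild the
 *  black frame that is used when there is no video at a given time.
 */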
void
Player::set_video_container_size (dcp::Size s)
{
        _video_container_size = s;

        _black_image.reset (new Image (PIX_FMT_RGB24, _video_container_size, true));
        _black_image->make_black ();
}

void
Player::film_changed (Film::Property p)
{
        /* Here we should notice Film properties that affect our output, and
           alert listeners that our output now would be different to how it was
           last time we were run.
        */

        if (p == Film::SCALER || p == Film::WITH_SUBTITLES || p == Film::CONTAINER || p == Film::VIDEO_FRAME_RATE) {
                Changed (false);
        }
}

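/** Turn a list of image subtitles from some content into PositionImages, scaled and
 *  positioned in terms of our video container size.
 */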
list<PositionImage>
Player::process_content_image_subtitles (shared_ptr<SubtitleContent> content, list<shared_ptr<ContentImageSubtitle> > subs) const
{
        list<PositionImage> all;

        for (list<shared_ptr<ContentImageSubtitle> >::const_iterator i = subs.begin(); i != subs.end(); ++i) {
                if (!(*i)->image) {
                        continue;
                }

                dcpomatic::Rect<double> in_rect = (*i)->rectangle;
                dcp::Size scaled_size;

                in_rect.x += content->subtitle_x_offset ();
                in_rect.y += content->subtitle_y_offset ();

                /* We will scale the subtitle up to fit _video_container_size, and also by the additional subtitle_scale */
                scaled_size.width = in_rect.width * _video_container_size.width * content->subtitle_scale ();
                scaled_size.height = in_rect.height * _video_container_size.height * content->subtitle_scale ();

                /* Then we need a corrective translation, consisting of two parts:
                 *
                 * 1.  that which is the result of the scaling of the subtitle by _video_container_size; this will be
                 *     rect.x * _video_container_size.width and rect.y * _video_container_size.height.
                 *
                 * 2.  that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
                 *     (width_before_subtitle_scale * (1 - subtitle_scale) / 2) and
                 *     (height_before_subtitle_scale * (1 - subtitle_scale) / 2).
                 *
                 * Combining these two translations gives these expressions.
                 */
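                /* Worked example: with in_rect.x = 0.1, in_rect.width = 0.5 and subtitle_scale = 0.8
                 * the x position below becomes
                 *     container_width * (0.1 + 0.5 * (1 - 0.8) / 2) = container_width * 0.15
                 * i.e. the original 0.1 offset plus half of the width that the extra scaling removes.
                 */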

                all.push_back (
                        PositionImage (
                                (*i)->image->scale (
                                        scaled_size,
                                        Scaler::from_id ("bicubic"),
                                        (*i)->image->pixel_format (),
                                        true
                                        ),
                                Position<int> (
                                        rint (_video_container_size.width * (in_rect.x + (in_rect.width * (1 - content->subtitle_scale ()) / 2))),
                                        rint (_video_container_size.height * (in_rect.y + (in_rect.height * (1 - content->subtitle_scale ()) / 2)))
                                        )
                                )
                        );
        }

        return all;
}

list<PositionImage>
Player::process_content_text_subtitles (list<shared_ptr<ContentTextSubtitle> > sub) const
{
        list<PositionImage> all;
        for (list<shared_ptr<ContentTextSubtitle> >::const_iterator i = sub.begin(); i != sub.end(); ++i) {
                if (!(*i)->subs.empty ()) {
                        all.push_back (render_subtitles ((*i)->subs, _video_container_size));
                }
        }

        return all;
}

void
Player::set_approximate_size ()
{
        _approximate_size = true;
}

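/** @return A black frame filling the whole video container, for use when there is no
 *  video content at a given time.
 */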
shared_ptr<PlayerVideoFrame>
Player::black_player_video_frame () const
{
        return shared_ptr<PlayerVideoFrame> (
                new PlayerVideoFrame (
                        shared_ptr<const ImageProxy> (new RawImageProxy (_black_image, _film->log ())),
                        Crop (),
                        _video_container_size,
                        _video_container_size,
                        Scaler::from_id ("bicubic"),
                        EYES_BOTH,
                        PART_WHOLE,
                        Config::instance()->colour_conversions().front().conversion
                )
        );
}

/** @return All PlayerVideoFrames at the given time (there may be two frames for 3D) */
list<shared_ptr<PlayerVideoFrame> >
Player::get_video (DCPTime time, bool accurate)
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        list<shared_ptr<Piece> > ov = overlaps<VideoContent> (
                time,
                time + DCPTime::from_frames (1, _film->video_frame_rate ())
                );

        list<shared_ptr<PlayerVideoFrame> > pvf;

        if (ov.empty ()) {
                /* No video content at this time */
                pvf.push_back (black_player_video_frame ());
        } else {
                /* Create a PlayerVideoFrame from the content's video at this time */

                shared_ptr<Piece> piece = ov.back ();
                shared_ptr<VideoDecoder> decoder = dynamic_pointer_cast<VideoDecoder> (piece->decoder);
                assert (decoder);
                shared_ptr<VideoContent> content = dynamic_pointer_cast<VideoContent> (piece->content);
                assert (content);

                list<ContentVideo> content_video = decoder->get_video (dcp_to_content_video (piece, time), accurate);
                if (content_video.empty ()) {
                        pvf.push_back (black_player_video_frame ());
                        return pvf;
                }

                dcp::Size image_size = content->scale().size (content, _video_container_size, _film->frame_size ());
                if (_approximate_size) {
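                        /* Clearing the bottom two bits rounds each dimension down to a multiple
                           of 4; callers that only need approximately-sized images opt in to this
                           via set_approximate_size().
                        */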
                        image_size.width &= ~3;
                        image_size.height &= ~3;
                }

                for (list<ContentVideo>::const_iterator i = content_video.begin(); i != content_video.end(); ++i) {
                        pvf.push_back (
                                shared_ptr<PlayerVideoFrame> (
                                        new PlayerVideoFrame (
                                                i->image,
                                                content->crop (),
                                                image_size,
                                                _video_container_size,
                                                _film->scaler(),
                                                i->eyes,
                                                i->part,
                                                content->colour_conversion ()
                                                )
                                        )
                                );
                }
        }

        /* Add subtitles to whatever PlayerVideoFrames we got */

        list<shared_ptr<Piece> > subs = overlaps<SubtitleContent> (
                time,
                time + DCPTime::from_frames (1, _film->video_frame_rate ())
                );

        list<PositionImage> sub_images;

        for (list<shared_ptr<Piece> >::const_iterator j = subs.begin(); j != subs.end(); ++j) {
                shared_ptr<SubtitleDecoder> subtitle_decoder = dynamic_pointer_cast<SubtitleDecoder> ((*j)->decoder);
                shared_ptr<SubtitleContent> subtitle_content = dynamic_pointer_cast<SubtitleContent> ((*j)->content);
                ContentTime const from = dcp_to_content_subtitle (*j, time);
                /* XXX: this video_frame_rate() should be the rate that the subtitle content has been prepared for */
                ContentTime const to = from + ContentTime::from_frames (1, _film->video_frame_rate ());

                list<shared_ptr<ContentImageSubtitle> > image_subtitles = subtitle_decoder->get_image_subtitles (ContentTimePeriod (from, to));
                if (!image_subtitles.empty ()) {
                        list<PositionImage> im = process_content_image_subtitles (
                                subtitle_content,
                                image_subtitles
                                );

                        copy (im.begin(), im.end(), back_inserter (sub_images));
                }

                list<shared_ptr<ContentTextSubtitle> > text_subtitles = subtitle_decoder->get_text_subtitles (ContentTimePeriod (from, to));
                if (!text_subtitles.empty ()) {
                        list<PositionImage> im = process_content_text_subtitles (text_subtitles);
                        copy (im.begin(), im.end(), back_inserter (sub_images));
                }
        }

        if (!sub_images.empty ()) {
                for (list<shared_ptr<PlayerVideoFrame> >::const_iterator i = pvf.begin(); i != pvf.end(); ++i) {
                        (*i)->set_subtitle (merge (sub_images));
                }
        }

        return pvf;
}

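/** @return Audio for the period [time, time + length), mixed from all overlapping
 *  audio content and remapped into the film's DCP channel layout.
 */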
shared_ptr<AudioBuffers>
Player::get_audio (DCPTime time, DCPTime length, bool accurate)
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        AudioFrame const length_frames = length.frames (_film->audio_frame_rate ());

        shared_ptr<AudioBuffers> audio (new AudioBuffers (_film->audio_channels(), length_frames));
        audio->make_silent ();

        list<shared_ptr<Piece> > ov = overlaps<AudioContent> (time, time + length);
        if (ov.empty ()) {
                return audio;
        }

        for (list<shared_ptr<Piece> >::iterator i = ov.begin(); i != ov.end(); ++i) {

                shared_ptr<AudioContent> content = dynamic_pointer_cast<AudioContent> ((*i)->content);
                assert (content);
                shared_ptr<AudioDecoder> decoder = dynamic_pointer_cast<AudioDecoder> ((*i)->decoder);
                assert (decoder);

                if (content->audio_frame_rate() == 0) {
                        /* This AudioContent has no audio (e.g. if it is an FFmpegContent with no
                         * audio stream).
                         */
                        continue;
                }

                /* The time that we should request from the content */
                DCPTime request = time - DCPTime::from_seconds (content->audio_delay() / 1000.0);
                DCPTime offset;
                if (request < DCPTime ()) {
                        /* We went off the start of the content, so we will need to offset
                           the stuff we get back.
                        */
                        offset = -request;
                        request = DCPTime ();
                }

                AudioFrame const content_frame = dcp_to_content_audio (*i, request);

                /* Audio from this piece's decoder (which might be more or less than what we asked for) */
                shared_ptr<ContentAudio> all = decoder->get_audio (content_frame, length_frames, accurate);

                /* Gain */
                if (content->audio_gain() != 0) {
                        shared_ptr<AudioBuffers> gain (new AudioBuffers (all->audio));
                        gain->apply_gain (content->audio_gain ());
                        all->audio = gain;
                }

                /* Remap channels */
                shared_ptr<AudioBuffers> dcp_mapped (new AudioBuffers (_film->audio_channels(), all->audio->frames()));
                dcp_mapped->make_silent ();
                AudioMapping map = content->audio_mapping ();
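                /* Accumulate each content channel into every DCP channel for which the
                   mapping specifies a non-zero gain.
                */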
                for (int i = 0; i < map.content_channels(); ++i) {
                        for (int j = 0; j < _film->audio_channels(); ++j) {
                                if (map.get (i, static_cast<dcp::Channel> (j)) > 0) {
                                        dcp_mapped->accumulate_channel (
                                                all->audio.get(),
                                                i,
                                                j,
                                                map.get (i, static_cast<dcp::Channel> (j))
                                                );
                                }
                        }
                }

                all->audio = dcp_mapped;

                audio->accumulate_frames (
                        all->audio.get(),
                        content_frame - all->frame,
                        offset.frames (_film->audio_frame_rate()),
                        min (AudioFrame (all->audio->frames()), length_frames) - offset.frames (_film->audio_frame_rate ())
                        );
        }

        return audio;
}

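/** Convert a DCP time to a frame index within a piece's video content, allowing for
 *  the piece's position, start trim and frame rate change.
 */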
VideoFrame
Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
{
        /* s is the offset of t from the start position of this content */
        DCPTime s = t - piece->content->position ();
        s = DCPTime (max (int64_t (0), s.get ()));
        s = DCPTime (min (piece->content->length_after_trim().get(), s.get()));

        /* Convert this to the content frame */
        return DCPTime (s + piece->content->trim_start()).frames (_film->video_frame_rate()) * piece->frc.factor ();
}

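/** Convert a DCP time to a frame index within a piece's audio content, allowing for
 *  the piece's position and start trim.
 */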
AudioFrame
Player::dcp_to_content_audio (shared_ptr<const Piece> piece, DCPTime t) const
{
        /* s is the offset of t from the start position of this content */
        DCPTime s = t - piece->content->position ();
        s = DCPTime (max (int64_t (0), s.get ()));
        s = DCPTime (min (piece->content->length_after_trim().get(), s.get()));

        /* Convert this to the content frame */
        return DCPTime (s + piece->content->trim_start()).frames (_film->audio_frame_rate());
}

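/** Convert a DCP time to a ContentTime within a piece's subtitle content, allowing for
 *  the piece's position, start trim and frame rate change.
 */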
ContentTime
Player::dcp_to_content_subtitle (shared_ptr<const Piece> piece, DCPTime t) const
{
        /* s is the offset of t from the start position of this content */
        DCPTime s = t - piece->content->position ();
        s = DCPTime (max (int64_t (0), s.get ()));
        s = DCPTime (min (piece->content->length_after_trim().get(), s.get()));

        return ContentTime (s + piece->content->trim_start(), piece->frc);
}

void
PlayerStatistics::dump (shared_ptr<Log> log) const
{
        log->log (String::compose ("Video: %1 good %2 skipped %3 black %4 repeat", video.good, video.skip, video.black, video.repeat), Log::TYPE_GENERAL);
        log->log (String::compose ("Audio: %1 good %2 skipped %3 silence", audio.good, audio.skip, audio.silence.seconds()), Log::TYPE_GENERAL);
}

PlayerStatistics const &
Player::statistics () const
{
        return _statistics;
}