/*
    Copyright (C) 2013-2014 Carl Hetherington <cth@carlh.net>

    This program is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program; if not, write to the Free Software
    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.

*/

#include <stdint.h>
#include <algorithm>
#include "player.h"
#include "film.h"
#include "ffmpeg_decoder.h"
#include "audio_buffers.h"
#include "ffmpeg_content.h"
#include "image_decoder.h"
#include "image_content.h"
#include "sndfile_decoder.h"
#include "sndfile_content.h"
#include "subtitle_content.h"
#include "subrip_decoder.h"
#include "subrip_content.h"
#include "playlist.h"
#include "job.h"
#include "image.h"
#include "raw_image_proxy.h"
#include "ratio.h"
#include "log.h"
#include "scaler.h"
#include "render_subtitles.h"
#include "config.h"
#include "content_video.h"
#include "player_video.h"
#include "frame_rate_change.h"
#include "dcp_content.h"
#include "dcp_decoder.h"

#define LOG_GENERAL(...) _film->log()->log (String::compose (__VA_ARGS__), Log::TYPE_GENERAL);

using std::list;
using std::cout;
using std::min;
using std::max;
using std::vector;
using std::pair;
using std::map;
using std::make_pair;
using boost::shared_ptr;
using boost::weak_ptr;
using boost::dynamic_pointer_cast;
using boost::optional;

Player::Player (shared_ptr<const Film> f, shared_ptr<const Playlist> p)
        : _film (f)
        , _playlist (p)
        , _have_valid_pieces (false)
        , _approximate_size (false)
{
        _playlist_changed_connection = _playlist->Changed.connect (bind (&Player::playlist_changed, this));
        _playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Player::content_changed, this, _1, _2, _3));
        _film_changed_connection = _film->Changed.connect (bind (&Player::film_changed, this, _1));
        set_video_container_size (_film->frame_size ());
}

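/** Rebuild _pieces from the playlist's content, re-using existing ImageDecoders where possible. */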
void
Player::setup_pieces ()
{
        list<shared_ptr<Piece> > old_pieces = _pieces;
        _pieces.clear ();

        ContentList content = _playlist->content ();

        for (ContentList::iterator i = content.begin(); i != content.end(); ++i) {

                if (!(*i)->paths_valid ()) {
                        continue;
                }

                shared_ptr<Decoder> decoder;
                optional<FrameRateChange> frc;

                /* Work out a FrameRateChange for the best overlap video for this content, in case we need it below */
                DCPTime best_overlap_t;
                shared_ptr<VideoContent> best_overlap;
                for (ContentList::iterator j = content.begin(); j != content.end(); ++j) {
                        shared_ptr<VideoContent> vc = dynamic_pointer_cast<VideoContent> (*j);
                        if (!vc) {
                                continue;
                        }

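                        /* The overlap of two pieces of content is min (ends) - max (positions);
                           it is positive only if the two pieces really do overlap in time.
                        */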
                        DCPTime const overlap = min (vc->end(), (*i)->end()) - max (vc->position(), (*i)->position());
                        if (overlap > best_overlap_t) {
                                best_overlap = vc;
                                best_overlap_t = overlap;
                        }
                }

                optional<FrameRateChange> best_overlap_frc;
                if (best_overlap) {
                        best_overlap_frc = FrameRateChange (best_overlap->video_frame_rate(), _film->video_frame_rate ());
                } else {
                        /* No video overlap; e.g. if the DCP is just audio */
                        best_overlap_frc = FrameRateChange (_film->video_frame_rate(), _film->video_frame_rate ());
                }

                /* FFmpeg */
                shared_ptr<const FFmpegContent> fc = dynamic_pointer_cast<const FFmpegContent> (*i);
                if (fc) {
                        decoder.reset (new FFmpegDecoder (fc, _film->log()));
                        frc = FrameRateChange (fc->video_frame_rate(), _film->video_frame_rate());
                }

                shared_ptr<const DCPContent> dc = dynamic_pointer_cast<const DCPContent> (*i);
                if (dc) {
                        decoder.reset (new DCPDecoder (dc, _film->log ()));
                        frc = FrameRateChange (dc->video_frame_rate(), _film->video_frame_rate());
                }

                /* ImageContent */
                shared_ptr<const ImageContent> ic = dynamic_pointer_cast<const ImageContent> (*i);
                if (ic) {
                        /* See if we can re-use an old ImageDecoder */
                        for (list<shared_ptr<Piece> >::const_iterator j = old_pieces.begin(); j != old_pieces.end(); ++j) {
                                shared_ptr<ImageDecoder> imd = dynamic_pointer_cast<ImageDecoder> ((*j)->decoder);
                                if (imd && imd->content() == ic) {
                                        decoder = imd;
                                }
                        }

                        if (!decoder) {
                                decoder.reset (new ImageDecoder (ic));
                        }

                        frc = FrameRateChange (ic->video_frame_rate(), _film->video_frame_rate());
                }

                /* SndfileContent */
                shared_ptr<const SndfileContent> sc = dynamic_pointer_cast<const SndfileContent> (*i);
                if (sc) {
                        decoder.reset (new SndfileDecoder (sc));
                        frc = best_overlap_frc;
                }

                /* SubRipContent */
                shared_ptr<const SubRipContent> rc = dynamic_pointer_cast<const SubRipContent> (*i);
                if (rc) {
                        decoder.reset (new SubRipDecoder (rc));
                        frc = best_overlap_frc;
                }

                _pieces.push_back (shared_ptr<Piece> (new Piece (*i, decoder, frc.get ())));
        }

        _have_valid_pieces = true;
}

void
Player::content_changed (weak_ptr<Content> w, int property, bool frequent)
{
        shared_ptr<Content> c = w.lock ();
        if (!c) {
                return;
        }

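        /* Changes to properties in this first group affect which pieces we have and how
           they must be decoded, so the pieces are rebuilt; the second group only changes
           how existing pieces are presented.
        */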
        if (
                property == ContentProperty::POSITION ||
                property == ContentProperty::LENGTH ||
                property == ContentProperty::TRIM_START ||
                property == ContentProperty::TRIM_END ||
                property == ContentProperty::PATH ||
                property == VideoContentProperty::VIDEO_FRAME_TYPE
                ) {

                _have_valid_pieces = false;
                Changed (frequent);

        } else if (
                property == SubtitleContentProperty::SUBTITLE_USE ||
                property == SubtitleContentProperty::SUBTITLE_X_OFFSET ||
                property == SubtitleContentProperty::SUBTITLE_Y_OFFSET ||
                property == SubtitleContentProperty::SUBTITLE_SCALE ||
                property == VideoContentProperty::VIDEO_CROP ||
                property == VideoContentProperty::VIDEO_SCALE ||
                property == VideoContentProperty::VIDEO_FRAME_RATE
                ) {

                Changed (frequent);
        }
}

/** Called when our playlist changes */
void
Player::playlist_changed ()
{
        _have_valid_pieces = false;
        Changed (false);
}

void
Player::set_video_container_size (dcp::Size s)
{
        _video_container_size = s;

        _black_image.reset (new Image (PIX_FMT_RGB24, _video_container_size, true));
        _black_image->make_black ();
}

void
Player::film_changed (Film::Property p)
{
        /* Here we should notice Film properties that affect our output, and
           alert listeners that our output now would be different to how it was
           last time we were run.
        */

        if (p == Film::SCALER || p == Film::CONTAINER || p == Film::VIDEO_FRAME_RATE) {
                Changed (false);
        }
}

list<PositionImage>
Player::transform_image_subtitles (list<ImageSubtitle> subs) const
{
        list<PositionImage> all;

        for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
                if (!i->image) {
                        continue;
                }

                /* We will scale the subtitle up to fit _video_container_size */
                dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);

                /* Then we need a corrective translation, consisting of two parts:
                 *
                 * 1.  that which is the result of the scaling of the subtitle by _video_container_size; this will be
                 *     rect.x * _video_container_size.width and rect.y * _video_container_size.height.
                 *
                 * 2.  that to shift the origin of the scale by subtitle_scale to the centre of the subtitle; this will be
                 *     (width_before_subtitle_scale * (1 - subtitle_scale) / 2) and
                 *     (height_before_subtitle_scale * (1 - subtitle_scale) / 2).
                 *
                 * Combining these two translations gives these expressions.
                 */

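                /* For example, in a 1998x1080 container a rectangle with x = 0.1 ends up
                   rint (1998 * 0.1) = 200 pixels from the left-hand edge.
                */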
                all.push_back (
                        PositionImage (
                                i->image->scale (
                                        scaled_size,
                                        Scaler::from_id ("bicubic"),
                                        i->image->pixel_format (),
                                        true
                                        ),
                                Position<int> (
                                        rint (_video_container_size.width * i->rectangle.x),
                                        rint (_video_container_size.height * i->rectangle.y)
                                        )
                                )
                        );
        }

        return all;
}

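/** Tell the player that video image sizes need only be approximate: they will be
 *  rounded down to the nearest multiple of 4 pixels in each dimension (see get_video).
 */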
void
Player::set_approximate_size ()
{
        _approximate_size = true;
}

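/** @return A black frame at the given time, filling the whole video container */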
shared_ptr<PlayerVideo>
Player::black_player_video_frame (DCPTime time) const
{
        return shared_ptr<PlayerVideo> (
                new PlayerVideo (
                        shared_ptr<const ImageProxy> (new RawImageProxy (_black_image, _film->log ())),
                        time,
                        Crop (),
                        _video_container_size,
                        _video_container_size,
                        Scaler::from_id ("bicubic"),
                        EYES_BOTH,
                        PART_WHOLE,
                        Config::instance()->colour_conversions().front().conversion
                )
        );
}

/** @return All PlayerVideos at the given time (there may be two frames for 3D) */
list<shared_ptr<PlayerVideo> >
Player::get_video (DCPTime time, bool accurate)
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        list<shared_ptr<Piece> > ov = overlaps<VideoContent> (
                time,
                time + DCPTime::from_frames (1, _film->video_frame_rate ())
                );

        list<shared_ptr<PlayerVideo> > pvf;

        if (ov.empty ()) {
                /* No video content at this time */
                pvf.push_back (black_player_video_frame (time));
        } else {
                /* Create a PlayerVideo from the content's video at this time */

                shared_ptr<Piece> piece = ov.back ();
                shared_ptr<VideoDecoder> decoder = dynamic_pointer_cast<VideoDecoder> (piece->decoder);
                assert (decoder);
                shared_ptr<VideoContent> content = dynamic_pointer_cast<VideoContent> (piece->content);
                assert (content);

                list<ContentVideo> content_video = decoder->get_video (dcp_to_content_video (piece, time), accurate);
                if (content_video.empty ()) {
                        pvf.push_back (black_player_video_frame (time));
                        return pvf;
                }

                dcp::Size image_size = content->scale().size (content, _video_container_size, _film->frame_size ());
                if (_approximate_size) {
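                        /* Round the size down to the nearest multiple of 4 in each dimension */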
                        image_size.width &= ~3;
                        image_size.height &= ~3;
                }

                for (list<ContentVideo>::const_iterator i = content_video.begin(); i != content_video.end(); ++i) {
                        pvf.push_back (
                                shared_ptr<PlayerVideo> (
                                        new PlayerVideo (
                                                i->image,
                                                content_video_to_dcp (piece, i->frame),
                                                content->crop (),
                                                image_size,
                                                _video_container_size,
                                                _film->scaler(),
                                                i->eyes,
                                                i->part,
                                                content->colour_conversion ()
                                                )
                                        )
                                );
                }
        }

        /* Add subtitles (for possible burn-in) to whatever PlayerVideos we got */

        PlayerSubtitles ps = get_subtitles (time, DCPTime::from_frames (1, _film->video_frame_rate ()), false);

        list<PositionImage> sub_images;

        /* Image subtitles */
        list<PositionImage> c = transform_image_subtitles (ps.image);
        copy (c.begin(), c.end(), back_inserter (sub_images));

        /* Text subtitles (rendered to images) */
        sub_images.push_back (render_subtitles (ps.text, _video_container_size));

        if (!sub_images.empty ()) {
                for (list<shared_ptr<PlayerVideo> >::const_iterator i = pvf.begin(); i != pvf.end(); ++i) {
                        (*i)->set_subtitle (merge (sub_images));
                }
        }

        return pvf;
}

shared_ptr<AudioBuffers>
Player::get_audio (DCPTime time, DCPTime length, bool accurate)
{
        if (!_have_valid_pieces) {
                setup_pieces ();
        }

        AudioFrame const length_frames = length.frames (_film->audio_frame_rate ());

        shared_ptr<AudioBuffers> audio (new AudioBuffers (_film->audio_channels(), length_frames));
        audio->make_silent ();

        list<shared_ptr<Piece> > ov = overlaps<AudioContent> (time, time + length);
        if (ov.empty ()) {
                return audio;
        }

        for (list<shared_ptr<Piece> >::iterator i = ov.begin(); i != ov.end(); ++i) {

                shared_ptr<AudioContent> content = dynamic_pointer_cast<AudioContent> ((*i)->content);
                assert (content);
                shared_ptr<AudioDecoder> decoder = dynamic_pointer_cast<AudioDecoder> ((*i)->decoder);
                assert (decoder);

                if (content->audio_frame_rate() == 0) {
                        /* This AudioContent has no audio (e.g. if it is an FFmpegContent with no
                         * audio stream).
                         */
                        continue;
                }

                /* The time that we should request from the content */
                DCPTime request = time - DCPTime::from_seconds (content->audio_delay() / 1000.0);
                DCPTime offset;
                if (request < DCPTime ()) {
                        /* We went off the start of the content, so we will need to offset
                           the stuff we get back.
                        */
                        offset = -request;
                        request = DCPTime ();
                }

                AudioFrame const content_frame = dcp_to_content_audio (*i, request);

                /* Audio from this piece's decoder (which might be more or less than what we asked for) */
                shared_ptr<ContentAudio> all = decoder->get_audio (content_frame, length_frames, accurate);

                /* Gain */
                if (content->audio_gain() != 0) {
                        shared_ptr<AudioBuffers> gain (new AudioBuffers (all->audio));
                        gain->apply_gain (content->audio_gain ());
                        all->audio = gain;
                }

                /* Remap channels */
                shared_ptr<AudioBuffers> dcp_mapped (new AudioBuffers (_film->audio_channels(), all->audio->frames()));
                dcp_mapped->make_silent ();
                AudioMapping map = content->audio_mapping ();
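                /* Mix each content channel into each DCP channel, weighted by the gain
                   given in the content's audio mapping.
                */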
                for (int i = 0; i < map.content_channels(); ++i) {
                        for (int j = 0; j < _film->audio_channels(); ++j) {
                                if (map.get (i, static_cast<dcp::Channel> (j)) > 0) {
                                        dcp_mapped->accumulate_channel (
                                                all->audio.get(),
                                                i,
                                                j,
                                                map.get (i, static_cast<dcp::Channel> (j))
                                                );
                                }
                        }
                }

                all->audio = dcp_mapped;

                audio->accumulate_frames (
                        all->audio.get(),
                        content_frame - all->frame,
                        offset.frames (_film->audio_frame_rate()),
                        min (AudioFrame (all->audio->frames()), length_frames) - offset.frames (_film->audio_frame_rate ())
                        );
        }

        return audio;
}

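/** Convert a DCPTime to a frame index in a piece's video content, clamping to the
 *  content's trimmed extent and allowing for any frame rate change.
 */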
VideoFrame
Player::dcp_to_content_video (shared_ptr<const Piece> piece, DCPTime t) const
{
        /* s is the offset of t from the start position of this content */
        DCPTime s = t - piece->content->position ();
        s = DCPTime (max (DCPTime::Type (0), s.get ()));
        s = DCPTime (min (piece->content->length_after_trim().get(), s.get()));

        /* Convert this to the content frame */
        return DCPTime (s + piece->content->trim_start()).frames (_film->video_frame_rate()) * piece->frc.factor ();
}

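/** Convert a frame index in a piece's video content to a DCPTime, clamped so that
 *  it is never negative.
 */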
DCPTime
Player::content_video_to_dcp (shared_ptr<const Piece> piece, VideoFrame f) const
{
        DCPTime t = DCPTime::from_frames (f / piece->frc.factor (), _film->video_frame_rate()) - piece->content->trim_start () + piece->content->position ();
        if (t < DCPTime ()) {
                t = DCPTime ();
        }

        return t;
}

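/** Convert a DCPTime to a frame index in a piece's audio content, clamping to the
 *  content's trimmed extent.
 */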
AudioFrame
Player::dcp_to_content_audio (shared_ptr<const Piece> piece, DCPTime t) const
{
        /* s is the offset of t from the start position of this content */
        DCPTime s = t - piece->content->position ();
        s = DCPTime (max (DCPTime::Type (0), s.get ()));
        s = DCPTime (min (piece->content->length_after_trim().get(), s.get()));

        /* Convert this to the content frame */
        return DCPTime (s + piece->content->trim_start()).frames (_film->audio_frame_rate());
}

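/** Convert a DCPTime to a ContentTime within a piece's subtitle content, clamping to
 *  the content's trimmed extent and allowing for any frame rate change.
 */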
ContentTime
Player::dcp_to_content_subtitle (shared_ptr<const Piece> piece, DCPTime t) const
{
        /* s is the offset of t from the start position of this content */
        DCPTime s = t - piece->content->position ();
        s = DCPTime (max (DCPTime::Type (0), s.get ()));
        s = DCPTime (min (piece->content->length_after_trim().get(), s.get()));

        return ContentTime (s + piece->content->trim_start(), piece->frc);
}

void
PlayerStatistics::dump (shared_ptr<Log> log) const
{
        log->log (String::compose ("Video: %1 good %2 skipped %3 black %4 repeat", video.good, video.skip, video.black, video.repeat), Log::TYPE_GENERAL);
        log->log (String::compose ("Audio: %1 good %2 skipped %3 silence", audio.good, audio.skip, audio.silence.seconds()), Log::TYPE_GENERAL);
}

PlayerStatistics const &
Player::statistics () const
{
        return _statistics;
}

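/** @param time Start of the period for which subtitles are required.
 *  @param length Length of the period.
 *  @param starting Passed on to the subtitle decoders.
 *  @return Subtitles that should be shown during the given period.
 */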
PlayerSubtitles
Player::get_subtitles (DCPTime time, DCPTime length, bool starting)
{
        list<shared_ptr<Piece> > subs = overlaps<SubtitleContent> (time, time + length);

        PlayerSubtitles ps (time, length);

        for (list<shared_ptr<Piece> >::const_iterator j = subs.begin(); j != subs.end(); ++j) {
                shared_ptr<SubtitleContent> subtitle_content = dynamic_pointer_cast<SubtitleContent> ((*j)->content);
                if (!subtitle_content->subtitle_use ()) {
                        continue;
                }

                shared_ptr<SubtitleDecoder> subtitle_decoder = dynamic_pointer_cast<SubtitleDecoder> ((*j)->decoder);
                ContentTime const from = dcp_to_content_subtitle (*j, time);
                /* XXX: this video_frame_rate() should be the rate that the subtitle content has been prepared for */
                ContentTime const to = from + ContentTime::from_frames (1, _film->video_frame_rate ());

                list<ContentImageSubtitle> image = subtitle_decoder->get_image_subtitles (ContentTimePeriod (from, to), starting);
                for (list<ContentImageSubtitle>::iterator i = image.begin(); i != image.end(); ++i) {

                        /* Apply content's subtitle offsets */
                        i->sub.rectangle.x += subtitle_content->subtitle_x_offset ();
                        i->sub.rectangle.y += subtitle_content->subtitle_y_offset ();

                        /* Apply content's subtitle scale */
                        i->sub.rectangle.width *= subtitle_content->subtitle_scale ();
                        i->sub.rectangle.height *= subtitle_content->subtitle_scale ();

                        /* Apply a corrective translation to keep the subtitle centred after that scale */
                        i->sub.rectangle.x -= i->sub.rectangle.width * (subtitle_content->subtitle_scale() - 1);
                        i->sub.rectangle.y -= i->sub.rectangle.height * (subtitle_content->subtitle_scale() - 1);

                        ps.image.push_back (i->sub);
                }

                list<ContentTextSubtitle> text = subtitle_decoder->get_text_subtitles (ContentTimePeriod (from, to), starting);
                for (list<ContentTextSubtitle>::const_iterator i = text.begin(); i != text.end(); ++i) {
                        copy (i->subs.begin(), i->subs.end(), back_inserter (ps.text));
                }
        }

        return ps;
}