Allow moving-still-image sources to have their frame rate specified.
diff --git a/src/lib/player.cc b/src/lib/player.cc
index 02d3903654649f33ba7ce582221768a3f34447fe..9f859969341fa65a1eacc22d0c3da9b7002387c0 100644
--- a/src/lib/player.cc
+++ b/src/lib/player.cc
 #include "film.h"
 #include "ffmpeg_decoder.h"
 #include "ffmpeg_content.h"
-#include "still_image_decoder.h"
-#include "still_image_content.h"
-#include "moving_image_decoder.h"
-#include "moving_image_content.h"
+#include "image_decoder.h"
+#include "image_content.h"
 #include "sndfile_decoder.h"
 #include "sndfile_content.h"
 #include "subtitle_content.h"
@@ -66,9 +64,9 @@ public:
                , audio_position (c->position ())
        {}
 
+       /** Set this piece to repeat a video frame a given number of times */
        void set_repeat (IncomingVideo video, int num)
        {
-               cout << "Set repeat " << num << "\n";
                repeat_video = video;
                repeat_to_do = num;
                repeat_done = 0;
@@ -88,12 +86,11 @@ public:
 
        void repeat (Player* player)
        {
-               cout << "repeating; " << repeat_done << "\n";
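+               /* Emit the stored video frame again, at the time of the next repeat */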
                player->process_video (
                        repeat_video.weak_piece,
                        repeat_video.image,
                        repeat_video.eyes,
-                       repeat_video.same,
+                       repeat_done > 0,
                        repeat_video.frame,
                        (repeat_done + 1) * (TIME_HZ / player->_film->video_frame_rate ())
                        );
@@ -103,7 +100,9 @@ public:
        
        shared_ptr<Content> content;
        shared_ptr<Decoder> decoder;
+       /** Time of the last video we emitted relative to the start of the DCP */
        Time video_position;
+       /** Time of the last audio we emitted relative to the start of the DCP */
        Time audio_position;
 
        IncomingVideo repeat_video;
@@ -145,7 +144,6 @@ Player::pass ()
 {
        if (!_have_valid_pieces) {
                setup_pieces ();
-               _have_valid_pieces = true;
        }
 
        Time earliest_t = TIME_MAX;
@@ -157,11 +155,13 @@ Player::pass ()
 
        for (list<shared_ptr<Piece> >::iterator i = _pieces.begin(); i != _pieces.end(); ++i) {
                if ((*i)->decoder->done ()) {
-                       cout << "Scan: done.\n";
                        continue;
                }
 
-               if (_video && dynamic_pointer_cast<VideoDecoder> ((*i)->decoder)) {
+               shared_ptr<VideoDecoder> vd = dynamic_pointer_cast<VideoDecoder> ((*i)->decoder);
+               shared_ptr<AudioDecoder> ad = dynamic_pointer_cast<AudioDecoder> ((*i)->decoder);
+
+               if (_video && vd) {
                        if ((*i)->video_position < earliest_t) {
                                earliest_t = (*i)->video_position;
                                earliest = *i;
@@ -169,7 +169,7 @@ Player::pass ()
                        }
                }
 
-               if (_audio && dynamic_pointer_cast<AudioDecoder> ((*i)->decoder)) {
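+               /* Only consider decoders which have any audio */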
+               if (_audio && ad && ad->has_audio ()) {
                        if ((*i)->audio_position < earliest_t) {
                                earliest_t = (*i)->audio_position;
                                earliest = *i;
@@ -179,31 +179,24 @@ Player::pass ()
        }
 
        if (!earliest) {
-               cout << "No earliest: out.\n";
                flush ();
                return true;
        }
 
-       cout << "Earliest: " << earliest_t << "\n";
-
        switch (type) {
        case VIDEO:
-               cout << "VIDEO.\n";
                if (earliest_t > _video_position) {
                        emit_black ();
                } else {
                        if (earliest->repeating ()) {
-                               cout << "-repeating.\n";
                                earliest->repeat (this);
                        } else {
-                               cout << "-passing.\n";
                                earliest->decoder->pass ();
                        }
                }
                break;
 
        case AUDIO:
-               cout << "SOUND.\n";
                if (earliest_t > _audio_position) {
                        emit_silence (_film->time_to_audio_frames (earliest_t - _audio_position));
                } else {
@@ -225,32 +218,38 @@ Player::pass ()
        }
 
        if (_audio) {
-               Time audio_done_up_to = TIME_MAX;
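+               /* Find the earliest audio position of any decoder which is still producing
+                  audio; the merger can safely be pulled up to that time.
+               */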
+               boost::optional<Time> audio_done_up_to;
                for (list<shared_ptr<Piece> >::iterator i = _pieces.begin(); i != _pieces.end(); ++i) {
+                       if ((*i)->decoder->done ()) {
+                               continue;
+                       }
+
                        if (dynamic_pointer_cast<AudioDecoder> ((*i)->decoder)) {
-                               audio_done_up_to = min (audio_done_up_to, (*i)->audio_position);
+                               audio_done_up_to = min (audio_done_up_to.get_value_or (TIME_MAX), (*i)->audio_position);
                        }
                }
 
-               TimedAudioBuffers<Time> tb = _audio_merger.pull (audio_done_up_to);
-               Audio (tb.audio, tb.time);
-               _audio_position += _film->audio_frames_to_time (tb.audio->frames ());
+               if (audio_done_up_to) {
+                       TimedAudioBuffers<Time> tb = _audio_merger.pull (audio_done_up_to.get ());
+                       Audio (tb.audio, tb.time);
+                       _audio_position += _film->audio_frames_to_time (tb.audio->frames ());
+               }
        }
                
        return false;
 }
 
+/** @param extra Amount of extra time to add to the content frame's time (for repeat) */
 void
 Player::process_video (weak_ptr<Piece> weak_piece, shared_ptr<const Image> image, Eyes eyes, bool same, VideoContent::Frame frame, Time extra)
 {
-       cout << "PLAYER RECEIVES A VIDEO FRAME, extra " << extra << "\n";
-       
        /* Keep a note of what came in so that we can repeat it if required */
        _last_incoming_video.weak_piece = weak_piece;
        _last_incoming_video.image = image;
        _last_incoming_video.eyes = eyes;
        _last_incoming_video.same = same;
        _last_incoming_video.frame = frame;
+       _last_incoming_video.extra = extra;
        
        shared_ptr<Piece> piece = weak_piece.lock ();
        if (!piece) {
@@ -270,42 +269,38 @@ Player::process_video (weak_ptr<Piece> weak_piece, shared_ptr<const Image> image
                return;
        }
 
-       /* Convert to RGB first, as FFmpeg doesn't seem to like handling YUV images with odd widths */
-       shared_ptr<Image> work_image = image->scale (image->size (), _film->scaler(), PIX_FMT_RGB24, true);
-
-       work_image = work_image->crop (content->crop(), true);
-
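+       /* Time of this frame relative to the start of the DCP */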
+       Time const time = content->position() + relative_time + extra - content->trim_start ();
        float const ratio = content->ratio() ? content->ratio()->ratio() : content->video_size_after_crop().ratio();
-       libdcp::Size image_size = fit_ratio_within (ratio, _video_container_size);
+       libdcp::Size const image_size = fit_ratio_within (ratio, _video_container_size);
+
+       shared_ptr<PlayerImage> pi (
+               new PlayerImage (
+                       image,
+                       content->crop(),
+                       image_size,
+                       _video_container_size,
+                       _film->scaler()
+                       )
+               );
        
-       work_image = work_image->scale (image_size, _film->scaler(), PIX_FMT_RGB24, true);
-
-       Time time = content->position() + relative_time + extra - content->trim_start ();
-           
        if (_film->with_subtitles () && _out_subtitle.image && time >= _out_subtitle.from && time <= _out_subtitle.to) {
-               work_image->alpha_blend (_out_subtitle.image, _out_subtitle.position);
-       }
 
-       if (image_size != _video_container_size) {
-               assert (image_size.width <= _video_container_size.width);
-               assert (image_size.height <= _video_container_size.height);
-               shared_ptr<Image> im (new Image (PIX_FMT_RGB24, _video_container_size, true));
-               im->make_black ();
-               im->copy (work_image, Position<int> ((_video_container_size.width - image_size.width) / 2, (_video_container_size.height - image_size.height) / 2));
-               work_image = im;
-       }
+               Position<int> const container_offset (
+                       (_video_container_size.width - image_size.width) / 2,
+                       (_video_container_size.height - image_size.height) / 2
+                       );
 
+               pi->set_subtitle (_out_subtitle.image, _out_subtitle.position + container_offset);
+       }
+
 #ifdef DCPOMATIC_DEBUG
        _last_video = piece->content;
 #endif
 
-       Video (work_image, eyes, content->colour_conversion(), same, time);
+       Video (pi, eyes, content->colour_conversion(), same, time);
 
-       time += TIME_HZ / _film->video_frame_rate();
        _last_emit_was_black = false;
-       _video_position = piece->video_position = time;
-
-       cout << "frc.repeat=" << frc.repeat << "; vp now " << _video_position << "\n";
+       _video_position = piece->video_position = (time + TIME_HZ / _film->video_frame_rate());
 
        if (frc.repeat > 1 && !piece->repeating ()) {
                piece->set_repeat (_last_incoming_video, frc.repeat - 1);
@@ -344,15 +339,23 @@ Player::process_audio (weak_ptr<Piece> weak_piece, shared_ptr<const AudioBuffers
                return;
        }
 
-       Time time = content->position() + (content->audio_delay() * TIME_HZ / 1000) + relative_time;
+       Time time = content->position() + (content->audio_delay() * TIME_HZ / 1000) + relative_time - content->trim_start ();
        
        /* Remap channels */
        shared_ptr<AudioBuffers> dcp_mapped (new AudioBuffers (_film->audio_channels(), audio->frames()));
        dcp_mapped->make_silent ();
-       list<pair<int, libdcp::Channel> > map = content->audio_mapping().content_to_dcp ();
-       for (list<pair<int, libdcp::Channel> >::iterator i = map.begin(); i != map.end(); ++i) {
-               if (i->first < audio->channels() && i->second < dcp_mapped->channels()) {
-                       dcp_mapped->accumulate_channel (audio.get(), i->first, i->second);
+
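+       /* Mix each content channel into each DCP channel according to the gain in the mapping */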
+       AudioMapping map = content->audio_mapping ();
+       for (int i = 0; i < map.content_channels(); ++i) {
+               for (int j = 0; j < _film->audio_channels(); ++j) {
+                       if (map.get (i, static_cast<libdcp::Channel> (j)) > 0) {
+                               dcp_mapped->accumulate_channel (
+                                       audio.get(),
+                                       i,
+                                       static_cast<libdcp::Channel> (j),
+                                       map.get (i, static_cast<libdcp::Channel> (j))
+                                       );
+                       }
                }
        }
 
@@ -380,16 +383,16 @@ void
 Player::flush ()
 {
        TimedAudioBuffers<Time> tb = _audio_merger.flush ();
-       if (tb.audio) {
+       if (_audio && tb.audio) {
                Audio (tb.audio, tb.time);
                _audio_position += _film->audio_frames_to_time (tb.audio->frames ());
        }
 
-       while (_video_position < _audio_position) {
+       while (_video && _video_position < _audio_position) {
                emit_black ();
        }
 
-       while (_audio_position < _video_position) {
+       while (_audio && _audio_position < _video_position) {
                emit_silence (_film->time_to_audio_frames (_video_position - _audio_position));
        }
        
@@ -404,7 +407,6 @@ Player::seek (Time t, bool accurate)
 {
        if (!_have_valid_pieces) {
                setup_pieces ();
-               _have_valid_pieces = true;
        }
 
        if (_pieces.empty ()) {
@@ -416,20 +418,19 @@ Player::seek (Time t, bool accurate)
                if (!vc) {
                        continue;
                }
-               
+
+               /* s is the offset of t from the start position of this content */
                Time s = t - vc->position ();
                s = max (static_cast<Time> (0), s);
                s = min (vc->length_after_trim(), s);
 
+               /* Hence set the piece positions to the `global' time */
                (*i)->video_position = (*i)->audio_position = vc->position() + s;
 
-               FrameRateConversion frc (vc->video_frame_rate(), _film->video_frame_rate());
-               /* Here we are converting from time (in the DCP) to a frame number in the content.
-                  Hence we need to use the DCP's frame rate and the double/skip correction, not
-                  the source's rate.
-               */
-               VideoContent::Frame f = (s + vc->trim_start ()) * _film->video_frame_rate() / (frc.factor() * TIME_HZ);
-               dynamic_pointer_cast<VideoDecoder>((*i)->decoder)->seek (f, accurate);
+               /* And seek the decoder */
+               dynamic_pointer_cast<VideoDecoder>((*i)->decoder)->seek (
+                       vc->time_to_content_video_frames (s + vc->trim_start ()), accurate
+                       );
 
                (*i)->reset_repeat ();
        }
@@ -459,55 +460,46 @@ Player::setup_pieces ()
                if (fc) {
                        shared_ptr<FFmpegDecoder> fd (new FFmpegDecoder (_film, fc, _video, _audio));
                        
-                       fd->Video.connect (bind (&Player::process_video, this, piece, _1, _2, _3, _4, 0));
-                       fd->Audio.connect (bind (&Player::process_audio, this, piece, _1, _2));
-                       fd->Subtitle.connect (bind (&Player::process_subtitle, this, piece, _1, _2, _3, _4));
+                       fd->Video.connect (bind (&Player::process_video, this, weak_ptr<Piece> (piece), _1, _2, _3, _4, 0));
+                       fd->Audio.connect (bind (&Player::process_audio, this, weak_ptr<Piece> (piece), _1, _2));
+                       fd->Subtitle.connect (bind (&Player::process_subtitle, this, weak_ptr<Piece> (piece), _1, _2, _3, _4));
 
+                       /* Seek over any start trim so that the decoder begins at the first frame we want */
+                       fd->seek (fc->time_to_content_video_frames (fc->trim_start ()), true);
                        piece->decoder = fd;
                }
                
-               shared_ptr<const StillImageContent> ic = dynamic_pointer_cast<const StillImageContent> (*i);
+               shared_ptr<const ImageContent> ic = dynamic_pointer_cast<const ImageContent> (*i);
                if (ic) {
-                       shared_ptr<StillImageDecoder> id;
+                       bool reusing = false;
                        
-                       /* See if we can re-use an old StillImageDecoder */
+                       /* See if we can re-use an old ImageDecoder */
                        for (list<shared_ptr<Piece> >::const_iterator j = old_pieces.begin(); j != old_pieces.end(); ++j) {
-                               shared_ptr<StillImageDecoder> imd = dynamic_pointer_cast<StillImageDecoder> ((*j)->decoder);
+                               shared_ptr<ImageDecoder> imd = dynamic_pointer_cast<ImageDecoder> ((*j)->decoder);
                                if (imd && imd->content() == ic) {
-                                       id = imd;
+                                       piece = *j;
+                                       reusing = true;
                                }
                        }
 
-                       if (!id) {
-                               id.reset (new StillImageDecoder (_film, ic));
-                               id->Video.connect (bind (&Player::process_video, this, piece, _1, _2, _3, _4, 0));
+                       if (!reusing) {
+                               shared_ptr<ImageDecoder> id (new ImageDecoder (_film, ic));
+                               id->Video.connect (bind (&Player::process_video, this, weak_ptr<Piece> (piece), _1, _2, _3, _4, 0));
+                               piece->decoder = id;
                        }
-
-                       piece->decoder = id;
-               }
-
-               shared_ptr<const MovingImageContent> mc = dynamic_pointer_cast<const MovingImageContent> (*i);
-               if (mc) {
-                       shared_ptr<MovingImageDecoder> md;
-
-                       if (!md) {
-                               md.reset (new MovingImageDecoder (_film, mc));
-                               md->Video.connect (bind (&Player::process_video, this, piece, _1, _2, _3, _4, 0));
-                       }
-
-                       piece->decoder = md;
                }
 
                shared_ptr<const SndfileContent> sc = dynamic_pointer_cast<const SndfileContent> (*i);
                if (sc) {
                        shared_ptr<AudioDecoder> sd (new SndfileDecoder (_film, sc));
-                       sd->Audio.connect (bind (&Player::process_audio, this, piece, _1, _2));
+                       sd->Audio.connect (bind (&Player::process_audio, this, weak_ptr<Piece> (piece), _1, _2));
 
                        piece->decoder = sd;
                }
 
                _pieces.push_back (piece);
        }
+
+       _have_valid_pieces = true;
 }
 
 void
@@ -520,7 +512,8 @@ Player::content_changed (weak_ptr<Content> w, int property, bool frequent)
 
        if (
                property == ContentProperty::POSITION || property == ContentProperty::LENGTH ||
-               property == ContentProperty::TRIM_START || property == ContentProperty::TRIM_END
+               property == ContentProperty::TRIM_START || property == ContentProperty::TRIM_END ||
+               property == VideoContentProperty::VIDEO_FRAME_TYPE 
                ) {
                
                _have_valid_pieces = false;
@@ -532,8 +525,8 @@ Player::content_changed (weak_ptr<Content> w, int property, bool frequent)
                Changed (frequent);
 
        } else if (
-               property == VideoContentProperty::VIDEO_FRAME_TYPE || property == VideoContentProperty::VIDEO_CROP ||
-               property == VideoContentProperty::VIDEO_RATIO
+               property == VideoContentProperty::VIDEO_CROP || property == VideoContentProperty::VIDEO_RATIO ||
+               property == VideoContentProperty::VIDEO_FRAME_RATE
                ) {
                
                Changed (frequent);
@@ -555,8 +548,19 @@ void
 Player::set_video_container_size (libdcp::Size s)
 {
        _video_container_size = s;
-       _black_frame.reset (new Image (PIX_FMT_RGB24, _video_container_size, true));
-       _black_frame->make_black ();
+
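+       /* Build the black frame as a PlayerImage so that it can be emitted through the
+          same Video signal as ordinary content.
+       */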
+       shared_ptr<Image> im (new Image (PIX_FMT_RGB24, _video_container_size, true));
+       im->make_black ();
+       
+       _black_frame.reset (
+               new PlayerImage (
+                       im,
+                       Crop(),
+                       _video_container_size,
+                       _video_container_size,
+                       Scaler::from_id ("bicubic")
+                       )
+               );
 }
 
 shared_ptr<Resampler>
@@ -570,6 +574,12 @@ Player::resampler (shared_ptr<AudioContent> c, bool create)
        if (!create) {
                return shared_ptr<Resampler> ();
        }
+
+       _film->log()->log (
+               String::compose (
+                       "Creating new resampler for %1 to %2 with %3 channels", c->content_audio_frame_rate(), c->output_audio_frame_rate(), c->audio_channels()
+                       )
+               );
        
        shared_ptr<Resampler> r (new Resampler (c->content_audio_frame_rate(), c->output_audio_frame_rate(), c->audio_channels()));
        _resamplers[c] = r;
@@ -582,7 +592,7 @@ Player::emit_black ()
 #ifdef DCPOMATIC_DEBUG
        _last_video.reset ();
 #endif
-       
+
        Video (_black_frame, EYES_BOTH, ColourConversion(), _last_emit_was_black, _video_position);
        _video_position += _film->video_frames_to_time (1);
        _last_emit_was_black = true;
@@ -610,7 +620,7 @@ Player::film_changed (Film::Property p)
           last time we were run.
        */
 
-       if (p == Film::SCALER || p == Film::WITH_SUBTITLES || p == Film::CONTAINER) {
+       if (p == Film::SCALER || p == Film::WITH_SUBTITLES || p == Film::CONTAINER || p == Film::VIDEO_FRAME_RATE) {
                Changed (false);
        }
 }
@@ -683,7 +693,7 @@ Player::update_subtitle ()
 bool
 Player::repeat_last_video ()
 {
-       if (!_last_incoming_video.image) {
+       if (!_last_incoming_video.image || !_have_valid_pieces) {
                return false;
        }
 
@@ -693,8 +703,45 @@ Player::repeat_last_video ()
                _last_incoming_video.eyes,
                _last_incoming_video.same,
                _last_incoming_video.frame,
-               0
+               _last_incoming_video.extra
                );
 
        return true;
 }
+
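+/** Construct a PlayerImage.
+ *
+ *  @param in Image to use.
+ *  @param crop Crop to apply to the image.
+ *  @param inter_size Size to scale the cropped image to.
+ *  @param out_size Size of the frame into which the scaled image will be placed.
+ *  @param scaler Scaler to use for scaling.
+ */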
+PlayerImage::PlayerImage (
+       shared_ptr<const Image> in,
+       Crop crop,
+       libdcp::Size inter_size,
+       libdcp::Size out_size,
+       Scaler const * scaler
+       )
+       : _in (in)
+       , _crop (crop)
+       , _inter_size (inter_size)
+       , _out_size (out_size)
+       , _scaler (scaler)
+{
+
+}
+
+void
+PlayerImage::set_subtitle (shared_ptr<const Image> image, Position<int> pos)
+{
+       _subtitle_image = image;
+       _subtitle_position = pos;
+}
+
+shared_ptr<Image>
+PlayerImage::image ()
+{
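+       /* Crop the input image, scale it to the intermediate size and place it within
+          the output size, then overlay any subtitle which has been set.
+       */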
+       shared_ptr<Image> out = _in->crop_scale_window (_crop, _inter_size, _out_size, _scaler, PIX_FMT_RGB24, false);
+
+       if (_subtitle_image) {
+               out->alpha_blend (_subtitle_image, _subtitle_position);
+       }
+
+       return out;
+}