Fix incorrect reel lengths in some cases; account for emitted data being rejected...
author: Carl Hetherington <cth@carlh.net>
Tue, 29 Aug 2017 23:59:26 +0000 (00:59 +0100)
committer: Carl Hetherington <cth@carlh.net>
Tue, 29 Aug 2017 23:59:26 +0000 (00:59 +0100)
src/lib/audio_decoder.cc
src/lib/audio_decoder.h
src/lib/dcp_encoder.cc
src/lib/player.cc
src/lib/player.h
src/lib/reel_writer.cc
src/lib/video_decoder.cc
src/lib/video_decoder.h

index 5425798f6b615a9ec10a3de15a8200b97dcb0219..16a03a8e93593cdc5879f1e33f72c93742660ffb 100644 (file)
@@ -94,8 +94,7 @@ AudioDecoder::emit (AudioStreamPtr stream, shared_ptr<const AudioBuffers> data,
                data = ro;
        }
 
-       Data (stream, ContentAudio (data, _positions[stream]));
-       _positions[stream] += data->frames();
+       _positions[stream] += Data(stream, ContentAudio (data, _positions[stream])).get_value_or(0);
 }
 
 /** @return Time just after the last thing that was emitted from a given stream */
index 19d10354351d0bef14461961b3758b56812fbd87..359540d6ffd1861343793f8f2d71f146b451ba70 100644 (file)
@@ -53,7 +53,8 @@ public:
 
        ContentTime stream_position (AudioStreamPtr stream) const;
 
-       boost::signals2::signal<void (AudioStreamPtr, ContentAudio)> Data;
+       /** @return Number of frames of data that were accepted */
+       boost::signals2::signal<Frame (AudioStreamPtr, ContentAudio)> Data;
 
 private:
        void silence (int milliseconds);
index 5ee6cca982a596b5bc1728c5390bf2c5409a28f1..81464c8dc9c7c43f9f810b3b32bf967bac52bed1 100644 (file)
@@ -121,6 +121,7 @@ DCPEncoder::video (shared_ptr<PlayerVideo> data, DCPTime time)
        _j2k_encoder->encode (data, time);
 }
 
+/** The audio data passed into this method must be contiguous and start from the last accurate seek time */
 void
 DCPEncoder::audio (shared_ptr<AudioBuffers> data, DCPTime time)
 {
index 7e21ef937b82a98f5ff3198ac07c2ea0a469cbfb..7d3f381ed64b221de7b2cbb795ae6097961182ea 100644 (file)
@@ -594,14 +594,15 @@ Player::pass ()
        list<pair<shared_ptr<AudioBuffers>, DCPTime> > audio = _audio_merger.pull (pull_to);
        for (list<pair<shared_ptr<AudioBuffers>, DCPTime> >::iterator i = audio.begin(); i != audio.end(); ++i) {
                if (_last_audio_time && i->second < *_last_audio_time) {
-                       /* There has been an accurate seek and we have received some audio before the seek time;
-                          discard it.
-                       */
+                       /* This new data comes before the last we emitted (or the last seek); discard it */
                        pair<shared_ptr<AudioBuffers>, DCPTime> cut = discard_audio (i->first, i->second, *_last_audio_time);
                        if (!cut.first) {
                                continue;
                        }
                        *i = cut;
+               } else if (_last_audio_time && i->second > *_last_audio_time) {
+                       /* There's a gap between this data and the last we emitted; fill with silence */
+                       fill_audio (DCPTimePeriod (*_last_audio_time, i->second));
                }
 
                emit_audio (i->first, i->second);
@@ -635,17 +636,17 @@ Player::subtitles_for_frame (DCPTime time) const
        return merge (subtitles);
 }
 
-void
+bool
 Player::video (weak_ptr<Piece> wp, ContentVideo video)
 {
        shared_ptr<Piece> piece = wp.lock ();
        if (!piece) {
-               return;
+               return false;
        }
 
        FrameRateChange frc(piece->content->active_video_frame_rate(), _film->video_frame_rate());
        if (frc.skip && (video.frame % 2) == 1) {
-               return;
+               return false;
        }
 
        /* Time of the first frame we will emit */
@@ -656,7 +657,7 @@ Player::video (weak_ptr<Piece> wp, ContentVideo video)
                time < piece->content->position() ||
                time >= piece->content->end() ||
                (_last_video_time && time < *_last_video_time)) {
-               return;
+               return false;
        }
 
        /* Fill gaps that we discover now that we have some video which needs to be emitted */
@@ -694,16 +695,18 @@ Player::video (weak_ptr<Piece> wp, ContentVideo video)
                emit_video (_last_video[wp], t);
                t += one_video_frame ();
        }
+
+       return true;
 }
 
-void
+Frame
 Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_audio)
 {
        DCPOMATIC_ASSERT (content_audio.audio->frames() > 0);
 
        shared_ptr<Piece> piece = wp.lock ();
        if (!piece) {
-               return;
+               return 0;
        }
 
        shared_ptr<AudioContent> content = piece->content->audio;
@@ -719,17 +722,17 @@ Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_a
                pair<shared_ptr<AudioBuffers>, DCPTime> cut = discard_audio (content_audio.audio, time, piece->content->position());
                if (!cut.first) {
                        /* This audio is entirely discarded */
-                       return;
+                       return 0;
                }
                content_audio.audio = cut.first;
                time = cut.second;
        } else if (time > piece->content->end()) {
                /* Discard it all */
-               return;
+               return 0;
        } else if (end > piece->content->end()) {
                Frame const remaining_frames = DCPTime(piece->content->end() - time).frames_round(_film->audio_frame_rate());
                if (remaining_frames == 0) {
-                       return;
+                       return 0;
                }
                shared_ptr<AudioBuffers> cut (new AudioBuffers (content_audio.audio->channels(), remaining_frames));
                cut->copy_from (content_audio.audio.get(), remaining_frames, 0, 0);
@@ -761,6 +764,7 @@ Player::audio (weak_ptr<Piece> wp, AudioStreamPtr stream, ContentAudio content_a
        _audio_merger.push (content_audio.audio, time);
        DCPOMATIC_ASSERT (_stream_states.find (stream) != _stream_states.end ());
        _stream_states[stream].last_push_end = time + DCPTime::from_frames (content_audio.audio->frames(), _film->audio_frame_rate());
+       return content_audio.audio->frames();
 }
 
 void
index 9dd5afd26ae3114e66b5a2121936702c5ab97014..ed55b62849eec0a763811fa10bf20a5ed2c496b4 100644 (file)
@@ -105,8 +105,8 @@ private:
        ContentTime dcp_to_content_time (boost::shared_ptr<const Piece> piece, DCPTime t) const;
        DCPTime content_time_to_dcp (boost::shared_ptr<const Piece> piece, ContentTime t) const;
        boost::shared_ptr<PlayerVideo> black_player_video_frame () const;
-       void video (boost::weak_ptr<Piece>, ContentVideo);
-       void audio (boost::weak_ptr<Piece>, AudioStreamPtr, ContentAudio);
+       bool video (boost::weak_ptr<Piece>, ContentVideo);
+       Frame audio (boost::weak_ptr<Piece>, AudioStreamPtr, ContentAudio);
        void image_subtitle_start (boost::weak_ptr<Piece>, ContentImageSubtitle);
        void text_subtitle_start (boost::weak_ptr<Piece>, ContentTextSubtitle);
        void subtitle_stop (boost::weak_ptr<Piece>, ContentTime);
index 118a5b74ca41a9cc9d501e0facf136c775c93461..fde977c3a97838e4247c603ec80de9eabe097c3c 100644 (file)
@@ -364,11 +364,13 @@ ReelWriter::create_reel (list<ReferencedReelAsset> const & refs, list<shared_ptr
 
        LOG_GENERAL ("create_reel for %1-%2; %3 of %4", _period.from.get(), _period.to.get(), _reel_index, _reel_count);
 
+       Frame const period_duration = _period.duration().frames_round(_film->video_frame_rate());
+
        DCPOMATIC_ASSERT (reel_picture_asset);
-       if (reel_picture_asset->duration() != _period.duration().frames_round (_film->video_frame_rate ())) {
+       if (reel_picture_asset->duration() != period_duration) {
                throw ProgrammingError (
                        __FILE__, __LINE__,
-                       String::compose ("%1 vs %2", reel_picture_asset->duration(), _period.duration().frames_round (_film->video_frame_rate ()))
+                       String::compose ("%1 vs %2", reel_picture_asset->duration(), period_duration)
                        );
        }
        reel->add (reel_picture_asset);
@@ -398,13 +400,19 @@ ReelWriter::create_reel (list<ReferencedReelAsset> const & refs, list<shared_ptr
        }
 
        DCPOMATIC_ASSERT (reel_sound_asset);
-       if (reel_sound_asset->duration() != _period.duration().frames_round (_film->video_frame_rate ())) {
+       if (reel_sound_asset->duration() != period_duration) {
                LOG_ERROR (
                        "Reel sound asset has length %1 but reel period is %2",
                        reel_sound_asset->duration(),
-                       _period.duration().frames_round(_film->video_frame_rate())
+                       period_duration
                        );
-               DCPOMATIC_ASSERT (reel_sound_asset->duration() == _period.duration().frames_round (_film->video_frame_rate ()));
+               if (reel_sound_asset->duration() != period_duration) {
+                       throw ProgrammingError (
+                               __FILE__, __LINE__,
+                               String::compose ("%1 vs %2", reel_sound_asset->duration(), period_duration)
+                               );
+               }
+
        }
        reel->add (reel_sound_asset);
 
@@ -473,7 +481,7 @@ ReelWriter::create_reel (list<ReferencedReelAsset> const & refs, list<shared_ptr
        }
 
        if (reel_subtitle_asset) {
-               DCPOMATIC_ASSERT (reel_subtitle_asset->duration() == _period.duration().frames_round (_film->video_frame_rate ()));
+               DCPOMATIC_ASSERT (reel_subtitle_asset->duration() == period_duration);
                reel->add (reel_subtitle_asset);
        }
 
index afd4a83eeb12c7219b0049a7b892a4d2b69b10f6..dceadcd15f1d42392e7a55b5cfe97a4b7f121f24 100644 (file)
@@ -59,9 +59,11 @@ VideoDecoder::emit (shared_ptr<const ImageProxy> image, Frame frame)
                return;
        }
 
+       optional<bool> taken;
+
        switch (_content->video->frame_type ()) {
        case VIDEO_FRAME_TYPE_2D:
-               Data (ContentVideo (image, frame, EYES_BOTH, PART_WHOLE));
+               taken = Data (ContentVideo (image, frame, EYES_BOTH, PART_WHOLE));
                break;
        case VIDEO_FRAME_TYPE_3D:
        {
@@ -69,31 +71,33 @@ VideoDecoder::emit (shared_ptr<const ImageProxy> image, Frame frame)
                   frame this one is.
                */
                bool const same = (_last_emitted && _last_emitted.get() == frame);
-               Data (ContentVideo (image, frame, same ? EYES_RIGHT : EYES_LEFT, PART_WHOLE));
+               taken = Data (ContentVideo (image, frame, same ? EYES_RIGHT : EYES_LEFT, PART_WHOLE));
                _last_emitted = frame;
                break;
        }
        case VIDEO_FRAME_TYPE_3D_ALTERNATE:
-               Data (ContentVideo (image, frame / 2, (frame % 2) ? EYES_RIGHT : EYES_LEFT, PART_WHOLE));
+               taken = Data (ContentVideo (image, frame / 2, (frame % 2) ? EYES_RIGHT : EYES_LEFT, PART_WHOLE));
                frame /= 2;
                break;
        case VIDEO_FRAME_TYPE_3D_LEFT_RIGHT:
-               Data (ContentVideo (image, frame, EYES_LEFT, PART_LEFT_HALF));
-               Data (ContentVideo (image, frame, EYES_RIGHT, PART_RIGHT_HALF));
+               taken = Data (ContentVideo (image, frame, EYES_LEFT, PART_LEFT_HALF));
+               taken = Data (ContentVideo (image, frame, EYES_RIGHT, PART_RIGHT_HALF));
                break;
        case VIDEO_FRAME_TYPE_3D_TOP_BOTTOM:
-               Data (ContentVideo (image, frame, EYES_LEFT, PART_TOP_HALF));
-               Data (ContentVideo (image, frame, EYES_RIGHT, PART_BOTTOM_HALF));
+               taken = Data (ContentVideo (image, frame, EYES_LEFT, PART_TOP_HALF));
+               taken = Data (ContentVideo (image, frame, EYES_RIGHT, PART_BOTTOM_HALF));
                break;
        case VIDEO_FRAME_TYPE_3D_LEFT:
-               Data (ContentVideo (image, frame, EYES_LEFT, PART_WHOLE));
+               taken = Data (ContentVideo (image, frame, EYES_LEFT, PART_WHOLE));
                break;
        case VIDEO_FRAME_TYPE_3D_RIGHT:
-               Data (ContentVideo (image, frame, EYES_RIGHT, PART_WHOLE));
+               taken = Data (ContentVideo (image, frame, EYES_RIGHT, PART_WHOLE));
                break;
        default:
                DCPOMATIC_ASSERT (false);
        }
 
-       _position = ContentTime::from_frames (frame, _content->active_video_frame_rate ());
+       if (taken.get_value_or(false)) {
+               _position = ContentTime::from_frames (frame, _content->active_video_frame_rate ());
+       }
 }
index e16884568555a72b7c384ffa3a29376ed9681910..8b199bd1ddfb27836ddd07380e0387f5f44156e2 100644 (file)
@@ -57,7 +57,8 @@ public:
 
        void emit (boost::shared_ptr<const ImageProxy>, Frame frame);
 
-       boost::signals2::signal<void (ContentVideo)> Data;
+       /** @return true if the emitted data was accepted, false if not */
+       boost::signals2::signal<bool (ContentVideo)> Data;
 
 private:
        boost::shared_ptr<const Content> _content;