_db.video->set_subtitle_stream (_film_a->subtitle_stream ());
_da.audio->set_audio_stream (_film_a->audio_stream ());
- _da.video->Video.connect (bind (&Combiner::process_video, _combiner, _1, _2, _3, _4));
- _db.video->Video.connect (bind (&Combiner::process_video_b, _combiner, _1, _2, _3, _4));
+ _da.video->Video.connect (bind (&Combiner::process_video, _combiner, _1, _2, _3));
+ _db.video->Video.connect (bind (&Combiner::process_video_b, _combiner, _1, _2, _3));
if (_matcher) {
_combiner->connect_video (_matcher);
_matcher->connect_video (_encoder);
} else {
- /* Remove timestamp from the output of the combiner */
- _combiner->Video.connect (bind (&Encoder::process_video, _encoder, _1, _2, _3));
+ _combiner->connect_video (_encoder);
}
if (_matcher && _delay_line) {
/** @class AudioDecoder.
* @brief Parent class for audio decoders.
*/
-class AudioDecoder : public TimedAudioSource, public virtual Decoder
+class AudioDecoder : public AudioSource, public virtual Decoder
{
public:
AudioDecoder (boost::shared_ptr<Film>, DecodeOptions);
virtual void process_audio (boost::shared_ptr<AudioBuffers>) = 0;
};
-class TimedAudioSink
-{
-public:
- /** Call with some audio data */
- virtual void process_audio (boost::shared_ptr<AudioBuffers>, double t) = 0;
-};
-
#endif
{
Audio.connect (bind (&AudioSink::process_audio, s, _1));
}
-
-void
-TimedAudioSource::connect_audio (shared_ptr<TimedAudioSink> s)
-{
- Audio.connect (bind (&TimedAudioSink::process_audio, s, _1, _2));
-}
class AudioBuffers;
class AudioSink;
-class TimedAudioSink;
/** A class that emits audio data */
class AudioSource
void connect_audio (boost::shared_ptr<AudioSink>);
};
-
-/** A class that emits audio data with timestamps */
-class TimedAudioSource
-{
-public:
- /** Emitted when some audio data is ready */
- boost::signals2::signal<void (boost::shared_ptr<AudioBuffers>, double)> Audio;
-
- void connect_audio (boost::shared_ptr<TimedAudioSink>);
-};
-
#endif
using boost::shared_ptr;
Combiner::Combiner (Log* log)
- : Processor (log)
+ : VideoProcessor (log)
{
}
* @param image Frame image.
*/
void
-Combiner::process_video (shared_ptr<Image> image, bool, shared_ptr<Subtitle>, double)
+Combiner::process_video (shared_ptr<Image> image, bool, shared_ptr<Subtitle>)
{
_image = image;
}
* @param sub Subtitle (which will be put onto the whole frame)
*/
void
-Combiner::process_video_b (shared_ptr<Image> image, bool, shared_ptr<Subtitle> sub, double t)
+Combiner::process_video_b (shared_ptr<Image> image, bool, shared_ptr<Subtitle> sub)
{
/* Copy the right half of this image into our _image */
/* XXX: this should probably be in the Image class */
}
}
- Video (_image, false, sub, t);
+ Video (_image, false, sub);
_image.reset ();
}
* one image used for the left half of the screen and the other for
* the right.
*/
-class Combiner : public Processor, public TimedVideoSink, public TimedVideoSource
+class Combiner : public VideoProcessor
{
public:
Combiner (Log* log);
- void process_video (boost::shared_ptr<Image> i, bool, boost::shared_ptr<Subtitle> s, double t);
- void process_video_b (boost::shared_ptr<Image> i, bool, boost::shared_ptr<Subtitle> s, double t);
+ void process_video (boost::shared_ptr<Image> i, bool, boost::shared_ptr<Subtitle> s);
+ void process_video_b (boost::shared_ptr<Image> i, bool, boost::shared_ptr<Subtitle> s);
private:
/** The image that we are currently working on */
* @param frames Delay in frames, +ve to move audio later.
*/
DelayLine::DelayLine (Log* log, int channels, int frames)
- : Processor (log)
+ : AudioProcessor (log)
, _negative_delay_remaining (0)
, _frames (frames)
{
}
}
-/* XXX: can we just get rid of all this and fiddle with the timestamp? */
void
-DelayLine::process_audio (shared_ptr<AudioBuffers> data, double t)
+DelayLine::process_audio (shared_ptr<AudioBuffers> data)
{
if (_buffers) {
/* We have some buffers, so we are moving the audio later */
}
}
- Audio (data, t);
+ Audio (data);
}
class AudioBuffers;
/** A delay line for audio */
-class DelayLine : public Processor, public TimedAudioSink, public TimedAudioSource
+class DelayLine : public AudioProcessor
{
public:
DelayLine (Log* log, int channels, int frames);
- void process_audio (boost::shared_ptr<AudioBuffers>, double);
+ void process_audio (boost::shared_ptr<AudioBuffers>);
private:
boost::shared_ptr<AudioBuffers> _buffers;
sf_count_t const block = _audio_stream->sample_rate() / 2;
shared_ptr<AudioBuffers> audio (new AudioBuffers (_audio_stream->channels(), block));
- sf_count_t done = 0;
while (frames > 0) {
sf_count_t const this_time = min (block, frames);
for (size_t i = 0; i < sndfiles.size(); ++i) {
}
audio->set_frames (this_time);
- Audio (audio, double(done) / _audio_stream->sample_rate());
- done += this_time;
+ Audio (audio);
frames -= this_time;
}
if (delta > one_frame) {
int const extra = rint (delta / one_frame);
for (int i = 0; i < extra; ++i) {
- /* XXX: timestamp is wrong */
- repeat_last_video (source_pts_seconds);
+ repeat_last_video ();
_film->log()->log (
String::compose (
N_("Extra video frame inserted at %1s; source frame %2, source PTS %3 (at %4 fps)"),
if (s) {
shared_ptr<AudioBuffers> audio (new AudioBuffers (ffa->channels(), s));
audio->make_silent ();
- /* XXX: this time stamp is wrong */
- Audio (audio, source_pts_seconds);
+ Audio (audio);
}
}
);
assert (_audio_codec_context->channels == _film->audio_channels());
- Audio (deinterleave_audio (_frame->data, data_size), source_pts_seconds );
+ Audio (deinterleave_audio (_frame->data, data_size));
}
}
/** @param gain gain in dB */
Gain::Gain (Log* log, float gain)
- : Processor (log)
+ : AudioProcessor (log)
, _gain (gain)
{
#include "processor.h"
-class Gain : public Processor, public AudioSink, public AudioSource
+class Gain : public AudioProcessor
{
public:
Gain (Log* log, float gain);
return true;
}
- /* XXX: timestamp is wrong */
- repeat_last_video (0);
+ repeat_last_video ();
return false;
}
using boost::shared_ptr;
Matcher::Matcher (Log* log, int sample_rate, float frames_per_second)
- : Processor (log)
+ : AudioVideoProcessor (log)
, _sample_rate (sample_rate)
, _frames_per_second (frames_per_second)
, _video_frames (0)
}
void
-Matcher::process_video (boost::shared_ptr<Image> i, bool same, boost::shared_ptr<Subtitle> s, double)
+Matcher::process_video (boost::shared_ptr<Image> i, bool same, boost::shared_ptr<Subtitle> s)
{
Video (i, same, s);
_video_frames++;
}
void
-Matcher::process_audio (boost::shared_ptr<AudioBuffers> b, double)
+Matcher::process_audio (boost::shared_ptr<AudioBuffers> b)
{
Audio (b);
_audio_frames += b->frames ();
#include "processor.h"
#include "ffmpeg_compatibility.h"
-class Matcher : public Processor, public TimedVideoSink, public TimedAudioSink, public VideoSource, public AudioSource
+class Matcher : public AudioVideoProcessor
{
public:
Matcher (Log* log, int sample_rate, float frames_per_second);
- void process_video (boost::shared_ptr<Image> i, bool, boost::shared_ptr<Subtitle> s, double t);
- void process_audio (boost::shared_ptr<AudioBuffers>, double t);
+ void process_video (boost::shared_ptr<Image> i, bool, boost::shared_ptr<Subtitle> s);
+ void process_audio (boost::shared_ptr<AudioBuffers>);
void process_end ();
private:
Log* _log; ///< log to write to
};
+/** @class AudioVideoProcessor
+ * @brief A processor which handles both video and audio data.
+ */
+class AudioVideoProcessor : public Processor, public VideoSource, public VideoSink, public AudioSource, public AudioSink
+{
+public:
+ /** Construct an AudioVideoProcessor.
+ * @param log Log to write to.
+ */
+ AudioVideoProcessor (Log* log)
+ : Processor (log)
+ {}
+};
+
+/** @class AudioProcessor
+ * @brief A processor which handles just audio data.
+ */
+class AudioProcessor : public Processor, public AudioSource, public AudioSink
+{
+public:
+ /** Construct an AudioProcessor.
+ * @param log Log to write to.
+ */
+ AudioProcessor (Log* log)
+ : Processor (log)
+ {}
+};
+
+/** @class VideoProcessor
+ * @brief A processor which handles just video data.
+ */
+class VideoProcessor : public Processor, public VideoSource, public VideoSink
+{
+public:
+ * Construct a VideoProcessor.
+ * @param log Log to write to.
+ */
+ VideoProcessor (Log* log)
+ : Processor (log)
+ {}
+};
+
#endif
_decoders.video->connect_video (_matcher);
_matcher->connect_video (_encoder);
} else {
- /* Discard timestamps here */
- _decoders.video->Video.connect (boost::bind (&Encoder::process_video, _encoder, _1, _2, _3));
+ _decoders.video->connect_video (_encoder);
}
if (_matcher && _delay_line && _decoders.audio) {
sub = _timed_subtitle->subtitle ();
}
- signal_video (image, false, sub, t);
+ signal_video (image, false, sub);
_last_source_time = t;
}
* we will generate a black frame.
*/
void
-VideoDecoder::repeat_last_video (double t)
+VideoDecoder::repeat_last_video ()
{
if (!_last_image) {
_last_image.reset (new SimpleImage (pixel_format(), native_size(), true));
_last_image->make_black ();
}
- signal_video (_last_image, true, _last_subtitle, t);
+ signal_video (_last_image, true, _last_subtitle);
}
/** Emit our signal to say that some video data is ready.
* @param sub Subtitle for this frame, or 0.
*/
void
-VideoDecoder::signal_video (shared_ptr<Image> image, bool same, shared_ptr<Subtitle> sub, double t)
+VideoDecoder::signal_video (shared_ptr<Image> image, bool same, shared_ptr<Subtitle> sub)
{
TIMING (N_("Decoder emits %1"), _video_frame);
- Video (image, same, sub, t);
+ Video (image, same, sub);
++_video_frame;
_last_image = image;
#include "stream.h"
#include "decoder.h"
-class VideoDecoder : public TimedVideoSource, public virtual Decoder
+class VideoDecoder : public VideoSource, public virtual Decoder
{
public:
VideoDecoder (boost::shared_ptr<Film>, DecodeOptions);
void emit_video (boost::shared_ptr<Image>, double);
void emit_subtitle (boost::shared_ptr<TimedSubtitle>);
- void repeat_last_video (double t);
+ void repeat_last_video ();
/** Subtitle stream to use when decoding */
boost::shared_ptr<SubtitleStream> _subtitle_stream;
std::vector<boost::shared_ptr<SubtitleStream> > _subtitle_streams;
private:
- void signal_video (boost::shared_ptr<Image>, bool, boost::shared_ptr<Subtitle>, double);
+ void signal_video (boost::shared_ptr<Image>, bool, boost::shared_ptr<Subtitle>);
int _video_frame;
double _last_source_time;
virtual void process_video (boost::shared_ptr<Image> i, bool same, boost::shared_ptr<Subtitle> s) = 0;
};
-class TimedVideoSink
-{
-public:
- /** Call with a frame of video.
- * @param i Video frame image.
- * @param same true if i is the same as last time we were called.
- * @param s A subtitle that should be on this frame, or 0.
- * @param t Source timestamp.
- */
- virtual void process_video (boost::shared_ptr<Image> i, bool same, boost::shared_ptr<Subtitle> s, double t) = 0;
-};
-
#endif
{
Video.connect (bind (&VideoSink::process_video, s, _1, _2, _3));
}
-
-void
-TimedVideoSource::connect_video (shared_ptr<TimedVideoSink> s)
-{
- Video.connect (bind (&TimedVideoSink::process_video, s, _1, _2, _3, _4));
-}
#include "util.h"
class VideoSink;
-class TimedVideoSink;
class Subtitle;
class Image;
-/** @class VideoSource
- * @param A class that emits video data without timestamps.
+/** @class VideoSource
+ * @brief A class that emits video data.
*/
class VideoSource
{
void connect_video (boost::shared_ptr<VideoSink>);
};
-/** @class TimedVideoSource
- * @param A class that emits video data with timestamps.
- */
-class TimedVideoSource
-{
-public:
-
- /** Emitted when a video frame is ready.
- * First parameter is the video image.
- * Second parameter is true if the image is the same as the last one that was emitted.
- * Third parameter is either 0 or a subtitle that should be on this frame.
- * Fourth parameter is the source timestamp of this frame.
- */
- boost::signals2::signal<void (boost::shared_ptr<Image>, bool, boost::shared_ptr<Subtitle>, double)> Video;
-
- void connect_video (boost::shared_ptr<TimedVideoSink>);
-};
-
#endif
}
/* This only works because the delay line modifies the parameter */
- /* XXX: timestamp is wrong */
- d.process_audio (data, 0);
+ d.process_audio (data);
returned += data->frames ();
for (int j = 0; j < data->frames(); ++j) {
}
/* This only works because the delay line modifies the parameter */
- /* XXX: timestamp is wrong */
- d.process_audio (data, 0);
+ d.process_audio (data);
returned += data->frames ();
for (int j = 0; j < data->frames(); ++j) {