#include "log.h"
#include "image.h"
#include "cross.h"
-#include "butler.h"
#include "compose.hpp"
#include <iostream>
using boost::shared_ptr;
using boost::bind;
using boost::weak_ptr;
+#if BOOST_VERSION >= 106100
+using namespace boost::placeholders;
+#endif
int FFmpegFileEncoder::_video_stream_index = 0;
int FFmpegFileEncoder::_audio_stream_index = 1;
-static AVPixelFormat
-force_pixel_format (AVPixelFormat, AVPixelFormat out)
-{
- return out;
-}
-
FFmpegFileEncoder::FFmpegFileEncoder (
dcp::Size video_frame_size,
int video_frame_rate,
int audio_frame_rate,
int channels,
- shared_ptr<Log> log,
ExportFormat format,
int x264_crf,
boost::filesystem::path output
, _video_frame_size (video_frame_size)
, _video_frame_rate (video_frame_rate)
, _audio_frame_rate (audio_frame_rate)
- , _log (log)
{
+ _pixel_format = pixel_format (format);
+
switch (format) {
case EXPORT_FORMAT_PRORES:
- _pixel_format = AV_PIX_FMT_YUV422P10;
_sample_format = AV_SAMPLE_FMT_S16;
_video_codec_name = "prores_ks";
_audio_codec_name = "pcm_s16le";
av_dict_set (&_video_options, "threads", "auto", 0);
break;
case EXPORT_FORMAT_H264:
- _pixel_format = AV_PIX_FMT_YUV420P;
_sample_format = AV_SAMPLE_FMT_FLTP;
_video_codec_name = "libx264";
_audio_codec_name = "aac";
setup_video ();
setup_audio ();
- avformat_alloc_output_context2 (&_format_context, 0, 0, _output.string().c_str());
+ int r = avformat_alloc_output_context2 (&_format_context, 0, 0, _output.string().c_str());
if (!_format_context) {
- throw runtime_error ("could not allocate FFmpeg format context");
+ throw runtime_error (String::compose("could not allocate FFmpeg format context (%1)", r));
}
_video_stream = avformat_new_stream (_format_context, _video_codec);
throw runtime_error ("could not open FFmpeg video codec");
}
- int r = avcodec_open2 (_audio_codec_context, _audio_codec, 0);
+ r = avcodec_open2 (_audio_codec_context, _audio_codec, 0);
if (r < 0) {
char buffer[256];
av_strerror (r, buffer, sizeof(buffer));
_pending_audio.reset (new AudioBuffers(channels, 0));
}
+/** Map an ExportFormat to the FFmpeg pixel format the encoder will use.
+ *  @param format Export format (ProRes or H.264).
+ *  @return Corresponding AVPixelFormat.
+ */
+AVPixelFormat
+FFmpegFileEncoder::pixel_format (ExportFormat format)
+{
+ switch (format) {
+ case EXPORT_FORMAT_PRORES:
+ return AV_PIX_FMT_YUV422P10;
+ case EXPORT_FORMAT_H264:
+ return AV_PIX_FMT_YUV420P;
+ default:
+ DCPOMATIC_ASSERT (false);
+ }
+
+ /* Not reached if the assert fires, but present so that every control
+    path returns a value (keeps compilers quiet). */
+ return AV_PIX_FMT_YUV422P10;
+}
+
void
FFmpegFileEncoder::setup_video ()
{
FFmpegFileEncoder::video (shared_ptr<PlayerVideo> video, DCPTime time)
{
shared_ptr<Image> image = video->image (
- boost::optional<dcp::NoteHandler>(bind(&Log::dcp_log, _log.get(), _1, _2)),
- bind (&force_pixel_format, _1, _pixel_format),
+ bind (&PlayerVideo::force, _1, _pixel_format),
true,
false
);
AVFrame* frame = av_frame_alloc ();
DCPOMATIC_ASSERT (frame);
- _pending_images[image->data()[0]] = image;
+ {
+ boost::mutex::scoped_lock lm (_pending_images_mutex);
+ _pending_images[image->data()[0]] = image;
+ }
+
for (int i = 0; i < 3; ++i) {
AVBufferRef* buffer = av_buffer_create(image->data()[i], image->stride()[i] * image->size().height, &buffer_free, this, 0);
frame->buf[i] = av_buffer_ref (buffer);
frame->width = image->size().width;
frame->height = image->size().height;
frame->format = _pixel_format;
- frame->pts = time.seconds() / av_q2d (_video_stream->time_base);
+ DCPOMATIC_ASSERT (_video_stream->time_base.num == 1);
+ frame->pts = time.get() * _video_stream->time_base.den / DCPTime::HZ;
AVPacket packet;
av_init_packet (&packet);
void
FFmpegFileEncoder::buffer_free2 (uint8_t* data)
{
- _pending_images.erase (data);
+ /* Guard _pending_images with its mutex, as in video(), since this
+    free callback can run on a different (FFmpeg-owned) thread.
+    std::map::erase(key) is already a no-op when the key is absent,
+    so no preliminary find() is needed. */
+ boost::mutex::scoped_lock lm (_pending_images_mutex);
+ _pending_images.erase (data);
}