Make terminate_threads() less likely to leave _threads containing invalid pointers.
dcpomatic.git: src/lib/ffmpeg_file_encoder.cc
index 97a342e29a59733ec62f7b9d21708cd77e4a55ee..294b031621b91076606bc716b273096ade261d61 100644
@@ -26,7 +26,6 @@
 #include "log.h"
 #include "image.h"
 #include "cross.h"
-#include "butler.h"
 #include "compose.hpp"
 #include <iostream>
 
@@ -43,18 +42,11 @@ using boost::weak_ptr;
 int FFmpegFileEncoder::_video_stream_index = 0;
 int FFmpegFileEncoder::_audio_stream_index = 1;
 
-static AVPixelFormat
-force_pixel_format (AVPixelFormat, AVPixelFormat out)
-{
-       return out;
-}
-
 FFmpegFileEncoder::FFmpegFileEncoder (
        dcp::Size video_frame_size,
        int video_frame_rate,
        int audio_frame_rate,
        int channels,
-       shared_ptr<Log> log,
        ExportFormat format,
        int x264_crf,
        boost::filesystem::path output
@@ -65,11 +57,11 @@ FFmpegFileEncoder::FFmpegFileEncoder (
        , _video_frame_size (video_frame_size)
        , _video_frame_rate (video_frame_rate)
        , _audio_frame_rate (audio_frame_rate)
-       , _log (log)
 {
+       _pixel_format = pixel_format (format);
+
        switch (format) {
        case EXPORT_FORMAT_PRORES:
-               _pixel_format = AV_PIX_FMT_YUV422P10;
                _sample_format = AV_SAMPLE_FMT_S16;
                _video_codec_name = "prores_ks";
                _audio_codec_name = "pcm_s16le";
@@ -77,7 +69,6 @@ FFmpegFileEncoder::FFmpegFileEncoder (
                av_dict_set (&_video_options, "threads", "auto", 0);
                break;
        case EXPORT_FORMAT_H264:
-               _pixel_format = AV_PIX_FMT_YUV420P;
                _sample_format = AV_SAMPLE_FMT_FLTP;
                _video_codec_name = "libx264";
                _audio_codec_name = "aac";
@@ -88,9 +79,9 @@ FFmpegFileEncoder::FFmpegFileEncoder (
        setup_video ();
        setup_audio ();
 
-       avformat_alloc_output_context2 (&_format_context, 0, 0, _output.string().c_str());
+       int r = avformat_alloc_output_context2 (&_format_context, 0, 0, _output.string().c_str());
        if (!_format_context) {
-               throw runtime_error ("could not allocate FFmpeg format context");
+               throw runtime_error (String::compose("could not allocate FFmpeg format context (%1)", r));
        }
 
        _video_stream = avformat_new_stream (_format_context, _video_codec);
@@ -113,7 +104,7 @@ FFmpegFileEncoder::FFmpegFileEncoder (
                throw runtime_error ("could not open FFmpeg video codec");
        }
 
-       int r = avcodec_open2 (_audio_codec_context, _audio_codec, 0);
+       r = avcodec_open2 (_audio_codec_context, _audio_codec, 0);
        if (r < 0) {
                char buffer[256];
                av_strerror (r, buffer, sizeof(buffer));
@@ -131,6 +122,21 @@ FFmpegFileEncoder::FFmpegFileEncoder (
        _pending_audio.reset (new AudioBuffers(channels, 0));
 }
 
+AVPixelFormat
+FFmpegFileEncoder::pixel_format (ExportFormat format)
+{
+       switch (format) {
+       case EXPORT_FORMAT_PRORES:
+               return AV_PIX_FMT_YUV422P10;
+       case EXPORT_FORMAT_H264:
+               return AV_PIX_FMT_YUV420P;
+       default:
+               DCPOMATIC_ASSERT (false);
+       }
+
+       return AV_PIX_FMT_YUV422P10;
+}
+
 void
 FFmpegFileEncoder::setup_video ()
 {
@@ -230,8 +236,7 @@ void
 FFmpegFileEncoder::video (shared_ptr<PlayerVideo> video, DCPTime time)
 {
        shared_ptr<Image> image = video->image (
-               bind (&Log::dcp_log, _log.get(), _1, _2),
-               bind (&force_pixel_format, _1, _pixel_format),
+               bind (&PlayerVideo::force, _1, _pixel_format),
                true,
                false
                );
@@ -239,7 +244,11 @@ FFmpegFileEncoder::video (shared_ptr<PlayerVideo> video, DCPTime time)
        AVFrame* frame = av_frame_alloc ();
        DCPOMATIC_ASSERT (frame);
 
-       _pending_images[image->data()[0]] = image;
+       {
+               boost::mutex::scoped_lock lm (_pending_images_mutex);
+               _pending_images[image->data()[0]] = image;
+       }
+
        for (int i = 0; i < 3; ++i) {
                AVBufferRef* buffer = av_buffer_create(image->data()[i], image->stride()[i] * image->size().height, &buffer_free, this, 0);
                frame->buf[i] = av_buffer_ref (buffer);
@@ -251,7 +260,8 @@ FFmpegFileEncoder::video (shared_ptr<PlayerVideo> video, DCPTime time)
        frame->width = image->size().width;
        frame->height = image->size().height;
        frame->format = _pixel_format;
-       frame->pts = time.seconds() / av_q2d (_video_stream->time_base);
+       DCPOMATIC_ASSERT (_video_stream->time_base.num == 1);
+       frame->pts = time.get() * _video_stream->time_base.den / DCPTime::HZ;
 
        AVPacket packet;
        av_init_packet (&packet);
@@ -373,5 +383,8 @@ FFmpegFileEncoder::buffer_free (void* opaque, uint8_t* data)
 void
 FFmpegFileEncoder::buffer_free2 (uint8_t* data)
 {
-       _pending_images.erase (data);
+       boost::mutex::scoped_lock lm (_pending_images_mutex);
+       if (_pending_images.find(data) != _pending_images.end()) {
+               _pending_images.erase (data);
+       }
 }
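
The frame->pts change above replaces a floating-point conversion (seconds divided by av_q2d of the time base) with pure integer arithmetic, after asserting that the stream time base has a numerator of 1. A minimal sketch of that rescaling, assuming DCPTime counts ticks at a fixed internal rate; the value 96000 used for DCPTime::HZ below is an assumption, as are the function and constant names:

#include <cstdint>
#include <cassert>

// Stand-in for DCPTime::HZ (assumed value, for illustration only).
static int64_t const DCPTIME_HZ = 96000;

// Rescale a DCPTime tick count into a PTS expressed in a stream time base of
// 1/time_base_den, using integers only to avoid floating-point rounding.
int64_t
dcp_time_to_pts (int64_t dcp_time_ticks, int time_base_num, int time_base_den)
{
	// The new code asserts the numerator is 1, so a single rescale suffices.
	assert (time_base_num == 1);
	return dcp_time_ticks * time_base_den / DCPTIME_HZ;
}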
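
The new _pending_images_mutex protects the map that keeps each Image alive while FFmpeg still holds an AVBufferRef wrapping its planes: the image is parked in the map when the frame is built and erased in buffer_free2(), which FFmpeg may invoke from a different thread. A minimal sketch of that keep-alive pattern, using std::mutex for brevity where the file uses boost::mutex; PendingImages and Image are illustrative names rather than dcpomatic's API:

#include <cstdint>
#include <map>
#include <memory>
#include <mutex>

struct Image;   // stand-in for the real Image class; only a pointer to it is needed here

class PendingImages
{
public:
	// Keep the image alive, keyed by its first plane's data pointer.
	void add (uint8_t* key, std::shared_ptr<Image> image)
	{
		std::lock_guard<std::mutex> lm (_mutex);
		_images[key] = image;
	}

	// Called from the AVBufferRef free callback, possibly on a different
	// thread than add(), hence the lock.  Erasing a missing key is a no-op.
	void release (uint8_t* key)
	{
		std::lock_guard<std::mutex> lm (_mutex);
		_images.erase (key);
	}

private:
	std::mutex _mutex;
	std::map<uint8_t*, std::shared_ptr<Image>> _images;
};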