Rename SafeStringStream -> locked_stringstream. Bump deps for removal of stringstream.
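The locked_stringstream class comes from the external locked_sstream library (pulled in via <locked_sstream.h> below). In broad terms it is a drop-in replacement for std::stringstream whose operations are serialised through a single shared mutex, so formatting can be done from several threads on platforms where stringstream construction is not thread-safe. The following is only a minimal sketch of that idea under those assumptions — the class name, the global mutex and the forwarding operators are illustrative, not the library's actual implementation:

/* Illustrative sketch only: a stringstream wrapper that serialises all
 * operations through one shared mutex, in the spirit of locked_stringstream.
 * The real locked_sstream library may differ in detail.
 */
#include <boost/scoped_ptr.hpp>
#include <boost/thread/mutex.hpp>
#include <sstream>
#include <string>

class locked_stringstream_sketch
{
public:
	locked_stringstream_sketch ()
	{
		/* Construct the underlying stream while holding the lock,
		   since construction is one of the operations being guarded. */
		boost::mutex::scoped_lock lm (mutex ());
		_stream.reset (new std::stringstream);
	}

	template <class T>
	locked_stringstream_sketch& operator<< (T const & t)
	{
		boost::mutex::scoped_lock lm (mutex ());
		*_stream << t;
		return *this;
	}

	std::string str () const
	{
		boost::mutex::scoped_lock lm (mutex ());
		return _stream->str ();
	}

private:
	/* One mutex shared by every instance; the point is to stop two
	   threads running stringstream code at the same time at all. */
	static boost::mutex& mutex ()
	{
		static boost::mutex m;
		return m;
	}

	boost::scoped_ptr<std::stringstream> _stream;
};

/* Usage mirrors FFmpegContent::identifier() in the diff below:
 *
 *   locked_stringstream_sketch s;
 *   s << Content::identifier() << "_" << video->identifier();
 *   return s.str ();
 */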
diff --git a/src/lib/ffmpeg_content.cc b/src/lib/ffmpeg_content.cc
index a186db48aeaecff51d6f0f1b6fa59f508378ab5ed..60c777b3c3d9f6107386525fa211630c14906afc 100644
@@ -1,28 +1,26 @@
 /*
-    Copyright (C) 2013-2014 Carl Hetherington <cth@carlh.net>
+    Copyright (C) 2013-2016 Carl Hetherington <cth@carlh.net>
 
-    This program is free software; you can redistribute it and/or modify
+    This file is part of DCP-o-matic.
+
+    DCP-o-matic is free software; you can redistribute it and/or modify
     it under the terms of the GNU General Public License as published by
     the Free Software Foundation; either version 2 of the License, or
     (at your option) any later version.
 
-    This program is distributed in the hope that it will be useful,
+    DCP-o-matic is distributed in the hope that it will be useful,
     but WITHOUT ANY WARRANTY; without even the implied warranty of
     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
     GNU General Public License for more details.
 
     You should have received a copy of the GNU General Public License
-    along with this program; if not, write to the Free Software
-    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+    along with DCP-o-matic.  If not, see <http://www.gnu.org/licenses/>.
 
 */
 
-extern "C" {
-#include <libavformat/avformat.h>
-}
-#include <libcxml/cxml.h>
-#include <dcp/raw_convert.h>
 #include "ffmpeg_content.h"
+#include "video_content.h"
+#include "audio_content.h"
 #include "ffmpeg_examiner.h"
 #include "ffmpeg_subtitle_stream.h"
 #include "ffmpeg_audio_stream.h"
@@ -34,45 +32,53 @@ extern "C" {
 #include "log.h"
 #include "exceptions.h"
 #include "frame_rate_change.h"
-#include "safe_stringstream.h"
+#include "raw_convert.h"
+#include "subtitle_content.h"
+#include <locked_sstream.h>
+#include <libcxml/cxml.h>
+extern "C" {
+#include <libavformat/avformat.h>
+#include <libavutil/pixdesc.h>
+}
+#include <libxml++/libxml++.h>
+#include <boost/foreach.hpp>
+#include <iostream>
 
 #include "i18n.h"
 
-#define LOG_GENERAL(...) film->log()->log (String::compose (__VA_ARGS__), Log::TYPE_GENERAL);
+#define LOG_GENERAL(...) film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);
 
 using std::string;
 using std::vector;
 using std::list;
 using std::cout;
 using std::pair;
+using std::make_pair;
+using std::max;
 using boost::shared_ptr;
 using boost::dynamic_pointer_cast;
-using dcp::raw_convert;
+using boost::optional;
 
 int const FFmpegContentProperty::SUBTITLE_STREAMS = 100;
 int const FFmpegContentProperty::SUBTITLE_STREAM = 101;
-int const FFmpegContentProperty::AUDIO_STREAMS = 102;
-int const FFmpegContentProperty::AUDIO_STREAM = 103;
-int const FFmpegContentProperty::FILTERS = 104;
-
-FFmpegContent::FFmpegContent (shared_ptr<const Film> f, boost::filesystem::path p)
-       : Content (f, p)
-       , VideoContent (f, p)
-       , AudioContent (f, p)
-       , SubtitleContent (f, p)
+int const FFmpegContentProperty::FILTERS = 102;
+
+FFmpegContent::FFmpegContent (shared_ptr<const Film> film, boost::filesystem::path p)
+       : Content (film, p)
 {
 
 }
 
-FFmpegContent::FFmpegContent (shared_ptr<const Film> f, cxml::ConstNodePtr node, int version, list<string>& notes)
-       : Content (f, node)
-       , VideoContent (f, node, version)
-       , AudioContent (f, node)
-       , SubtitleContent (f, node, version)
+FFmpegContent::FFmpegContent (shared_ptr<const Film> film, cxml::ConstNodePtr node, int version, list<string>& notes)
+       : Content (film, node)
 {
+       video = VideoContent::from_xml (this, node, version);
+       audio = AudioContent::from_xml (this, node, version);
+       subtitle = SubtitleContent::from_xml (this, node, version);
+
        list<cxml::NodePtr> c = node->node_children ("SubtitleStream");
        for (list<cxml::NodePtr>::const_iterator i = c.begin(); i != c.end(); ++i) {
-               _subtitle_streams.push_back (shared_ptr<FFmpegSubtitleStream> (new FFmpegSubtitleStream (*i)));
+               _subtitle_streams.push_back (shared_ptr<FFmpegSubtitleStream> (new FFmpegSubtitleStream (*i, version)));
                if ((*i)->optional_number_child<int> ("Selected")) {
                        _subtitle_stream = _subtitle_streams.back ();
                }
@@ -80,9 +86,11 @@ FFmpegContent::FFmpegContent (shared_ptr<const Film> f, cxml::ConstNodePtr node,
 
        c = node->node_children ("AudioStream");
        for (list<cxml::NodePtr>::const_iterator i = c.begin(); i != c.end(); ++i) {
-               _audio_streams.push_back (shared_ptr<FFmpegAudioStream> (new FFmpegAudioStream (*i, version)));
-               if ((*i)->optional_number_child<int> ("Selected")) {
-                       _audio_stream = _audio_streams.back ();
+               shared_ptr<FFmpegAudioStream> as (new FFmpegAudioStream (*i, version));
+               audio->add_stream (as);
+               if (version < 11 && !(*i)->optional_node_child ("Selected")) {
+                       /* This is an old file and this stream is not selected, so un-map it */
+                       as->set_mapping (AudioMapping (as->channels (), MAX_DCP_AUDIO_CHANNELS));
                }
        }
 
@@ -96,34 +104,80 @@ FFmpegContent::FFmpegContent (shared_ptr<const Film> f, cxml::ConstNodePtr node,
                }
        }
 
-       _first_video = node->optional_number_child<double> ("FirstVideo");
+       optional<ContentTime::Type> const f = node->optional_number_child<ContentTime::Type> ("FirstVideo");
+       if (f) {
+               _first_video = ContentTime (f.get ());
+       }
+
+       _color_range = static_cast<AVColorRange> (node->optional_number_child<int>("ColorRange").get_value_or (AVCOL_RANGE_UNSPECIFIED));
+       _color_primaries = static_cast<AVColorPrimaries> (node->optional_number_child<int>("ColorPrimaries").get_value_or (AVCOL_PRI_UNSPECIFIED));
+       _color_trc = static_cast<AVColorTransferCharacteristic> (
+               node->optional_number_child<int>("ColorTransferCharacteristic").get_value_or (AVCOL_TRC_UNSPECIFIED)
+               );
+       _colorspace = static_cast<AVColorSpace> (node->optional_number_child<int>("Colorspace").get_value_or (AVCOL_SPC_UNSPECIFIED));
+       _bits_per_pixel = node->optional_number_child<int> ("BitsPerPixel");
+
 }
 
-FFmpegContent::FFmpegContent (shared_ptr<const Film> f, vector<boost::shared_ptr<Content> > c)
-       : Content (f, c)
-       , VideoContent (f, c)
-       , AudioContent (f, c)
-       , SubtitleContent (f, c)
+FFmpegContent::FFmpegContent (shared_ptr<const Film> film, vector<shared_ptr<Content> > c)
+       : Content (film, c)
 {
+       vector<shared_ptr<Content> >::const_iterator i = c.begin ();
+
+       bool need_video = false;
+       bool need_audio = false;
+       bool need_subtitle = false;
+
+       if (i != c.end ()) {
+               need_video = static_cast<bool> ((*i)->video);
+               need_audio = static_cast<bool> ((*i)->audio);
+               need_subtitle = static_cast<bool> ((*i)->subtitle);
+       }
+
+       while (i != c.end ()) {
+               if (need_video != static_cast<bool> ((*i)->video)) {
+                       throw JoinError (_("Content to be joined must all have or not have video"));
+               }
+               if (need_audio != static_cast<bool> ((*i)->audio)) {
+                       throw JoinError (_("Content to be joined must all have or not have audio"));
+               }
+               if (need_subtitle != static_cast<bool> ((*i)->subtitle)) {
+                       throw JoinError (_("Content to be joined must all have or not have subtitles"));
+               }
+               ++i;
+       }
+
+       if (need_video) {
+               video.reset (new VideoContent (this, c));
+       }
+       if (need_audio) {
+               audio.reset (new AudioContent (this, c));
+       }
+       if (need_subtitle) {
+               subtitle.reset (new SubtitleContent (this, c));
+       }
+
        shared_ptr<FFmpegContent> ref = dynamic_pointer_cast<FFmpegContent> (c[0]);
-       assert (ref);
+       DCPOMATIC_ASSERT (ref);
 
        for (size_t i = 0; i < c.size(); ++i) {
                shared_ptr<FFmpegContent> fc = dynamic_pointer_cast<FFmpegContent> (c[i]);
-               if (fc->use_subtitles() && *(fc->_subtitle_stream.get()) != *(ref->_subtitle_stream.get())) {
+               if (fc->subtitle && fc->subtitle->use() && *(fc->_subtitle_stream.get()) != *(ref->_subtitle_stream.get())) {
                        throw JoinError (_("Content to be joined must use the same subtitle stream."));
                }
-
-               if (*(fc->_audio_stream.get()) != *(ref->_audio_stream.get())) {
-                       throw JoinError (_("Content to be joined must use the same audio stream."));
-               }
        }
 
+       /* XXX: should probably check that more of the stuff below is the same in *this and ref */
+
        _subtitle_streams = ref->subtitle_streams ();
        _subtitle_stream = ref->subtitle_stream ();
-       _audio_streams = ref->audio_streams ();
-       _audio_stream = ref->audio_stream ();
        _first_video = ref->_first_video;
+       _filters = ref->_filters;
+       _color_range = ref->_color_range;
+       _color_primaries = ref->_color_primaries;
+       _color_trc = ref->_color_trc;
+       _colorspace = ref->_colorspace;
+       _bits_per_pixel = ref->_bits_per_pixel;
 }
 
 void
@@ -131,9 +185,24 @@ FFmpegContent::as_xml (xmlpp::Node* node) const
 {
        node->add_child("Type")->add_child_text ("FFmpeg");
        Content::as_xml (node);
-       VideoContent::as_xml (node);
-       AudioContent::as_xml (node);
-       SubtitleContent::as_xml (node);
+
+       if (video) {
+               video->as_xml (node);
+       }
+
+       if (audio) {
+               audio->as_xml (node);
+
+               BOOST_FOREACH (AudioStreamPtr i, audio->streams ()) {
+                       shared_ptr<FFmpegAudioStream> f = dynamic_pointer_cast<FFmpegAudioStream> (i);
+                       DCPOMATIC_ASSERT (f);
+                       f->as_xml (node->add_child("AudioStream"));
+               }
+       }
+
+       if (subtitle) {
+               subtitle->as_xml (node);
+       }
 
        boost::mutex::scoped_lock lm (_mutex);
 
@@ -145,14 +214,6 @@ FFmpegContent::as_xml (xmlpp::Node* node) const
                (*i)->as_xml (t);
        }
 
-       for (vector<shared_ptr<FFmpegAudioStream> >::const_iterator i = _audio_streams.begin(); i != _audio_streams.end(); ++i) {
-               xmlpp::Node* t = node->add_child("AudioStream");
-               if (_audio_stream && *i == _audio_stream) {
-                       t->add_child("Selected")->add_child_text("1");
-               }
-               (*i)->as_xml (t);
-       }
-
        for (vector<Filter const *>::const_iterator i = _filters.begin(); i != _filters.end(); ++i) {
                node->add_child("Filter")->add_child_text ((*i)->id ());
        }
@@ -160,57 +221,94 @@ FFmpegContent::as_xml (xmlpp::Node* node) const
        if (_first_video) {
                node->add_child("FirstVideo")->add_child_text (raw_convert<string> (_first_video.get().get()));
        }
+
+       node->add_child("ColorRange")->add_child_text (raw_convert<string> (_color_range));
+       node->add_child("ColorPrimaries")->add_child_text (raw_convert<string> (_color_primaries));
+       node->add_child("ColorTransferCharacteristic")->add_child_text (raw_convert<string> (_color_trc));
+       node->add_child("Colorspace")->add_child_text (raw_convert<string> (_colorspace));
+       if (_bits_per_pixel) {
+               node->add_child("BitsPerPixel")->add_child_text (raw_convert<string> (_bits_per_pixel.get ()));
+       }
 }
 
 void
-FFmpegContent::examine (shared_ptr<Job> job, bool calculate_digest)
+FFmpegContent::examine (shared_ptr<Job> job)
 {
        job->set_progress_unknown ();
 
-       Content::examine (job, calculate_digest);
+       Content::examine (job);
+
+       shared_ptr<FFmpegExaminer> examiner (new FFmpegExaminer (shared_from_this (), job));
 
-       shared_ptr<FFmpegExaminer> examiner (new FFmpegExaminer (shared_from_this ()));
-       take_from_video_examiner (examiner);
+       if (examiner->has_video ()) {
+               video.reset (new VideoContent (this));
+               video->take_from_examiner (examiner);
+               set_default_colour_conversion ();
+       }
 
-       shared_ptr<const Film> film = _film.lock ();
-       assert (film);
+       boost::filesystem::path first_path = path (0);
 
        {
                boost::mutex::scoped_lock lm (_mutex);
 
+               if (examiner->has_video ()) {
+                       _first_video = examiner->first_video ();
+                       _color_range = examiner->color_range ();
+                       _color_primaries = examiner->color_primaries ();
+                       _color_trc = examiner->color_trc ();
+                       _colorspace = examiner->colorspace ();
+                       _bits_per_pixel = examiner->bits_per_pixel ();
+               }
+
+               if (!examiner->audio_streams().empty ()) {
+                       audio.reset (new AudioContent (this));
+
+                       BOOST_FOREACH (shared_ptr<FFmpegAudioStream> i, examiner->audio_streams ()) {
+                               audio->add_stream (i);
+                       }
+
+                       AudioStreamPtr as = audio->streams().front();
+                       AudioMapping m = as->mapping ();
+                       film()->make_audio_mapping_default (m, first_path);
+                       as->set_mapping (m);
+               }
+
                _subtitle_streams = examiner->subtitle_streams ();
                if (!_subtitle_streams.empty ()) {
+                       subtitle.reset (new SubtitleContent (this));
                        _subtitle_stream = _subtitle_streams.front ();
                }
-               
-               _audio_streams = examiner->audio_streams ();
-               if (!_audio_streams.empty ()) {
-                       _audio_stream = _audio_streams.front ();
-               }
 
-               _first_video = examiner->first_video ();
        }
 
        signal_changed (FFmpegContentProperty::SUBTITLE_STREAMS);
        signal_changed (FFmpegContentProperty::SUBTITLE_STREAM);
-       signal_changed (FFmpegContentProperty::AUDIO_STREAMS);
-       signal_changed (FFmpegContentProperty::AUDIO_STREAM);
-       signal_changed (AudioContentProperty::AUDIO_CHANNELS);
 }
 
 string
 FFmpegContent::summary () const
 {
-       /* Get the string() here so that the name does not have quotes around it */
-       return String::compose (_("%1 [movie]"), path_summary ());
+       if (video && audio) {
+               return String::compose (_("%1 [movie]"), path_summary ());
+       } else if (video) {
+               return String::compose (_("%1 [video]"), path_summary ());
+       } else if (audio) {
+               return String::compose (_("%1 [audio]"), path_summary ());
+       }
+
+       return path_summary ();
 }
 
 string
 FFmpegContent::technical_summary () const
 {
-       string as = "none";
-       if (_audio_stream) {
-               as = _audio_stream->technical_summary ();
+       string as = "";
+       BOOST_FOREACH (shared_ptr<FFmpegAudioStream> i, ffmpeg_audio_streams ()) {
+               as += i->technical_summary () + " " ;
+       }
+
+       if (as.empty ()) {
+               as = "none";
        }
 
        string ss = "none";
@@ -219,28 +317,20 @@ FFmpegContent::technical_summary () const
        }
 
        string filt = Filter::ffmpeg_string (_filters);
-       
-       return Content::technical_summary() + " - "
-               + VideoContent::technical_summary() + " - "
-               + AudioContent::technical_summary() + " - "
-               + String::compose (
-                       "ffmpeg: audio %1, subtitle %2, filters %3", as, ss, filt
-                       );
-}
 
-string
-FFmpegContent::information () const
-{
-       if (video_length() == ContentTime (0) || video_frame_rate() == 0) {
-               return "";
+       string s = Content::technical_summary ();
+
+       if (video) {
+               s += " - " + video->technical_summary ();
        }
-       
-       SafeStringStream s;
-       
-       s << String::compose (_("%1 frames; %2 frames per second"), video_length_after_3d_combine().frames (video_frame_rate()), video_frame_rate()) << "\n";
-       s << VideoContent::information ();
 
-       return s.str ();
+       if (audio) {
+               s += " - " + audio->technical_summary ();
+       }
+
+       return s + String::compose (
+               "ffmpeg: audio %1 subtitle %2 filters %3", as, ss, filt
+               );
 }
 
 void
@@ -254,51 +344,6 @@ FFmpegContent::set_subtitle_stream (shared_ptr<FFmpegSubtitleStream> s)
        signal_changed (FFmpegContentProperty::SUBTITLE_STREAM);
 }
 
-void
-FFmpegContent::set_audio_stream (shared_ptr<FFmpegAudioStream> s)
-{
-       {
-               boost::mutex::scoped_lock lm (_mutex);
-               _audio_stream = s;
-       }
-
-       signal_changed (FFmpegContentProperty::AUDIO_STREAM);
-}
-
-ContentTime
-FFmpegContent::audio_length () const
-{
-       if (!audio_stream ()) {
-               return ContentTime ();
-       }
-
-       return video_length ();
-}
-
-int
-FFmpegContent::audio_channels () const
-{
-       boost::mutex::scoped_lock lm (_mutex);
-       
-       if (!_audio_stream) {
-               return 0;
-       }
-
-       return _audio_stream->channels ();
-}
-
-int
-FFmpegContent::audio_frame_rate () const
-{
-       boost::mutex::scoped_lock lm (_mutex);
-
-       if (!_audio_stream) {
-               return 0;
-       }
-
-       return _audio_stream->frame_rate ();
-}
-
 bool
 operator== (FFmpegStream const & a, FFmpegStream const & b)
 {
@@ -314,21 +359,19 @@ operator!= (FFmpegStream const & a, FFmpegStream const & b)
 DCPTime
 FFmpegContent::full_length () const
 {
-       shared_ptr<const Film> film = _film.lock ();
-       assert (film);
-       return DCPTime (video_length_after_3d_combine(), FrameRateChange (video_frame_rate (), film->video_frame_rate ()));
-}
+       FrameRateChange const frc (active_video_frame_rate (), film()->video_frame_rate ());
+       if (video) {
+               return DCPTime::from_frames (llrint (video->length_after_3d_combine() * frc.factor()), film()->video_frame_rate());
+       }
 
-AudioMapping
-FFmpegContent::audio_mapping () const
-{
-       boost::mutex::scoped_lock lm (_mutex);
+       DCPOMATIC_ASSERT (audio);
 
-       if (!_audio_stream) {
-               return AudioMapping ();
+       DCPTime longest;
+       BOOST_FOREACH (AudioStreamPtr i, audio->streams ()) {
+               longest = max (longest, DCPTime::from_frames (llrint (i->length() / frc.speed_up), i->frame_rate()));
        }
 
-       return _audio_stream->mapping ();
+       return longest;
 }
 
 void
@@ -342,19 +385,20 @@ FFmpegContent::set_filters (vector<Filter const *> const & filters)
        signal_changed (FFmpegContentProperty::FILTERS);
 }
 
-void
-FFmpegContent::set_audio_mapping (AudioMapping m)
-{
-       audio_stream()->set_mapping (m);
-       AudioContent::set_audio_mapping (m);
-}
-
 string
 FFmpegContent::identifier () const
 {
-       SafeStringStream s;
+       locked_stringstream s;
+
+       s << Content::identifier();
+
+       if (video) {
+               s << "_" << video->identifier();
+       }
 
-       s << VideoContent::identifier();
+       if (subtitle) {
+               s << "_" << subtitle->identifier();
+       }
 
        boost::mutex::scoped_lock lm (_mutex);
 
@@ -369,49 +413,191 @@ FFmpegContent::identifier () const
        return s.str ();
 }
 
-boost::filesystem::path
-FFmpegContent::audio_analysis_path () const
+list<ContentTimePeriod>
+FFmpegContent::image_subtitles_during (ContentTimePeriod period, bool starting) const
 {
-       shared_ptr<const Film> film = _film.lock ();
-       if (!film) {
-               return boost::filesystem::path ();
+       shared_ptr<FFmpegSubtitleStream> stream = subtitle_stream ();
+       if (!stream) {
+               return list<ContentTimePeriod> ();
        }
 
-       /* We need to include the stream ID in this path so that we get different
-          analyses for each stream.
-       */
-
-       boost::filesystem::path p = film->audio_analysis_dir ();
-       string name = digest ();
-       if (audio_stream ()) {
-               name += "_" + audio_stream()->identifier ();
-       }
-       p /= name;
-       return p;
+       return stream->image_subtitles_during (period, starting);
 }
 
 list<ContentTimePeriod>
-FFmpegContent::subtitles_during (ContentTimePeriod period, bool starting) const
+FFmpegContent::text_subtitles_during (ContentTimePeriod period, bool starting) const
 {
-       list<ContentTimePeriod> d;
-       
        shared_ptr<FFmpegSubtitleStream> stream = subtitle_stream ();
        if (!stream) {
-               return d;
+               return list<ContentTimePeriod> ();
+       }
+
+       return stream->text_subtitles_during (period, starting);
+}
+
+void
+FFmpegContent::set_default_colour_conversion ()
+{
+       DCPOMATIC_ASSERT (video);
+
+       dcp::Size const s = video->size ();
+
+       boost::mutex::scoped_lock lm (_mutex);
+
+       if (s.width < 1080) {
+               video->set_colour_conversion (PresetColourConversion::from_id ("rec601").conversion);
+       } else {
+               video->set_colour_conversion (PresetColourConversion::from_id ("rec709").conversion);
        }
+}
 
-       /* XXX: inefficient */
-       for (vector<ContentTimePeriod>::const_iterator i = stream->periods.begin(); i != stream->periods.end(); ++i) {
-               if ((starting && period.contains (i->from)) || (!starting && period.overlaps (*i))) {
-                       d.push_back (*i);
+void
+FFmpegContent::add_properties (list<UserProperty>& p) const
+{
+       Content::add_properties (p);
+
+       if (video) {
+               video->add_properties (p);
+
+               if (_bits_per_pixel) {
+                       int const sub = 219 * pow (2, _bits_per_pixel.get() - 8);
+                       int const total = pow (2, _bits_per_pixel.get());
+
+                       switch (_color_range) {
+                       case AVCOL_RANGE_UNSPECIFIED:
+                               /// TRANSLATORS: this means that the range of pixel values used in this
+                               /// file is unknown (not specified in the file).
+                               p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Unspecified")));
+                               break;
+                       case AVCOL_RANGE_MPEG:
+                               /// TRANSLATORS: this means that the range of pixel values used in this
+                               /// file is limited, so that not all possible values are valid.
+                               p.push_back (
+                                       UserProperty (
+                                               UserProperty::VIDEO, _("Colour range"), String::compose (_("Limited (%1-%2)"), (total - sub) / 2, (total + sub) / 2)
+                                               )
+                                       );
+                               break;
+                       case AVCOL_RANGE_JPEG:
+                               /// TRANSLATORS: this means that the range of pixel values used in this
+                               /// file is full, so that all possible pixel values are valid.
+                               p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), String::compose (_("Full (0-%1)"), total)));
+                               break;
+                       default:
+                               DCPOMATIC_ASSERT (false);
+                       }
+               } else {
+                       switch (_color_range) {
+                       case AVCOL_RANGE_UNSPECIFIED:
+                               /// TRANSLATORS: this means that the range of pixel values used in this
+                               /// file is unknown (not specified in the file).
+                               p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Unspecified")));
+                               break;
+                       case AVCOL_RANGE_MPEG:
+                               /// TRANSLATORS: this means that the range of pixel values used in this
+                               /// file is limited, so that not all possible values are valid.
+                               p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Limited")));
+                               break;
+                       case AVCOL_RANGE_JPEG:
+                               /// TRANSLATORS: this means that the range of pixel values used in this
+                               /// file is full, so that all possible pixel values are valid.
+                               p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Full")));
+                               break;
+                       default:
+                               DCPOMATIC_ASSERT (false);
+                       }
+               }
+
+               char const * primaries[] = {
+                       _("Unspecified"),
+                       _("BT709"),
+                       _("Unspecified"),
+                       _("Unspecified"),
+                       _("BT470M"),
+                       _("BT470BG"),
+                       _("SMPTE 170M (BT601)"),
+                       _("SMPTE 240M"),
+                       _("Film"),
+                       _("BT2020"),
+                       _("SMPTE ST 428-1 (CIE 1931 XYZ)")
+               };
+
+               DCPOMATIC_ASSERT (AVCOL_PRI_NB <= 11);
+               p.push_back (UserProperty (UserProperty::VIDEO, _("Colour primaries"), primaries[_color_primaries]));
+
+               char const * transfers[] = {
+                       _("Unspecified"),
+                       _("BT709"),
+                       _("Unspecified"),
+                       _("Unspecified"),
+                       _("Gamma 22 (BT470M)"),
+                       _("Gamma 28 (BT470BG)"),
+                       _("SMPTE 170M (BT601)"),
+                       _("SMPTE 240M"),
+                       _("Linear"),
+                       _("Logarithmic (100:1 range)"),
+                       _("Logarithmic (316:1 range)"),
+                       _("IEC61966-2-4"),
+                       _("BT1361 extended colour gamut"),
+                       _("IEC61966-2-1 (sRGB or sYCC)"),
+                       _("BT2020 for a 10-bit system"),
+                       _("BT2020 for a 12-bit system"),
+                       _("SMPTE ST 2084 for 10, 12, 14 and 16 bit systems"),
+                       _("SMPTE ST 428-1"),
+                       _("ARIB STD-B67 ('Hybrid log-gamma')")
+               };
+
+               DCPOMATIC_ASSERT (AVCOL_TRC_NB <= 19);
+               p.push_back (UserProperty (UserProperty::VIDEO, _("Colour transfer characteristic"), transfers[_color_trc]));
+
+               char const * spaces[] = {
+                       _("RGB / sRGB (IEC61966-2-1)"),
+                       _("BT709"),
+                       _("Unspecified"),
+                       _("Unspecified"),
+                       _("FCC"),
+                       _("BT470BG (BT601-6)"),
+                       _("SMPTE 170M (BT601-6)"),
+                       _("SMPTE 240M"),
+                       _("YCOCG"),
+                       _("BT2020 non-constant luminance"),
+                       _("BT2020 constant luminance"),
+               };
+
+               DCPOMATIC_ASSERT (AVCOL_SPC_NB == 11);
+               p.push_back (UserProperty (UserProperty::VIDEO, _("Colourspace"), spaces[_colorspace]));
+
+               if (_bits_per_pixel) {
+                       p.push_back (UserProperty (UserProperty::VIDEO, _("Bits per pixel"), raw_convert<string> (_bits_per_pixel.get ())));
                }
        }
 
-       return d;
+       if (audio) {
+               audio->add_properties (p);
+       }
 }
 
-bool
-FFmpegContent::has_subtitles () const
+/** Our subtitle streams have colour maps, which can be changed, but
+ *  they have no way of signalling that change.  As a hack, we have this
+ *  method which callers can use when they've modified one of our subtitle
+ *  streams.
+ */
+void
+FFmpegContent::signal_subtitle_stream_changed ()
 {
-       return !subtitle_streams().empty ();
+       signal_changed (FFmpegContentProperty::SUBTITLE_STREAM);
+}
+
+vector<shared_ptr<FFmpegAudioStream> >
+FFmpegContent::ffmpeg_audio_streams () const
+{
+       vector<shared_ptr<FFmpegAudioStream> > fa;
+
+       if (audio) {
+               BOOST_FOREACH (AudioStreamPtr i, audio->streams()) {
+                       fa.push_back (dynamic_pointer_cast<FFmpegAudioStream> (i));
+               }
+       }
+
+       return fa;
 }