X-Git-Url: https://main.carlh.net/gitweb/?p=dcpomatic.git;a=blobdiff_plain;f=src%2Flib%2Fffmpeg_content.cc;h=60c777b3c3d9f6107386525fa211630c14906afc;hp=a4209f5b648306e734861c56ea96329b79a25a4a;hb=422be0eece2bf6ee80db1d3c21553cd82efff789;hpb=0b6c6de07f9a3aa28c2e8ca8ef30340e3fa1bfc6

diff --git a/src/lib/ffmpeg_content.cc b/src/lib/ffmpeg_content.cc
index a4209f5b6..60c777b3c 100644
--- a/src/lib/ffmpeg_content.cc
+++ b/src/lib/ffmpeg_content.cc
@@ -1,29 +1,29 @@
 /*
-    Copyright (C) 2013-2014 Carl Hetherington
+    Copyright (C) 2013-2016 Carl Hetherington
 
-    This program is free software; you can redistribute it and/or modify
+    This file is part of DCP-o-matic.
+
+    DCP-o-matic is free software; you can redistribute it and/or modify
     it under the terms of the GNU General Public License as published by
     the Free Software Foundation; either version 2 of the License, or
     (at your option) any later version.
 
-    This program is distributed in the hope that it will be useful,
+    DCP-o-matic is distributed in the hope that it will be useful,
     but WITHOUT ANY WARRANTY; without even the implied warranty of
     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
     GNU General Public License for more details.
 
     You should have received a copy of the GNU General Public License
-    along with this program; if not, write to the Free Software
-    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+    along with DCP-o-matic.  If not, see .
 
 */
 
-extern "C" {
-#include 
-}
-#include 
-#include 
 #include "ffmpeg_content.h"
+#include "video_content.h"
+#include "audio_content.h"
 #include "ffmpeg_examiner.h"
+#include "ffmpeg_subtitle_stream.h"
+#include "ffmpeg_audio_stream.h"
 #include "compose.hpp"
 #include "job.h"
 #include "util.h"
@@ -32,45 +32,53 @@ extern "C" {
 #include "log.h"
 #include "exceptions.h"
 #include "frame_rate_change.h"
-#include "safe_stringstream.h"
+#include "raw_convert.h"
+#include "subtitle_content.h"
+#include 
+#include 
+extern "C" {
+#include 
+#include 
+}
+#include 
+#include 
+#include 
 
 #include "i18n.h"
 
-#define LOG_GENERAL(...) film->log()->log (String::compose (__VA_ARGS__), Log::TYPE_GENERAL);
+#define LOG_GENERAL(...) film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);
 using std::string;
 using std::vector;
 using std::list;
 using std::cout;
 using std::pair;
+using std::make_pair;
+using std::max;
 using boost::shared_ptr;
 using boost::dynamic_pointer_cast;
-using libdcp::raw_convert;
+using boost::optional;
 
 int const FFmpegContentProperty::SUBTITLE_STREAMS = 100;
 int const FFmpegContentProperty::SUBTITLE_STREAM = 101;
-int const FFmpegContentProperty::AUDIO_STREAMS = 102;
-int const FFmpegContentProperty::AUDIO_STREAM = 103;
-int const FFmpegContentProperty::FILTERS = 104;
-
-FFmpegContent::FFmpegContent (shared_ptr f, boost::filesystem::path p)
-    : Content (f, p)
-    , VideoContent (f, p)
-    , AudioContent (f, p)
-    , SubtitleContent (f, p)
+int const FFmpegContentProperty::FILTERS = 102;
+
+FFmpegContent::FFmpegContent (shared_ptr film, boost::filesystem::path p)
+    : Content (film, p)
 {
 
 }
 
-FFmpegContent::FFmpegContent (shared_ptr f, shared_ptr node, int version, list& notes)
-    : Content (f, node)
-    , VideoContent (f, node, version)
-    , AudioContent (f, node)
-    , SubtitleContent (f, node, version)
+FFmpegContent::FFmpegContent (shared_ptr film, cxml::ConstNodePtr node, int version, list& notes)
+    : Content (film, node)
 {
+    video = VideoContent::from_xml (this, node, version);
+    audio = AudioContent::from_xml (this, node, version);
+    subtitle = SubtitleContent::from_xml (this, node, version);
+
     list c = node->node_children ("SubtitleStream");
     for (list::const_iterator i = c.begin(); i != c.end(); ++i) {
-        _subtitle_streams.push_back (shared_ptr (new FFmpegSubtitleStream (*i)));
+        _subtitle_streams.push_back (shared_ptr (new FFmpegSubtitleStream (*i, version)));
         if ((*i)->optional_number_child ("Selected")) {
             _subtitle_stream = _subtitle_streams.back ();
         }
@@ -78,9 +86,11 @@ FFmpegContent::FFmpegContent (shared_ptr f, shared_ptr
     c = node->node_children ("AudioStream");
     for (list::const_iterator i = c.begin(); i != c.end(); ++i) {
-        _audio_streams.push_back (shared_ptr (new FFmpegAudioStream (*i, version)));
-        if ((*i)->optional_number_child ("Selected")) {
-            _audio_stream = _audio_streams.back ();
+        shared_ptr as (new FFmpegAudioStream (*i, version));
+        audio->add_stream (as);
+        if (version < 11 && !(*i)->optional_node_child ("Selected")) {
+            /* This is an old file and this stream is not selected, so un-map it */
+            as->set_mapping (AudioMapping (as->channels (), MAX_DCP_AUDIO_CHANNELS));
         }
     }
 
@@ -94,34 +104,80 @@ FFmpegContent::FFmpegContent (shared_ptr f, shared_ptr
-    _first_video = node->optional_number_child ("FirstVideo");
+    optional const f = node->optional_number_child ("FirstVideo");
+    if (f) {
+        _first_video = ContentTime (f.get ());
+    }
+
+    _color_range = static_cast (node->optional_number_child("ColorRange").get_value_or (AVCOL_RANGE_UNSPECIFIED));
+    _color_primaries = static_cast (node->optional_number_child("ColorPrimaries").get_value_or (AVCOL_PRI_UNSPECIFIED));
+    _color_trc = static_cast (
+        node->optional_number_child("ColorTransferCharacteristic").get_value_or (AVCOL_TRC_UNSPECIFIED)
+        );
+    _colorspace = static_cast (node->optional_number_child("Colorspace").get_value_or (AVCOL_SPC_UNSPECIFIED));
+    _bits_per_pixel = node->optional_number_child ("BitsPerPixel");
+
 }
 
-FFmpegContent::FFmpegContent (shared_ptr f, vector > c)
-    : Content (f, c)
-    , VideoContent (f, c)
-    , AudioContent (f, c)
-    , SubtitleContent (f, c)
+FFmpegContent::FFmpegContent (shared_ptr film, vector > c)
+    : Content (film, c)
 {
+    vector >::const_iterator i = c.begin ();
+
+    bool need_video = false;
+    bool need_audio = false;
+    bool need_subtitle = false;
+
+    if (i != c.end ()) {
+        need_video = static_cast ((*i)->video);
+        need_audio = static_cast ((*i)->audio);
+        need_subtitle = static_cast ((*i)->subtitle);
+    }
+
+    while (i != c.end ()) {
+        if (need_video != static_cast ((*i)->video)) {
+            throw JoinError (_("Content to be joined must all have or not have video"));
+        }
+        if (need_audio != static_cast ((*i)->audio)) {
+            throw JoinError (_("Content to be joined must all have or not have audio"));
+        }
+        if (need_subtitle != static_cast ((*i)->subtitle)) {
+            throw JoinError (_("Content to be joined must all have or not have subtitles"));
+        }
+        ++i;
+    }
+
+    if (need_video) {
+        video.reset (new VideoContent (this, c));
+    }
+    if (need_audio) {
+        audio.reset (new AudioContent (this, c));
+    }
+    if (need_subtitle) {
+        subtitle.reset (new SubtitleContent (this, c));
+    }
+
     shared_ptr ref = dynamic_pointer_cast (c[0]);
-    assert (ref);
+    DCPOMATIC_ASSERT (ref);
 
     for (size_t i = 0; i < c.size(); ++i) {
         shared_ptr fc = dynamic_pointer_cast (c[i]);
-        if (f->with_subtitles() && *(fc->_subtitle_stream.get()) != *(ref->_subtitle_stream.get())) {
+        if (fc->subtitle && fc->subtitle->use() && *(fc->_subtitle_stream.get()) != *(ref->_subtitle_stream.get())) {
             throw JoinError (_("Content to be joined must use the same subtitle stream."));
         }
-
-        if (*(fc->_audio_stream.get()) != *(ref->_audio_stream.get())) {
-            throw JoinError (_("Content to be joined must use the same audio stream."));
-        }
     }
 
+    /* XXX: should probably check that more of the stuff below is the same in *this and ref */
+
     _subtitle_streams = ref->subtitle_streams ();
     _subtitle_stream = ref->subtitle_stream ();
-    _audio_streams = ref->audio_streams ();
-    _audio_stream = ref->audio_stream ();
     _first_video = ref->_first_video;
+    _filters = ref->_filters;
+    _color_range = ref->_color_range;
+    _color_primaries = ref->_color_primaries;
+    _color_trc = ref->_color_trc;
+    _colorspace = ref->_colorspace;
+    _bits_per_pixel = ref->_bits_per_pixel;
 }
 
 void
@@ -129,9 +185,24 @@ FFmpegContent::as_xml (xmlpp::Node* node) const
 {
     node->add_child("Type")->add_child_text ("FFmpeg");
     Content::as_xml (node);
-    VideoContent::as_xml (node);
-    AudioContent::as_xml (node);
-    SubtitleContent::as_xml (node);
+
+    if (video) {
+        video->as_xml (node);
+    }
+
+    if (audio) {
+        audio->as_xml (node);
+
+        BOOST_FOREACH (AudioStreamPtr i, audio->streams ()) {
+            shared_ptr f = dynamic_pointer_cast (i);
+            DCPOMATIC_ASSERT (f);
+            f->as_xml (node->add_child("AudioStream"));
+        }
+    }
+
+    if (subtitle) {
+        subtitle->as_xml (node);
+    }
 
     boost::mutex::scoped_lock lm (_mutex);
 
@@ -143,20 +214,20 @@ FFmpegContent::as_xml (xmlpp::Node* node) const
         (*i)->as_xml (t);
     }
 
-    for (vector >::const_iterator i = _audio_streams.begin(); i != _audio_streams.end(); ++i) {
-        xmlpp::Node* t = node->add_child("AudioStream");
-        if (_audio_stream && *i == _audio_stream) {
-            t->add_child("Selected")->add_child_text("1");
-        }
-        (*i)->as_xml (t);
-    }
-
     for (vector::const_iterator i = _filters.begin(); i != _filters.end(); ++i) {
         node->add_child("Filter")->add_child_text ((*i)->id ());
     }
 
     if (_first_video) {
-        node->add_child("FirstVideo")->add_child_text (raw_convert (_first_video.get ()));
+        node->add_child("FirstVideo")->add_child_text (raw_convert (_first_video.get().get()));
+    }
+
+    node->add_child("ColorRange")->add_child_text (raw_convert (_color_range));
+    node->add_child("ColorPrimaries")->add_child_text (raw_convert (_color_primaries));
+    node->add_child("ColorTransferCharacteristic")->add_child_text (raw_convert (_color_trc));
+    node->add_child("Colorspace")->add_child_text (raw_convert (_colorspace));
+    if (_bits_per_pixel) {
+        node->add_child("BitsPerPixel")->add_child_text (raw_convert (_bits_per_pixel.get ()));
     }
 }
 
@@ -167,56 +238,77 @@ FFmpegContent::examine (shared_ptr job)
 
     Content::examine (job);
 
-    shared_ptr film = _film.lock ();
-    assert (film);
+    shared_ptr examiner (new FFmpegExaminer (shared_from_this (), job));
 
-    shared_ptr examiner (new FFmpegExaminer (shared_from_this ()));
+    if (examiner->has_video ()) {
+        video.reset (new VideoContent (this));
+        video->take_from_examiner (examiner);
+        set_default_colour_conversion ();
+    }
 
-    VideoContent::Frame video_length = 0;
-    video_length = examiner->video_length ();
-    LOG_GENERAL ("Video length obtained from header as %1 frames", video_length);
+    boost::filesystem::path first_path = path (0);
 
     {
        boost::mutex::scoped_lock lm (_mutex);
-       _video_length = video_length;
+       if (examiner->has_video ()) {
+           _first_video = examiner->first_video ();
+           _color_range = examiner->color_range ();
+           _color_primaries = examiner->color_primaries ();
+           _color_trc = examiner->color_trc ();
+           _colorspace = examiner->colorspace ();
+           _bits_per_pixel = examiner->bits_per_pixel ();
+       }
+
+       if (!examiner->audio_streams().empty ()) {
+           audio.reset (new AudioContent (this));
+
+           BOOST_FOREACH (shared_ptr i, examiner->audio_streams ()) {
+               audio->add_stream (i);
+           }
+
+           AudioStreamPtr as = audio->streams().front();
+           AudioMapping m = as->mapping ();
+           film()->make_audio_mapping_default (m, first_path);
+           as->set_mapping (m);
+       }
 
        _subtitle_streams = examiner->subtitle_streams ();
        if (!_subtitle_streams.empty ()) {
+           subtitle.reset (new SubtitleContent (this));
            _subtitle_stream = _subtitle_streams.front ();
        }
-
-       _audio_streams = examiner->audio_streams ();
-       if (!_audio_streams.empty ()) {
-           _audio_stream = _audio_streams.front ();
-       }
-
-       _first_video = examiner->first_video ();
     }
 
-    take_from_video_examiner (examiner);
-
-    signal_changed (ContentProperty::LENGTH);
     signal_changed (FFmpegContentProperty::SUBTITLE_STREAMS);
     signal_changed (FFmpegContentProperty::SUBTITLE_STREAM);
-    signal_changed (FFmpegContentProperty::AUDIO_STREAMS);
-    signal_changed (FFmpegContentProperty::AUDIO_STREAM);
-    signal_changed (AudioContentProperty::AUDIO_CHANNELS);
 }
 
 string
 FFmpegContent::summary () const
 {
-    /* Get the string() here so that the name does not have quotes around it */
-    return String::compose (_("%1 [movie]"), path_summary ());
+    if (video && audio) {
+        return String::compose (_("%1 [movie]"), path_summary ());
+    } else if (video) {
+        return String::compose (_("%1 [video]"), path_summary ());
+    } else if (audio) {
+        return String::compose (_("%1 [audio]"), path_summary ());
+    }
+
+    return path_summary ();
 }
 
 string
 FFmpegContent::technical_summary () const
 {
-    string as = "none";
-    if (_audio_stream) {
-        as = _audio_stream->technical_summary ();
+    string as = "";
+    BOOST_FOREACH (shared_ptr i, ffmpeg_audio_streams ()) {
+        as += i->technical_summary () + " " ;
+    }
+
+    if (as.empty ()) {
+        as = "none";
     }
 
     string ss = "none";
@@ -225,28 +317,20 @@ FFmpegContent::technical_summary () const
     }
 
     string filt = Filter::ffmpeg_string (_filters);
-
-    return Content::technical_summary() + " - "
-        + VideoContent::technical_summary() + " - "
-        + AudioContent::technical_summary() + " - "
-        + String::compose (
-            "ffmpeg: audio %1, subtitle %2, filters %3", as, ss, filt
-            );
-}
-
-string
-FFmpegContent::information () const
-{
-    if (video_length() == 0 || video_frame_rate() == 0) {
-        return "";
+    string s = Content::technical_summary ();
+
+    if (video) {
+        s += " - " + video->technical_summary ();
     }
-
-    SafeStringStream s;
-
-    s << String::compose (_("%1 frames; %2 frames per second"), video_length_after_3d_combine(), video_frame_rate()) << "\n";
-    s << VideoContent::information ();
-    return s.str ();
+    if (audio) {
+        s += " - " + audio->technical_summary ();
+    }
+
+    return s + String::compose (
+        "ffmpeg: audio %1 subtitle %2 filters %3", as, ss, filt
+        );
 }
 
 void
@@ -260,225 +344,260 @@ FFmpegContent::set_subtitle_stream (shared_ptr s)
     signal_changed (FFmpegContentProperty::SUBTITLE_STREAM);
 }
 
-void
-FFmpegContent::set_audio_stream (shared_ptr s)
+bool
+operator== (FFmpegStream const & a, FFmpegStream const & b)
 {
-    {
-        boost::mutex::scoped_lock lm (_mutex);
-        _audio_stream = s;
-    }
-
-    signal_changed (FFmpegContentProperty::AUDIO_STREAM);
+    return a._id == b._id;
 }
 
-AudioContent::Frame
-FFmpegContent::audio_length () const
+bool
+operator!= (FFmpegStream const & a, FFmpegStream const & b)
 {
-    int const cafr = content_audio_frame_rate ();
-    float const vfr = video_frame_rate ();
-    VideoContent::Frame const vl = video_length_after_3d_combine ();
-
-    boost::mutex::scoped_lock lm (_mutex);
-    if (!_audio_stream) {
-        return 0;
-    }
-
-    return video_frames_to_audio_frames (vl, cafr, vfr);
+    return a._id != b._id;
 }
 
-int
-FFmpegContent::audio_channels () const
+DCPTime
+FFmpegContent::full_length () const
 {
-    boost::mutex::scoped_lock lm (_mutex);
-
-    if (!_audio_stream) {
-        return 0;
+    FrameRateChange const frc (active_video_frame_rate (), film()->video_frame_rate ());
+    if (video) {
+        return DCPTime::from_frames (llrint (video->length_after_3d_combine() * frc.factor()), film()->video_frame_rate());
     }
 
-    return _audio_stream->channels;
-}
-
-int
-FFmpegContent::content_audio_frame_rate () const
-{
-    boost::mutex::scoped_lock lm (_mutex);
+    DCPOMATIC_ASSERT (audio);
 
-    if (!_audio_stream) {
-        return 0;
+    DCPTime longest;
+    BOOST_FOREACH (AudioStreamPtr i, audio->streams ()) {
+        longest = max (longest, DCPTime::from_frames (llrint (i->length() / frc.speed_up), i->frame_rate()));
     }
 
-    return _audio_stream->frame_rate;
+    return longest;
 }
 
-bool
-operator== (FFmpegStream const & a, FFmpegStream const & b)
+void
+FFmpegContent::set_filters (vector const & filters)
 {
-    return a._id == b._id;
-}
+    {
+        boost::mutex::scoped_lock lm (_mutex);
+        _filters = filters;
+    }
 
-bool
-operator!= (FFmpegStream const & a, FFmpegStream const & b)
-{
-    return a._id != b._id;
+    signal_changed (FFmpegContentProperty::FILTERS);
 }
 
-FFmpegStream::FFmpegStream (shared_ptr node)
-    : name (node->string_child ("Name"))
-    , _id (node->number_child ("Id"))
+string
+FFmpegContent::identifier () const
 {
+    locked_stringstream s;
 
-}
+    s << Content::identifier();
 
-void
-FFmpegStream::as_xml (xmlpp::Node* root) const
-{
-    root->add_child("Name")->add_child_text (name);
-    root->add_child("Id")->add_child_text (raw_convert (_id));
-}
+    if (video) {
+        s << "_" << video->identifier();
+    }
 
-FFmpegAudioStream::FFmpegAudioStream (shared_ptr node, int version)
-    : FFmpegStream (node)
-    , mapping (node->node_child ("Mapping"), version)
-{
-    frame_rate = node->number_child ("FrameRate");
-    channels = node->number_child ("Channels");
-    first_audio = node->optional_number_child ("FirstAudio");
-}
+    if (subtitle) {
+        s << "_" << subtitle->identifier();
+    }
 
-void
-FFmpegAudioStream::as_xml (xmlpp::Node* root) const
-{
-    FFmpegStream::as_xml (root);
-    root->add_child("FrameRate")->add_child_text (raw_convert (frame_rate));
-    root->add_child("Channels")->add_child_text (raw_convert (channels));
-    if (first_audio) {
-        root->add_child("FirstAudio")->add_child_text (raw_convert (first_audio.get ()));
+    boost::mutex::scoped_lock lm (_mutex);
+
+    if (_subtitle_stream) {
+        s << "_" << _subtitle_stream->identifier ();
     }
-    mapping.as_xml (root->add_child("Mapping"));
-}
 
-bool
-FFmpegStream::uses_index (AVFormatContext const * fc, int index) const
-{
-    size_t i = 0;
-    while (i < fc->nb_streams) {
-        if (fc->streams[i]->id == _id) {
-            return int (i) == index;
-        }
-        ++i;
+    for (vector::const_iterator i = _filters.begin(); i != _filters.end(); ++i) {
+        s << "_" << (*i)->id ();
     }
 
-    return false;
+    return s.str ();
 }
 
-AVStream *
-FFmpegStream::stream (AVFormatContext const * fc) const
+list
+FFmpegContent::image_subtitles_during (ContentTimePeriod period, bool starting) const
 {
-    size_t i = 0;
-    while (i < fc->nb_streams) {
-        if (fc->streams[i]->id == _id) {
-            return fc->streams[i];
-        }
-        ++i;
+    shared_ptr stream = subtitle_stream ();
+    if (!stream) {
+        return list ();
     }
 
-    assert (false);
-    return 0;
+    return stream->image_subtitles_during (period, starting);
 }
 
-/** Construct a SubtitleStream from a value returned from to_string().
- *  @param t String returned from to_string().
- *  @param v State file version.
- */
-FFmpegSubtitleStream::FFmpegSubtitleStream (shared_ptr node)
-    : FFmpegStream (node)
+list
+FFmpegContent::text_subtitles_during (ContentTimePeriod period, bool starting) const
 {
-
+    shared_ptr stream = subtitle_stream ();
+    if (!stream) {
+        return list ();
+    }
+
+    return stream->text_subtitles_during (period, starting);
 }
 
 void
-FFmpegSubtitleStream::as_xml (xmlpp::Node* root) const
+FFmpegContent::set_default_colour_conversion ()
 {
-    FFmpegStream::as_xml (root);
-}
+    DCPOMATIC_ASSERT (video);
 
-Time
-FFmpegContent::full_length () const
-{
-    shared_ptr film = _film.lock ();
-    assert (film);
-
-    FrameRateChange frc (video_frame_rate (), film->video_frame_rate ());
-    return video_length_after_3d_combine() * frc.factor() * TIME_HZ / film->video_frame_rate ();
-}
+    dcp::Size const s = video->size ();
 
-AudioMapping
-FFmpegContent::audio_mapping () const
-{
     boost::mutex::scoped_lock lm (_mutex);
 
-    if (!_audio_stream) {
-        return AudioMapping ();
+    if (s.width < 1080) {
+        video->set_colour_conversion (PresetColourConversion::from_id ("rec601").conversion);
+    } else {
+        video->set_colour_conversion (PresetColourConversion::from_id ("rec709").conversion);
     }
-
-    return _audio_stream->mapping;
 }
 
 void
-FFmpegContent::set_filters (vector const & filters)
+FFmpegContent::add_properties (list& p) const
 {
-    {
-        boost::mutex::scoped_lock lm (_mutex);
-        _filters = filters;
+    Content::add_properties (p);
+
+    if (video) {
+        video->add_properties (p);
+
+        if (_bits_per_pixel) {
+            int const sub = 219 * pow (2, _bits_per_pixel.get() - 8);
+            int const total = pow (2, _bits_per_pixel.get());
+
+            switch (_color_range) {
+            case AVCOL_RANGE_UNSPECIFIED:
+                /// TRANSLATORS: this means that the range of pixel values used in this
+                /// file is unknown (not specified in the file).
+                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Unspecified")));
+                break;
+            case AVCOL_RANGE_MPEG:
+                /// TRANSLATORS: this means that the range of pixel values used in this
+                /// file is limited, so that not all possible values are valid.
+                p.push_back (
+                    UserProperty (
+                        UserProperty::VIDEO, _("Colour range"), String::compose (_("Limited (%1-%2)"), (total - sub) / 2, (total + sub) / 2)
+                        )
+                    );
+                break;
+            case AVCOL_RANGE_JPEG:
+                /// TRANSLATORS: this means that the range of pixel values used in this
+                /// file is full, so that all possible pixel values are valid.
+                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), String::compose (_("Full (0-%1)"), total)));
+                break;
+            default:
+                DCPOMATIC_ASSERT (false);
+            }
+        } else {
+            switch (_color_range) {
+            case AVCOL_RANGE_UNSPECIFIED:
+                /// TRANSLATORS: this means that the range of pixel values used in this
+                /// file is unknown (not specified in the file).
+                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Unspecified")));
+                break;
+            case AVCOL_RANGE_MPEG:
+                /// TRANSLATORS: this means that the range of pixel values used in this
+                /// file is limited, so that not all possible values are valid.
+                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Limited")));
+                break;
+            case AVCOL_RANGE_JPEG:
+                /// TRANSLATORS: this means that the range of pixel values used in this
+                /// file is full, so that all possible pixel values are valid.
+                p.push_back (UserProperty (UserProperty::VIDEO, _("Colour range"), _("Full")));
+                break;
+            default:
+                DCPOMATIC_ASSERT (false);
+            }
+        }
+
+        char const * primaries[] = {
+            _("Unspecified"),
+            _("BT709"),
+            _("Unspecified"),
+            _("Unspecified"),
+            _("BT470M"),
+            _("BT470BG"),
+            _("SMPTE 170M (BT601)"),
+            _("SMPTE 240M"),
+            _("Film"),
+            _("BT2020"),
+            _("SMPTE ST 428-1 (CIE 1931 XYZ)")
+        };
+
+        DCPOMATIC_ASSERT (AVCOL_PRI_NB <= 11);
+        p.push_back (UserProperty (UserProperty::VIDEO, _("Colour primaries"), primaries[_color_primaries]));
+
+        char const * transfers[] = {
+            _("Unspecified"),
+            _("BT709"),
+            _("Unspecified"),
+            _("Unspecified"),
+            _("Gamma 22 (BT470M)"),
+            _("Gamma 28 (BT470BG)"),
+            _("SMPTE 170M (BT601)"),
+            _("SMPTE 240M"),
+            _("Linear"),
+            _("Logarithmic (100:1 range)"),
+            _("Logarithmic (316:1 range)"),
+            _("IEC61966-2-4"),
+            _("BT1361 extended colour gamut"),
+            _("IEC61966-2-1 (sRGB or sYCC)"),
+            _("BT2020 for a 10-bit system"),
+            _("BT2020 for a 12-bit system"),
+            _("SMPTE ST 2084 for 10, 12, 14 and 16 bit systems"),
+            _("SMPTE ST 428-1"),
+            _("ARIB STD-B67 ('Hybrid log-gamma')")
+        };
+
+        DCPOMATIC_ASSERT (AVCOL_TRC_NB <= 19);
+        p.push_back (UserProperty (UserProperty::VIDEO, _("Colour transfer characteristic"), transfers[_color_trc]));
+
+        char const * spaces[] = {
+            _("RGB / sRGB (IEC61966-2-1)"),
+            _("BT709"),
+            _("Unspecified"),
+            _("Unspecified"),
+            _("FCC"),
+            _("BT470BG (BT601-6)"),
+            _("SMPTE 170M (BT601-6)"),
+            _("SMPTE 240M"),
+            _("YCOCG"),
+            _("BT2020 non-constant luminance"),
+            _("BT2020 constant luminance"),
+        };
+
+        DCPOMATIC_ASSERT (AVCOL_SPC_NB == 11);
+        p.push_back (UserProperty (UserProperty::VIDEO, _("Colourspace"), spaces[_colorspace]));
+
+        if (_bits_per_pixel) {
+            p.push_back (UserProperty (UserProperty::VIDEO, _("Bits per pixel"), raw_convert (_bits_per_pixel.get ())));
+        }
     }
 
-    signal_changed (FFmpegContentProperty::FILTERS);
+    if (audio) {
+        audio->add_properties (p);
+    }
 }
 
+/** Our subtitle streams have colour maps, which can be changed, but
+ *  they have no way of signalling that change.  As a hack, we have this
+ *  method which callers can use when they've modified one of our subtitle
+ *  streams.
+ */
 void
-FFmpegContent::set_audio_mapping (AudioMapping m)
+FFmpegContent::signal_subtitle_stream_changed ()
 {
-    audio_stream()->mapping = m;
-    signal_changed (AudioContentProperty::AUDIO_MAPPING);
+    signal_changed (FFmpegContentProperty::SUBTITLE_STREAM);
 }
 
-string
-FFmpegContent::identifier () const
+vector >
+FFmpegContent::ffmpeg_audio_streams () const
 {
-    SafeStringStream s;
-
-    s << VideoContent::identifier();
-
-    boost::mutex::scoped_lock lm (_mutex);
+    vector > fa;
 
-    if (_subtitle_stream) {
-        s << "_" << _subtitle_stream->identifier ();
-    }
-
-    for (vector::const_iterator i = _filters.begin(); i != _filters.end(); ++i) {
-        s << "_" << (*i)->id ();
-    }
-
-    return s.str ();
-}
-
-boost::filesystem::path
-FFmpegContent::audio_analysis_path () const
-{
-    shared_ptr film = _film.lock ();
-    if (!film) {
-        return boost::filesystem::path ();
+    if (audio) {
+        BOOST_FOREACH (AudioStreamPtr i, audio->streams()) {
+            fa.push_back (dynamic_pointer_cast (i));
+        }
     }
 
-    /* We need to include the stream ID in this path so that we get different
-       analyses for each stream.
-    */
-
-    boost::filesystem::path p = film->audio_analysis_dir ();
-    string name = digest ();
-    if (audio_stream ()) {
-        name += "_" + audio_stream()->identifier ();
-    }
-    p /= name;
-    return p;
+    return fa;
 }