#include "log.h"
#include "exceptions.h"
#include "frame_rate_change.h"
-#include "subtitle_content.h"
+#include "text_content.h"
#include <dcp/raw_convert.h>
#include <libcxml/cxml.h>
extern "C" {
#include "i18n.h"
-#define LOG_GENERAL(...) film->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);
-
using std::string;
using std::vector;
using std::list;
int const FFmpegContentProperty::SUBTITLE_STREAM = 101;
int const FFmpegContentProperty::FILTERS = 102;
-FFmpegContent::FFmpegContent (shared_ptr<const Film> film, boost::filesystem::path p)
- : Content (film, p)
+FFmpegContent::FFmpegContent (boost::filesystem::path p)
+ : Content (p)
+ , _encrypted (false)
{
}
-FFmpegContent::FFmpegContent (shared_ptr<const Film> film, cxml::ConstNodePtr node, int version, list<string>& notes)
- : Content (film, node)
+template <class T>
+optional<T>
+get_optional_enum (cxml::ConstNodePtr node, string name)
+{
+ optional<int> const v = node->optional_number_child<int>(name);
+ if (!v) {
+ return optional<T>();
+ }
+ return static_cast<T>(*v);
+}
+
+FFmpegContent::FFmpegContent (cxml::ConstNodePtr node, int version, list<string>& notes)
+ : Content (node)
{
video = VideoContent::from_xml (this, node, version);
audio = AudioContent::from_xml (this, node, version);
- subtitle = SubtitleContent::from_xml (this, node, version);
+ text = TextContent::from_xml (this, node, version);
list<cxml::NodePtr> c = node->node_children ("SubtitleStream");
for (list<cxml::NodePtr>::const_iterator i = c.begin(); i != c.end(); ++i) {
_first_video = ContentTime (f.get ());
}
- _color_range = static_cast<AVColorRange> (node->optional_number_child<int>("ColorRange").get_value_or (AVCOL_RANGE_UNSPECIFIED));
- _color_primaries = static_cast<AVColorPrimaries> (node->optional_number_child<int>("ColorPrimaries").get_value_or (AVCOL_PRI_UNSPECIFIED));
- _color_trc = static_cast<AVColorTransferCharacteristic> (
- node->optional_number_child<int>("ColorTransferCharacteristic").get_value_or (AVCOL_TRC_UNSPECIFIED)
- );
- _colorspace = static_cast<AVColorSpace> (node->optional_number_child<int>("Colorspace").get_value_or (AVCOL_SPC_UNSPECIFIED));
+ _color_range = get_optional_enum<AVColorRange>(node, "ColorRange");
+ _color_primaries = get_optional_enum<AVColorPrimaries>(node, "ColorPrimaries");
+ _color_trc = get_optional_enum<AVColorTransferCharacteristic>(node, "ColorTransferCharacteristic");
+ _colorspace = get_optional_enum<AVColorSpace>(node, "Colorspace");
_bits_per_pixel = node->optional_number_child<int> ("BitsPerPixel");
-
+ _decryption_key = node->optional_string_child ("DecryptionKey");
+ _encrypted = node->optional_bool_child("Encrypted").get_value_or(false);
}
-FFmpegContent::FFmpegContent (shared_ptr<const Film> film, vector<shared_ptr<Content> > c)
- : Content (film, c)
+FFmpegContent::FFmpegContent (vector<shared_ptr<Content> > c)
+ : Content (c)
{
vector<shared_ptr<Content> >::const_iterator i = c.begin ();
bool need_video = false;
bool need_audio = false;
- bool need_subtitle = false;
+ bool need_text = false;
if (i != c.end ()) {
need_video = static_cast<bool> ((*i)->video);
need_audio = static_cast<bool> ((*i)->audio);
- need_subtitle = static_cast<bool> ((*i)->subtitle);
+ need_text = !(*i)->text.empty();
}
while (i != c.end ()) {
if (need_audio != static_cast<bool> ((*i)->audio)) {
throw JoinError (_("Content to be joined must all have or not have audio"));
}
- if (need_subtitle != static_cast<bool> ((*i)->subtitle)) {
- throw JoinError (_("Content to be joined must all have or not have subtitles"));
+ if (need_text != !(*i)->text.empty()) {
+ throw JoinError (_("Content to be joined must all have or not have subtitles or captions"));
}
++i;
}
if (need_audio) {
audio.reset (new AudioContent (this, c));
}
- if (need_subtitle) {
- subtitle.reset (new SubtitleContent (this, c));
+ if (need_text) {
+ text.push_back (shared_ptr<TextContent> (new TextContent (this, c)));
}
shared_ptr<FFmpegContent> ref = dynamic_pointer_cast<FFmpegContent> (c[0]);
for (size_t i = 0; i < c.size(); ++i) {
shared_ptr<FFmpegContent> fc = dynamic_pointer_cast<FFmpegContent> (c[i]);
- if (fc->subtitle && fc->subtitle->use() && *(fc->_subtitle_stream.get()) != *(ref->_subtitle_stream.get())) {
+ if (fc->only_text() && fc->only_text()->use() && *(fc->_subtitle_stream.get()) != *(ref->_subtitle_stream.get())) {
throw JoinError (_("Content to be joined must use the same subtitle stream."));
}
}
_color_trc = ref->_color_trc;
_colorspace = ref->_colorspace;
_bits_per_pixel = ref->_bits_per_pixel;
+ _encrypted = ref->_encrypted;
}
void
}
}
- if (subtitle) {
- subtitle->as_xml (node);
+ if (only_text()) {
+ only_text()->as_xml (node);
}
boost::mutex::scoped_lock lm (_mutex);
node->add_child("FirstVideo")->add_child_text (raw_convert<string> (_first_video.get().get()));
}
- node->add_child("ColorRange")->add_child_text (raw_convert<string> (static_cast<int> (_color_range)));
- node->add_child("ColorPrimaries")->add_child_text (raw_convert<string> (static_cast<int> (_color_primaries)));
- node->add_child("ColorTransferCharacteristic")->add_child_text (raw_convert<string> (static_cast<int> (_color_trc)));
- node->add_child("Colorspace")->add_child_text (raw_convert<string> (static_cast<int> (_colorspace)));
+ if (_color_range) {
+ node->add_child("ColorRange")->add_child_text (raw_convert<string> (static_cast<int> (*_color_range)));
+ }
+ if (_color_primaries) {
+ node->add_child("ColorPrimaries")->add_child_text (raw_convert<string> (static_cast<int> (*_color_primaries)));
+ }
+ if (_color_trc) {
+ node->add_child("ColorTransferCharacteristic")->add_child_text (raw_convert<string> (static_cast<int> (*_color_trc)));
+ }
+ if (_colorspace) {
+ node->add_child("Colorspace")->add_child_text (raw_convert<string> (static_cast<int> (*_colorspace)));
+ }
if (_bits_per_pixel) {
- node->add_child("BitsPerPixel")->add_child_text (raw_convert<string> (_bits_per_pixel.get ()));
+ node->add_child("BitsPerPixel")->add_child_text (raw_convert<string> (*_bits_per_pixel));
+ }
+ if (_decryption_key) {
+ node->add_child("DecryptionKey")->add_child_text (_decryption_key.get());
+ }
+ if (_encrypted) {
+ node->add_child("Encrypted")->add_child_text ("1");
}
}
void
-FFmpegContent::examine (shared_ptr<Job> job)
+FFmpegContent::examine (shared_ptr<const Film> film, shared_ptr<Job> job)
{
+ ChangeSignaller<Content> cc1 (this, FFmpegContentProperty::SUBTITLE_STREAMS);
+ ChangeSignaller<Content> cc2 (this, FFmpegContentProperty::SUBTITLE_STREAM);
+
job->set_progress_unknown ();
- Content::examine (job);
+ Content::examine (film, job);
shared_ptr<FFmpegExaminer> examiner (new FFmpegExaminer (shared_from_this (), job));
_color_trc = examiner->color_trc ();
_colorspace = examiner->colorspace ();
_bits_per_pixel = examiner->bits_per_pixel ();
+
+ if (examiner->rotation()) {
+ double rot = *examiner->rotation ();
+ if (fabs (rot - 180) < 1.0) {
+ _filters.push_back (Filter::from_id ("vflip"));
+ _filters.push_back (Filter::from_id ("hflip"));
+ } else if (fabs (rot - 90) < 1.0) {
+ _filters.push_back (Filter::from_id ("90clock"));
+ } else if (fabs (rot - 270) < 1.0) {
+ _filters.push_back (Filter::from_id ("90anticlock"));
+ }
+ }
}
if (!examiner->audio_streams().empty ()) {
AudioStreamPtr as = audio->streams().front();
AudioMapping m = as->mapping ();
- film()->make_audio_mapping_default (m, first_path);
+ m.make_default (film ? film->audio_processor() : 0, first_path);
as->set_mapping (m);
}
_subtitle_streams = examiner->subtitle_streams ();
if (!_subtitle_streams.empty ()) {
- subtitle.reset (new SubtitleContent (this));
+ text.clear ();
+ text.push_back (shared_ptr<TextContent> (new TextContent (this, TEXT_OPEN_SUBTITLE, TEXT_UNKNOWN)));
_subtitle_stream = _subtitle_streams.front ();
}
+ _encrypted = first_path.extension() == ".ecinema";
}
if (examiner->has_video ()) {
set_default_colour_conversion ();
}
- signal_changed (FFmpegContentProperty::SUBTITLE_STREAMS);
- signal_changed (FFmpegContentProperty::SUBTITLE_STREAM);
+ if (examiner->has_video() && examiner->pulldown() && video_frame_rate() && fabs(*video_frame_rate() - 29.97) < 0.001) {
+ /* FFmpeg has detected this file as 29.97 and the examiner thinks it is using "soft" 2:3 pulldown (telecine).
+ * This means we can treat it as a 23.976fps file.
+ */
+ set_video_frame_rate (24000.0 / 1001);
+ video->set_length (video->length() * 24.0 / 30);
+ }
+
+#ifdef DCPOMATIC_VARIANT_SWAROOP
+ _id = examiner->id ();
+#endif
}
string
void
FFmpegContent::set_subtitle_stream (shared_ptr<FFmpegSubtitleStream> s)
{
+ ChangeSignaller<Content> cc (this, FFmpegContentProperty::SUBTITLE_STREAM);
+
{
boost::mutex::scoped_lock lm (_mutex);
_subtitle_stream = s;
}
-
- signal_changed (FFmpegContentProperty::SUBTITLE_STREAM);
}
bool
}
DCPTime
-FFmpegContent::full_length () const
+FFmpegContent::full_length (shared_ptr<const Film> film) const
{
- FrameRateChange const frc (active_video_frame_rate (), film()->video_frame_rate ());
+ FrameRateChange const frc (film, shared_from_this());
if (video) {
- return DCPTime::from_frames (llrint (video->length_after_3d_combine() * frc.factor()), film()->video_frame_rate());
+ return DCPTime::from_frames (llrint (video->length_after_3d_combine() * frc.factor()), film->video_frame_rate());
+ }
+
+ if (audio) {
+ DCPTime longest;
+ BOOST_FOREACH (AudioStreamPtr i, audio->streams()) {
+ longest = max (longest, DCPTime::from_frames(llrint(i->length() / frc.speed_up), i->frame_rate()));
+ }
+ return longest;
+ }
+
+ /* XXX: subtitle content? */
+
+ return DCPTime();
+}
+
+DCPTime
+FFmpegContent::approximate_length () const
+{
+ if (video) {
+ return DCPTime::from_frames (video->length_after_3d_combine(), 24);
}
DCPOMATIC_ASSERT (audio);
- DCPTime longest;
+ Frame longest = 0;
BOOST_FOREACH (AudioStreamPtr i, audio->streams ()) {
- longest = max (longest, DCPTime::from_frames (llrint (i->length() / frc.speed_up), i->frame_rate()));
+ longest = max (longest, Frame(llrint(i->length())));
}
- return longest;
+ return DCPTime::from_frames (longest, 24);
}
void
FFmpegContent::set_filters (vector<Filter const *> const & filters)
{
+ ChangeSignaller<Content> cc (this, FFmpegContentProperty::FILTERS);
+
{
boost::mutex::scoped_lock lm (_mutex);
_filters = filters;
}
-
- signal_changed (FFmpegContentProperty::FILTERS);
}
string
s += "_" + video->identifier();
}
- if (subtitle && subtitle->use() && subtitle->burn()) {
- s += "_" + subtitle->identifier();
+ if (only_text() && only_text()->use() && only_text()->burn()) {
+ s += "_" + only_text()->identifier();
}
boost::mutex::scoped_lock lm (_mutex);
boost::mutex::scoped_lock lm (_mutex);
- switch (_colorspace) {
+ switch (_colorspace.get_value_or(AVCOL_SPC_UNSPECIFIED)) {
case AVCOL_SPC_RGB:
video->set_colour_conversion (PresetColourConversion::from_id ("srgb").conversion);
break;
}
void
-FFmpegContent::add_properties (list<UserProperty>& p) const
+FFmpegContent::add_properties (shared_ptr<const Film> film, list<UserProperty>& p) const
{
- Content::add_properties (p);
+ Content::add_properties (film, p);
if (video) {
video->add_properties (p);
int const sub = 219 * pow (2, _bits_per_pixel.get() - 8);
int const total = pow (2, _bits_per_pixel.get());
- switch (_color_range) {
+ switch (_color_range.get_value_or(AVCOL_RANGE_UNSPECIFIED)) {
case AVCOL_RANGE_UNSPECIFIED:
/// TRANSLATORS: this means that the range of pixel values used in this
/// file is unknown (not specified in the file).
DCPOMATIC_ASSERT (false);
}
} else {
- switch (_color_range) {
+ switch (_color_range.get_value_or(AVCOL_RANGE_UNSPECIFIED)) {
case AVCOL_RANGE_UNSPECIFIED:
/// TRANSLATORS: this means that the range of pixel values used in this
/// file is unknown (not specified in the file).
_("BT2020"),
_("SMPTE ST 428-1 (CIE 1931 XYZ)"),
_("SMPTE ST 431-2 (2011)"),
- _("SMPTE ST 432-1 D65 (2010)")
+ _("SMPTE ST 432-1 D65 (2010)"), // 12
+ "", // 13
+ "", // 14
+ "", // 15
+ "", // 16
+ "", // 17
+ "", // 18
+ "", // 19
+ "", // 20
+ "", // 21
+ _("JEDEC P22")
};
- DCPOMATIC_ASSERT (AVCOL_PRI_NB <= 13);
- p.push_back (UserProperty (UserProperty::VIDEO, _("Colour primaries"), primaries[_color_primaries]));
+ DCPOMATIC_ASSERT (AVCOL_PRI_NB <= 23);
+ p.push_back (UserProperty (UserProperty::VIDEO, _("Colour primaries"), primaries[_color_primaries.get_value_or(AVCOL_PRI_UNSPECIFIED)]));
char const * transfers[] = {
_("Unspecified"),
};
DCPOMATIC_ASSERT (AVCOL_TRC_NB <= 19);
- p.push_back (UserProperty (UserProperty::VIDEO, _("Colour transfer characteristic"), transfers[_color_trc]));
+ p.push_back (UserProperty (UserProperty::VIDEO, _("Colour transfer characteristic"), transfers[_color_trc.get_value_or(AVCOL_TRC_UNSPECIFIED)]));
char const * spaces[] = {
_("RGB / sRGB (IEC61966-2-1)"),
_("BT2020 non-constant luminance"),
_("BT2020 constant luminance"),
_("SMPTE 2085, Y'D'zD'x"),
+ _("Chroma-derived non-constant luminance"),
+ _("Chroma-derived constant luminance"),
+ _("BT2100")
};
- DCPOMATIC_ASSERT (AVCOL_SPC_NB == 12);
- p.push_back (UserProperty (UserProperty::VIDEO, _("Colourspace"), spaces[_colorspace]));
+ DCPOMATIC_ASSERT (AVCOL_SPC_NB == 15);
+ p.push_back (UserProperty (UserProperty::VIDEO, _("Colourspace"), spaces[_colorspace.get_value_or(AVCOL_SPC_UNSPECIFIED)]));
if (_bits_per_pixel) {
- p.push_back (UserProperty (UserProperty::VIDEO, _("Bits per pixel"), _bits_per_pixel.get ()));
+ p.push_back (UserProperty (UserProperty::VIDEO, _("Bits per pixel"), *_bits_per_pixel));
}
}
if (audio) {
- audio->add_properties (p);
+ audio->add_properties (film, p);
}
}
void
FFmpegContent::signal_subtitle_stream_changed ()
{
- signal_changed (FFmpegContentProperty::SUBTITLE_STREAM);
+ /* XXX: this is too late; really it should be before the change */
+ ChangeSignaller<Content> cc (this, FFmpegContentProperty::SUBTITLE_STREAM);
}
vector<shared_ptr<FFmpegAudioStream> >
void
FFmpegContent::take_settings_from (shared_ptr<const Content> c)
{
- Content::take_settings_from (c);
-
shared_ptr<const FFmpegContent> fc = dynamic_pointer_cast<const FFmpegContent> (c);
+ if (!fc) {
+ return;
+ }
+
+ Content::take_settings_from (c);
_filters = fc->_filters;
}