#include <dcp/colour_matrix.h>
#include <libxml++/libxml++.h>
#include <iomanip>
#include <iostream>
#include "i18n.h"

/* Write a formatted entry to the film's log.
 * NOTE(review): resolved leftover patch markers here — the new form uses the
 * film() accessor and LogEntry::TYPE_GENERAL (the old Log::TYPE_GENERAL and
 * local `film` variable were removed by the patch).
 */
#define LOG_GENERAL(...) film()->log()->log (String::compose (__VA_ARGS__), LogEntry::TYPE_GENERAL);
/* Identifiers passed to signal_changed() so observers know which property changed */
int const VideoContentProperty::VIDEO_SIZE = 0;
int const VideoContentProperty::VIDEO_FRAME_RATE = 1;

using std::vector;
using std::min;
using std::max;
using std::fixed;
using std::setprecision;
using std::list;
/* NOTE(review): `using std::stringstream;` was removed by the patch — the file
 * now uses SafeStringStream in processing_description() instead.
 */
VideoContent::VideoContent (shared_ptr<const Film> film)
: Content (film)
, _video_length (0)
- , _video_frame_rate (0)
, _video_frame_type (VIDEO_FRAME_TYPE_2D)
, _scale (VideoContentScale (Ratio::from_id ("178")))
+ , _yuv (true)
, _fade_in (0)
, _fade_out (0)
{
VideoContent::VideoContent (shared_ptr<const Film> film, DCPTime s, Frame len)
: Content (film, s)
, _video_length (len)
- , _video_frame_rate (0)
, _video_frame_type (VIDEO_FRAME_TYPE_2D)
, _scale (VideoContentScale (Ratio::from_id ("178")))
+ , _yuv (true)
, _fade_in (0)
, _fade_out (0)
{
VideoContent::VideoContent (shared_ptr<const Film> film, boost::filesystem::path p)
: Content (film, p)
, _video_length (0)
- , _video_frame_rate (0)
, _video_frame_type (VIDEO_FRAME_TYPE_2D)
, _scale (VideoContentScale (Ratio::from_id ("178")))
+ , _yuv (true)
, _fade_in (0)
, _fade_out (0)
{
{
_video_size.width = node->number_child<int> ("VideoWidth");
_video_size.height = node->number_child<int> ("VideoHeight");
- _video_frame_rate = node->number_child<double> ("VideoFrameRate");
+ _video_frame_rate = node->optional_number_child<double> ("VideoFrameRate");
_video_length = node->number_child<Frame> ("VideoLength");
_video_frame_type = static_cast<VideoFrameType> (node->number_child<int> ("VideoFrameType"));
_sample_aspect_ratio = node->optional_number_child<double> ("SampleAspectRatio");
if (node->optional_node_child ("ColourConversion")) {
_colour_conversion = ColourConversion (node->node_child ("ColourConversion"), version);
}
+
+ _yuv = node->optional_bool_child("YUV").get_value_or (true);
+
if (version >= 32) {
_fade_in = node->number_child<Frame> ("FadeIn");
_fade_out = node->number_child<Frame> ("FadeOut");
VideoContent::VideoContent (shared_ptr<const Film> film, vector<shared_ptr<Content> > c)
: Content (film, c)
, _video_length (0)
+ , _yuv (false)
{
shared_ptr<VideoContent> ref = dynamic_pointer_cast<VideoContent> (c[0]);
DCPOMATIC_ASSERT (ref);
}
_video_length += vc->video_length ();
+
+ if (vc->yuv ()) {
+ _yuv = true;
+ }
}
_video_size = ref->video_size ();
node->add_child("VideoLength")->add_child_text (raw_convert<string> (_video_length));
node->add_child("VideoWidth")->add_child_text (raw_convert<string> (_video_size.width));
node->add_child("VideoHeight")->add_child_text (raw_convert<string> (_video_size.height));
- node->add_child("VideoFrameRate")->add_child_text (raw_convert<string> (_video_frame_rate));
+ if (_video_frame_rate) {
+ node->add_child("VideoFrameRate")->add_child_text (raw_convert<string> (_video_frame_rate.get()));
+ }
node->add_child("VideoFrameType")->add_child_text (raw_convert<string> (static_cast<int> (_video_frame_type)));
if (_sample_aspect_ratio) {
node->add_child("SampleAspectRatio")->add_child_text (raw_convert<string> (_sample_aspect_ratio.get ()));
if (_colour_conversion) {
_colour_conversion.get().as_xml (node->add_child("ColourConversion"));
}
+ node->add_child("YUV")->add_child_text (_yuv ? "1" : "0");
node->add_child("FadeIn")->add_child_text (raw_convert<string> (_fade_in));
node->add_child("FadeOut")->add_child_text (raw_convert<string> (_fade_out));
}
optional<double> const vfr = d->video_frame_rate ();
Frame vl = d->video_length ();
optional<double> const ar = d->sample_aspect_ratio ();
+ bool const yuv = d->yuv ();
{
boost::mutex::scoped_lock lm (_mutex);
_video_size = vs;
- /* Default video frame rate to 24fps if the examiner doesn't know */
- _video_frame_rate = vfr.get_value_or (24);
+ _video_frame_rate = vfr;
_video_length = vl;
_sample_aspect_ratio = ar;
+ _yuv = yuv;
/* Guess correct scale from size and sample aspect ratio */
_scale = VideoContentScale (
);
}
- shared_ptr<const Film> film = _film.lock ();
- DCPOMATIC_ASSERT (film);
LOG_GENERAL ("Video length obtained from header as %1 frames", _video_length);
set_default_colour_conversion ();
}
void
-VideoContent::unset_colour_conversion (bool signal)
+VideoContent::unset_colour_conversion ()
{
{
boost::mutex::scoped_lock lm (_mutex);
_colour_conversion = boost::optional<ColourConversion> ();
}
- if (signal) {
- signal_changed (VideoContentProperty::COLOUR_CONVERSION);
- }
+ signal_changed (VideoContentProperty::COLOUR_CONVERSION);
}
void
void
VideoContent::scale_and_crop_to_fit_width ()
{
- shared_ptr<const Film> film = _film.lock ();
- DCPOMATIC_ASSERT (film);
+ set_scale (VideoContentScale (film()->container ()));
- set_scale (VideoContentScale (film->container ()));
-
- int const crop = max (0, int (video_size().height - double (film->frame_size().height) * video_size().width / film->frame_size().width));
+ int const crop = max (0, int (video_size().height - double (film()->frame_size().height) * video_size().width / film()->frame_size().width));
set_top_crop (crop / 2);
set_bottom_crop (crop / 2);
}
void
VideoContent::scale_and_crop_to_fit_height ()
{
- shared_ptr<const Film> film = _film.lock ();
- DCPOMATIC_ASSERT (film);
-
- set_scale (VideoContentScale (film->container ()));
+ set_scale (VideoContentScale (film()->container ()));
- int const crop = max (0, int (video_size().width - double (film->frame_size().width) * video_size().height / film->frame_size().height));
+ int const crop = max (0, int (video_size().width - double (film()->frame_size().width) * video_size().height / film()->frame_size().height));
set_left_crop (crop / 2);
set_right_crop (crop / 2);
}
VideoContent::processing_description () const
{
/* stringstream is OK here as this string is just for presentation to the user */
- stringstream d;
+ SafeStringStream d;
if (video_size().width && video_size().height) {
d << String::compose (
d << " (" << fixed << setprecision(2) << cropped.ratio () << ":1)\n";
}
- shared_ptr<const Film> film = _film.lock ();
- DCPOMATIC_ASSERT (film);
-
- dcp::Size const container_size = film->frame_size ();
+ dcp::Size const container_size = film()->frame_size ();
dcp::Size const scaled = scale().size (dynamic_pointer_cast<const VideoContent> (shared_from_this ()), container_size, container_size);
if (scaled != video_size_after_crop ()) {
if (scaled != container_size) {
d << String::compose (
_("Padded with black to fit container %1 (%2x%3)"),
- film->container()->nickname (),
+ film()->container()->nickname (),
container_size.width, container_size.height
);
d << _("Content frame rate");
d << " " << fixed << setprecision(4) << video_frame_rate() << "\n";
- FrameRateChange frc (video_frame_rate(), film->video_frame_rate ());
+ FrameRateChange frc (video_frame_rate(), film()->video_frame_rate ());
d << frc.description () << "\n";
return d.str ();
p.push_back (make_pair (_("Video size"), raw_convert<string> (video_size().width) + "x" + raw_convert<string> (video_size().height)));
p.push_back (make_pair (_("Video frame rate"), raw_convert<string> (video_frame_rate()) + " " + _("frames per second")));
}
+
+list<DCPTime>
+VideoContent::reel_split_points () const
+{
+ list<DCPTime> t;
+ /* XXX: this is questionable; perhaps the position should be forced to be on a frame boundary */
+ t.push_back (position().round_up (film()->video_frame_rate()));
+ return t;
+}
+
+double
+VideoContent::video_frame_rate () const
+{
+ boost::mutex::scoped_lock lm (_mutex);
+ return _video_frame_rate.get_value_or (film()->video_frame_rate ());
+}