*/
-#include "reel_writer.h"
-#include "film.h"
+
+#include "audio_buffers.h"
+#include "compose.hpp"
+#include "config.h"
#include "cross.h"
-#include "job.h"
-#include "log.h"
#include "dcpomatic_log.h"
#include "digester.h"
+#include "film.h"
#include "font_data.h"
-#include "compose.hpp"
-#include "config.h"
-#include "audio_buffers.h"
#include "image.h"
+#include "image_png.h"
+#include "job.h"
+#include "log.h"
+#include "reel_writer.h"
#include <dcp/atmos_asset.h>
#include <dcp/atmos_asset_writer.h>
+#include <dcp/certificate_chain.h>
+#include <dcp/cpl.h>
+#include <dcp/dcp.h>
+#include <dcp/interop_subtitle_asset.h>
#include <dcp/mono_picture_asset.h>
-#include <dcp/stereo_picture_asset.h>
-#include <dcp/sound_asset.h>
-#include <dcp/sound_asset_writer.h>
+#include <dcp/raw_convert.h>
#include <dcp/reel.h>
#include <dcp/reel_atmos_asset.h>
+#include <dcp/reel_interop_closed_caption_asset.h>
+#include <dcp/reel_interop_subtitle_asset.h>
+#include <dcp/reel_markers_asset.h>
#include <dcp/reel_mono_picture_asset.h>
-#include <dcp/reel_stereo_picture_asset.h>
+#include <dcp/reel_smpte_closed_caption_asset.h>
+#include <dcp/reel_smpte_subtitle_asset.h>
#include <dcp/reel_sound_asset.h>
-#include <dcp/reel_subtitle_asset.h>
-#include <dcp/reel_closed_caption_asset.h>
-#include <dcp/reel_markers_asset.h>
-#include <dcp/dcp.h>
-#include <dcp/cpl.h>
-#include <dcp/certificate_chain.h>
-#include <dcp/interop_subtitle_asset.h>
+#include <dcp/reel_stereo_picture_asset.h>
#include <dcp/smpte_subtitle_asset.h>
-#include <dcp/raw_convert.h>
+#include <dcp/sound_asset.h>
+#include <dcp/sound_asset_writer.h>
+#include <dcp/stereo_picture_asset.h>
#include <dcp/subtitle_image.h>
#include "i18n.h"
-using std::list;
-using std::string;
+
using std::cout;
+using std::dynamic_pointer_cast;
using std::exception;
+using std::list;
+using std::make_shared;
using std::map;
using std::set;
-using std::vector;
using std::shared_ptr;
-using std::make_shared;
+using std::string;
+using std::vector;
+using std::weak_ptr;
using boost::optional;
-using std::dynamic_pointer_cast;
#if BOOST_VERSION >= 106100
using namespace boost::placeholders;
#endif
-using std::weak_ptr;
using dcp::ArrayData;
using dcp::Data;
using dcp::raw_convert;
using namespace dcpomatic;
+
int const ReelWriter::_info_size = 48;
+
static dcp::MXFMetadata
mxf_metadata ()
{
return meta;
}
+
/** @param job Related job, or 0.
* @param text_only true to enable a special mode where the writer will expect only subtitles and closed captions to be written
* (no picture nor sound) and not give errors in that case. This is used by the hints system to check the potential sizes of
}
if (film()->audio_channels()) {
+ auto lang = film()->audio_language();
_sound_asset = make_shared<dcp::SoundAsset> (
- dcp::Fraction(film()->video_frame_rate(), 1), film()->audio_frame_rate(), film()->audio_channels(), film()->audio_language(), standard
+ dcp::Fraction(film()->video_frame_rate(), 1),
+ film()->audio_frame_rate(),
+ film()->audio_channels(),
+ lang ? *lang : dcp::LanguageTag("en-US"),
+ standard
);
_sound_asset->set_metadata (mxf_metadata());
DCPOMATIC_ASSERT (film()->directory());
- vector<dcp::Channel> active;
- for (auto i: film()->mapped_audio_channels()) {
- active.push_back (static_cast<dcp::Channel>(i));
- }
-
/* Write the sound asset into the film directory so that we leave the creation
of the DCP directory until the last minute.
*/
_sound_asset_writer = _sound_asset->start_write (
film()->directory().get() / audio_asset_filename (_sound_asset, _reel_index, _reel_count, _content_summary),
- active,
film()->contains_atmos_content()
);
}
_default_font = dcp::ArrayData(default_font_file());
}
+
/** @param frame reel-relative frame */
void
ReelWriter::write_frame_info (Frame frame, Eyes eyes, dcp::FrameInfo info) const
checked_fwrite (info.hash.c_str(), info.hash.size(), handle->get(), handle->file());
}
+
dcp::FrameInfo
ReelWriter::read_frame_info (shared_ptr<InfoFileHandle> info, Frame frame, Eyes eyes) const
{
return frame_info;
}
+
long
ReelWriter::frame_info_position (Frame frame, Eyes eyes) const
{
DCPOMATIC_ASSERT (false);
}
+
Frame
ReelWriter::check_existing_picture_asset (boost::filesystem::path asset)
{
return first_nonexistant_frame;
}
+
void
ReelWriter::write (shared_ptr<const Data> encoded, Frame frame, Eyes eyes)
{
_picture_asset_writer->fake_write (size);
}
+
void
ReelWriter::repeat_write (Frame frame, Eyes eyes)
{
write_frame_info (frame, eyes, fin);
}
+
void
ReelWriter::finish (boost::filesystem::path output_dcp)
{
}
}
-template <class T>
-shared_ptr<T>
+
+/** Try to make a ReelAsset for subtitles or closed captions in a given period in the DCP.
+ * A SubtitleAsset can be provided, or we will use one from @ref refs if not.
+ */
+template <class Interop, class SMPTE, class Result>
+shared_ptr<Result>
maybe_add_text (
shared_ptr<dcp::SubtitleAsset> asset,
int64_t picture_duration,
{
Frame const period_duration = period.duration().frames_round(film->video_frame_rate());
- shared_ptr<T> reel_asset;
+ shared_ptr<Result> reel_asset;
if (asset) {
/* Add the font to the subtitle content */
asset->add_font (j.id, j.data.get_value_or(default_font));
}
- if (dynamic_pointer_cast<dcp::InteropSubtitleAsset> (asset)) {
- auto directory = output_dcp / asset->id ();
+ if (auto interop = dynamic_pointer_cast<dcp::InteropSubtitleAsset>(asset)) {
+ auto directory = output_dcp / interop->id ();
boost::filesystem::create_directories (directory);
- asset->write (directory / ("sub_" + asset->id() + ".xml"));
- } else {
+ interop->write (directory / ("sub_" + interop->id() + ".xml"));
+ reel_asset = make_shared<Interop> (
+ interop,
+ dcp::Fraction(film->video_frame_rate(), 1),
+ picture_duration,
+ 0
+ );
+ } else if (auto smpte = dynamic_pointer_cast<dcp::SMPTESubtitleAsset>(asset)) {
/* All our assets should be the same length; use the picture asset length here
as a reference to set the subtitle one. We'll use the duration rather than
the intrinsic duration; we don't care if the picture asset has been trimmed, we're
just interested in its presentation length.
*/
- dynamic_pointer_cast<dcp::SMPTESubtitleAsset>(asset)->set_intrinsic_duration (picture_duration);
-
- asset->write (
+ smpte->set_intrinsic_duration(picture_duration);
+ smpte->write (
output_dcp / ("sub_" + asset->id() + ".mxf")
);
+ reel_asset = make_shared<SMPTE> (
+ smpte,
+ dcp::Fraction(film->video_frame_rate(), 1),
+ picture_duration,
+ 0
+ );
}
- reel_asset = make_shared<T> (
- asset,
- dcp::Fraction(film->video_frame_rate(), 1),
- picture_duration,
- 0
- );
} else {
/* We don't have a subtitle asset of our own; hopefully we have one to reference */
for (auto j: refs) {
- auto k = dynamic_pointer_cast<T> (j.asset);
+ auto k = dynamic_pointer_cast<Result> (j.asset);
if (k && j.period == period) {
reel_asset = k;
/* If we have a hash for this asset in the CPL, assume that it is correct */
set<DCPTextTrack> ensure_closed_captions
) const
{
- auto subtitle = maybe_add_text<dcp::ReelSubtitleAsset> (
+ auto subtitle = maybe_add_text<dcp::ReelInteropSubtitleAsset, dcp::ReelSMPTESubtitleAsset, dcp::ReelSubtitleAsset> (
_subtitle_asset, duration, reel, refs, fonts, _default_font, film(), _period, output_dcp, _text_only
);
if (subtitle) {
/* We have a subtitle asset that we either made or are referencing */
- if (!film()->subtitle_languages().empty()) {
- subtitle->set_language (film()->subtitle_languages().front());
+ if (auto main_language = film()->subtitle_languages().first) {
+ subtitle->set_language (*main_language);
}
} else if (ensure_subtitles) {
/* We had no subtitle asset, but we've been asked to make sure there is one */
- subtitle = maybe_add_text<dcp::ReelSubtitleAsset>(
- empty_text_asset(TextType::OPEN_SUBTITLE, optional<DCPTextTrack>()),
+ subtitle = maybe_add_text<dcp::ReelInteropSubtitleAsset, dcp::ReelSMPTESubtitleAsset, dcp::ReelSubtitleAsset> (
+ empty_text_asset(TextType::OPEN_SUBTITLE, optional<DCPTextTrack>(), true),
duration,
reel,
refs,
}
for (auto const& i: _closed_caption_assets) {
- auto a = maybe_add_text<dcp::ReelClosedCaptionAsset> (
+ auto a = maybe_add_text<dcp::ReelInteropClosedCaptionAsset, dcp::ReelSMPTEClosedCaptionAsset, dcp::ReelClosedCaptionAsset> (
i.second, duration, reel, refs, fonts, _default_font, film(), _period, output_dcp, _text_only
);
DCPOMATIC_ASSERT (a);
a->set_annotation_text (i.first.name);
- if (!i.first.language.empty()) {
- a->set_language (dcp::LanguageTag(i.first.language));
+ if (i.first.language) {
+ a->set_language (i.first.language.get());
}
ensure_closed_captions.erase (i.first);
/* Make empty tracks for anything we've been asked to ensure but that we haven't added */
for (auto i: ensure_closed_captions) {
- auto a = maybe_add_text<dcp::ReelClosedCaptionAsset> (
- empty_text_asset(TextType::CLOSED_CAPTION, i), duration, reel, refs, fonts, _default_font, film(), _period, output_dcp, _text_only
+ auto a = maybe_add_text<dcp::ReelInteropClosedCaptionAsset, dcp::ReelSMPTEClosedCaptionAsset, dcp::ReelClosedCaptionAsset> (
+ empty_text_asset(TextType::CLOSED_CAPTION, i, true), duration, reel, refs, fonts, _default_font, film(), _period, output_dcp, _text_only
);
DCPOMATIC_ASSERT (a);
a->set_annotation_text (i.name);
- if (!i.language.empty()) {
- a->set_language (dcp::LanguageTag(i.language));
+ if (i.language) {
+ a->set_language (i.language.get());
}
}
}
-
void
ReelWriter::create_reel_markers (shared_ptr<dcp::Reel> reel) const
{
if (!reel_markers.empty ()) {
auto ma = make_shared<dcp::ReelMarkersAsset>(dcp::Fraction(film()->video_frame_rate(), 1), reel->duration(), 0);
for (auto const& i: reel_markers) {
- int h, m, s, f;
DCPTime relative = i.second - _period.from;
- relative.split (film()->video_frame_rate(), h, m, s, f);
- ma->set (i.first, dcp::Time(h, m, s, f, film()->video_frame_rate()));
+ auto hmsf = relative.split (film()->video_frame_rate());
+ ma->set (i.first, dcp::Time(hmsf.h, hmsf.m, hmsf.s, hmsf.f, film()->video_frame_rate()));
}
reel->add (ma);
}
return reel;
}
+
void
-ReelWriter::calculate_digests (boost::function<void (float)> set_progress)
+ReelWriter::calculate_digests (std::function<void (float)> set_progress)
+try
{
if (_picture_asset) {
_picture_asset->hash (set_progress);
if (_atmos_asset) {
_atmos_asset->hash (set_progress);
}
+} catch (boost::thread_interrupted) {
+ /* set_progress contains an interruption_point, so any of these methods
+ * may throw thread_interrupted, at which point we just give up.
+ */
}
+
Frame
ReelWriter::start () const
{
shared_ptr<dcp::SubtitleAsset>
-ReelWriter::empty_text_asset (TextType type, optional<DCPTextTrack> track) const
+ReelWriter::empty_text_asset (TextType type, optional<DCPTextTrack> track, bool with_dummy) const
{
shared_ptr<dcp::SubtitleAsset> asset;
auto s = make_shared<dcp::InteropSubtitleAsset>();
s->set_movie_title (film()->name());
if (type == TextType::OPEN_SUBTITLE) {
- s->set_language (lang.empty() ? "Unknown" : lang.front().to_string());
- } else if (!track->language.empty()) {
- s->set_language (track->language);
+ s->set_language (lang.first ? lang.first->to_string() : "Unknown");
+ } else if (track->language) {
+ s->set_language (track->language->to_string());
}
s->set_reel_number (raw_convert<string> (_reel_index + 1));
asset = s;
auto s = make_shared<dcp::SMPTESubtitleAsset>();
s->set_content_title_text (film()->name());
s->set_metadata (mxf_metadata());
- if (type == TextType::OPEN_SUBTITLE && !lang.empty()) {
- s->set_language (lang.front());
- } else if (track && !track->language.empty()) {
- s->set_language (dcp::LanguageTag(track->language));
+ if (type == TextType::OPEN_SUBTITLE && lang.first) {
+ s->set_language (*lang.first);
+ } else if (track && track->language) {
+ s->set_language (dcp::LanguageTag(track->language->to_string()));
}
s->set_edit_rate (dcp::Fraction (film()->video_frame_rate(), 1));
s->set_reel_number (_reel_index + 1);
if (film()->encrypted()) {
s->set_key (film()->key());
}
- s->add (
- std::make_shared<dcp::SubtitleString>(
- optional<std::string>(),
- false,
- false,
- false,
- dcp::Colour(),
- 42,
- 1.0,
- dcp::Time(0, 0, 0, 0, 24),
- dcp::Time(0, 0, 1, 0, 24),
- 0.5,
- dcp::HAlign::CENTER,
- 0.5,
- dcp::VAlign::CENTER,
- dcp::Direction::LTR,
- "",
- dcp::Effect::NONE,
- dcp::Colour(),
- dcp::Time(),
- dcp::Time()
- )
- );
+ if (with_dummy) {
+ s->add (
+ std::make_shared<dcp::SubtitleString>(
+ optional<std::string>(),
+ false,
+ false,
+ false,
+ dcp::Colour(),
+ 42,
+ 1.0,
+ dcp::Time(0, 0, 0, 0, 24),
+ dcp::Time(0, 0, 1, 0, 24),
+ 0.5,
+ dcp::HAlign::CENTER,
+ 0.5,
+ dcp::VAlign::CENTER,
+ dcp::Direction::LTR,
+ " ",
+ dcp::Effect::NONE,
+ dcp::Colour(),
+ dcp::Time(),
+ dcp::Time(),
+ 0
+ )
+ );
+ }
asset = s;
}
}
if (!asset) {
- asset = empty_text_asset (type, track);
+ asset = empty_text_asset (type, track, false);
}
switch (type) {
DCPOMATIC_ASSERT (false);
}
- auto const vfr = film()->video_frame_rate();
+ /* timecode rate for subtitles we emit; we might as well stick to ms accuracy here, I think */
+ auto const tcr = 1000;
for (auto i: subs.string) {
- i.set_in (dcp::Time(period.from.seconds() - _period.from.seconds(), vfr));
- i.set_out (dcp::Time(period.to.seconds() - _period.from.seconds(), vfr));
+ i.set_in (dcp::Time(period.from.seconds() - _period.from.seconds(), tcr));
+ i.set_out (dcp::Time(period.to.seconds() - _period.from.seconds(), tcr));
asset->add (make_shared<dcp::SubtitleString>(i));
}
for (auto i: subs.bitmap) {
asset->add (
make_shared<dcp::SubtitleImage>(
- i.image->as_png(),
- dcp::Time(period.from.seconds() - _period.from.seconds(), vfr),
- dcp::Time(period.to.seconds() - _period.from.seconds(), vfr),
+ image_as_png(i.image),
+ dcp::Time(period.from.seconds() - _period.from.seconds(), tcr),
+ dcp::Time(period.to.seconds() - _period.from.seconds(), tcr),
i.rectangle.x, dcp::HAlign::LEFT, i.rectangle.y, dcp::VAlign::TOP,
dcp::Time(), dcp::Time()
)
}
}
+
bool
ReelWriter::existing_picture_frame_ok (FILE* asset_file, shared_ptr<InfoFileHandle> info_file, Frame frame) const
{