X-Git-Url: https://main.carlh.net/gitweb/?p=dcpomatic.git;a=blobdiff_plain;f=src%2Flib%2Freel_writer.cc;h=521ba55df891f226393fdb7a6d12e5b139ebe6de;hp=4888a1567d5150b145c235c90dd6024c6094410c;hb=da44da6f31f97d39ca91c35955e573e76371f2c2;hpb=8fedaaa75c4586a4cc7ffb393bd71d1fdb091dc8

diff --git a/src/lib/reel_writer.cc b/src/lib/reel_writer.cc
index 4888a1567..521ba55df 100644
--- a/src/lib/reel_writer.cc
+++ b/src/lib/reel_writer.cc
@@ -32,24 +32,26 @@
 #include "image.h"
 #include
 #include
+#include
+#include
+#include
+#include
 #include
-#include
-#include
-#include
+#include
 #include
 #include
+#include
+#include
+#include
 #include
-#include
+#include
+#include
 #include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
+#include
 #include
-#include
+#include
+#include
+#include
 #include

 #include "i18n.h"
@@ -166,8 +168,13 @@ ReelWriter::ReelWriter (
 	}

 	if (film()->audio_channels()) {
+		auto lang = film()->audio_language();
 		_sound_asset = make_shared<dcp::SoundAsset> (
-			dcp::Fraction(film()->video_frame_rate(), 1), film()->audio_frame_rate(), film()->audio_channels(), film()->audio_language(), standard
+			dcp::Fraction(film()->video_frame_rate(), 1),
+			film()->audio_frame_rate(),
+			film()->audio_channels(),
+			lang ? *lang : dcp::LanguageTag("en-US"),
+			standard
 			);

 		_sound_asset->set_metadata (mxf_metadata());
@@ -178,17 +185,11 @@ ReelWriter::ReelWriter (
 		DCPOMATIC_ASSERT (film()->directory());

-		vector<dcp::Channel> active;
-		for (auto i: film()->mapped_audio_channels()) {
-			active.push_back (static_cast<dcp::Channel>(i));
-		}
-
 		/* Write the sound asset into the film directory so that we leave the creation
 		   of the DCP directory until the last minute.
 		*/
 		_sound_asset_writer = _sound_asset->start_write (
 			film()->directory().get() / audio_asset_filename (_sound_asset, _reel_index, _reel_count, _content_summary),
-			active,
 			film()->contains_atmos_content()
 			);
 	}
 }
@@ -439,8 +440,8 @@ ReelWriter::finish (boost::filesystem::path output_dcp)
 	}
 }

-template <class T>
-shared_ptr<T>
+template <class Interop, class SMPTE, class Result>
+shared_ptr<Result>
 maybe_add_text (
 	shared_ptr<dcp::SubtitleAsset> asset,
 	int64_t picture_duration,
@@ -456,7 +457,7 @@ maybe_add_text (
 {
 	Frame const period_duration = period.duration().frames_round(film->video_frame_rate());

-	shared_ptr<T> reel_asset;
+	shared_ptr<Result> reel_asset;

 	if (asset) {
 		/* Add the font to the subtitle content */
@@ -464,33 +465,38 @@ maybe_add_text (
 			asset->add_font (j.id, j.data.get_value_or(default_font));
 		}

-		if (dynamic_pointer_cast<dcp::InteropSubtitleAsset> (asset)) {
-			auto directory = output_dcp / asset->id ();
+		if (auto interop = dynamic_pointer_cast<dcp::InteropSubtitleAsset>(asset)) {
+			auto directory = output_dcp / interop->id ();
 			boost::filesystem::create_directories (directory);
-			asset->write (directory / ("sub_" + asset->id() + ".xml"));
-		} else {
+			interop->write (directory / ("sub_" + interop->id() + ".xml"));
+			reel_asset = make_shared<Interop> (
+				interop,
+				dcp::Fraction(film->video_frame_rate(), 1),
+				picture_duration,
+				0
+				);
+		} else if (auto smpte = dynamic_pointer_cast<dcp::SMPTESubtitleAsset>(asset)) {
 			/* All our assets should be the same length; use the picture asset length here
 			   as a reference to set the subtitle one. We'll use the duration rather than
 			   the intrinsic duration; we don't care if the picture asset has been trimmed, we're
 			   just interested in its presentation length.
 			*/
-			dynamic_pointer_cast<dcp::SMPTESubtitleAsset>(asset)->set_intrinsic_duration (picture_duration);
-
-			asset->write (
+			smpte->set_intrinsic_duration(picture_duration);
+			smpte->write (
 				output_dcp / ("sub_" + asset->id() + ".mxf")
 				);
+			reel_asset = make_shared<SMPTE> (
+				smpte,
+				dcp::Fraction(film->video_frame_rate(), 1),
+				picture_duration,
+				0
+				);
 		}

-		reel_asset = make_shared<T> (
-			asset,
-			dcp::Fraction(film->video_frame_rate(), 1),
-			picture_duration,
-			0
-			);
 	} else {
 		/* We don't have a subtitle asset of our own; hopefully we have one to reference */
 		for (auto j: refs) {
-			auto k = dynamic_pointer_cast<T> (j.asset);
+			auto k = dynamic_pointer_cast<Result> (j.asset);
 			if (k && j.period == period) {
 				reel_asset = k;
 				/* If we have a hash for this asset in the CPL, assume that it is correct */
@@ -623,18 +629,18 @@ ReelWriter::create_reel_text (
 	set<DCPTextTrack> ensure_closed_captions
 	) const
 {
-	auto subtitle = maybe_add_text<dcp::ReelSubtitleAsset> (
+	auto subtitle = maybe_add_text<dcp::ReelInteropSubtitleAsset, dcp::ReelSMPTESubtitleAsset, dcp::ReelSubtitleAsset> (
 		_subtitle_asset, duration, reel, refs, fonts, _default_font, film(), _period, output_dcp, _text_only
 		);

 	if (subtitle) {
 		/* We have a subtitle asset that we either made or are referencing */
-		if (!film()->subtitle_languages().empty()) {
-			subtitle->set_language (film()->subtitle_languages().front());
+		if (auto main_language = film()->subtitle_languages().first) {
+			subtitle->set_language (*main_language);
 		}
 	} else if (ensure_subtitles) {
 		/* We had no subtitle asset, but we've been asked to make sure there is one */
-		subtitle = maybe_add_text<dcp::ReelSubtitleAsset>(
+		subtitle = maybe_add_text<dcp::ReelInteropSubtitleAsset, dcp::ReelSMPTESubtitleAsset, dcp::ReelSubtitleAsset> (
 			empty_text_asset(TextType::OPEN_SUBTITLE, optional<DCPTextTrack>()),
 			duration,
 			reel,
@@ -649,13 +655,13 @@ ReelWriter::create_reel_text (
 	}

 	for (auto const& i: _closed_caption_assets) {
-		auto a = maybe_add_text<dcp::ReelClosedCaptionAsset> (
+		auto a = maybe_add_text<dcp::ReelInteropClosedCaptionAsset, dcp::ReelSMPTEClosedCaptionAsset, dcp::ReelClosedCaptionAsset> (
 			i.second, duration, reel, refs, fonts, _default_font, film(), _period, output_dcp, _text_only
 			);
 		DCPOMATIC_ASSERT (a);
 		a->set_annotation_text (i.first.name);
-		if (!i.first.language.empty()) {
-			a->set_language (dcp::LanguageTag(i.first.language));
+		if (i.first.language) {
+			a->set_language (i.first.language.get());
 		}

 		ensure_closed_captions.erase (i.first);
@@ -663,13 +669,13 @@ ReelWriter::create_reel_text (

 	/* Make empty tracks for anything we've been asked to ensure but that we haven't added */
 	for (auto i: ensure_closed_captions) {
-		auto a = maybe_add_text<dcp::ReelClosedCaptionAsset> (
+		auto a = maybe_add_text<dcp::ReelInteropClosedCaptionAsset, dcp::ReelSMPTEClosedCaptionAsset, dcp::ReelClosedCaptionAsset> (
 			empty_text_asset(TextType::CLOSED_CAPTION, i), duration, reel, refs, fonts, _default_font, film(), _period, output_dcp, _text_only
 			);
 		DCPOMATIC_ASSERT (a);
 		a->set_annotation_text (i.name);
-		if (!i.language.empty()) {
-			a->set_language (dcp::LanguageTag(i.language));
+		if (i.language) {
+			a->set_language (i.language.get());
 		}
 	}
 }
@@ -691,10 +697,9 @@ ReelWriter::create_reel_markers (shared_ptr<dcp::Reel> reel) const
 	if (!reel_markers.empty ()) {
 		auto ma = make_shared<dcp::ReelMarkersAsset>(dcp::Fraction(film()->video_frame_rate(), 1), reel->duration(), 0);
 		for (auto const& i: reel_markers) {
-			int h, m, s, f;
 			DCPTime relative = i.second - _period.from;
-			relative.split (film()->video_frame_rate(), h, m, s, f);
-			ma->set (i.first, dcp::Time(h, m, s, f, film()->video_frame_rate()));
+			auto hmsf = relative.split (film()->video_frame_rate());
+			ma->set (i.first, dcp::Time(hmsf.h, hmsf.m, hmsf.s, hmsf.f, film()->video_frame_rate()));
 		}
 		reel->add (ma);
 	}
@@ -739,7 +744,8 @@ ReelWriter::create_reel (
 }

 void
-ReelWriter::calculate_digests (boost::function<void (float)> set_progress)
+ReelWriter::calculate_digests (std::function<void (float)> set_progress)
+try
 {
 	if (_picture_asset) {
 		_picture_asset->hash (set_progress);
@@ -752,6 +758,10 @@ ReelWriter::calculate_digests (boost::function<void (float)> set_progress)
 	if (_atmos_asset) {
 		_atmos_asset->hash (set_progress);
 	}
+} catch (boost::thread_interrupted) {
+	/* set_progress contains an interruption_point, so any of these methods
+	 * may throw thread_interrupted, at which point we just give up.
+	 */
 }

 Frame
@@ -783,20 +793,20 @@ ReelWriter::empty_text_asset (TextType type, optional<DCPTextTrack> track) const
 		auto s = make_shared<dcp::InteropSubtitleAsset>();
 		s->set_movie_title (film()->name());
 		if (type == TextType::OPEN_SUBTITLE) {
-			s->set_language (lang.empty() ? "Unknown" : lang.front().to_string());
-		} else if (!track->language.empty()) {
-			s->set_language (track->language);
+			s->set_language (lang.first ? lang.first->to_string() : "Unknown");
+		} else if (track->language) {
+			s->set_language (track->language->to_string());
 		}
 		s->set_reel_number (raw_convert<string> (_reel_index + 1));
 		asset = s;
 	} else {
-		shared_ptr<dcp::SMPTESubtitleAsset> s (new dcp::SMPTESubtitleAsset ());
+		auto s = make_shared<dcp::SMPTESubtitleAsset>();
 		s->set_content_title_text (film()->name());
 		s->set_metadata (mxf_metadata());
-		if (type == TextType::OPEN_SUBTITLE && !lang.empty()) {
-			s->set_language (lang.front());
-		} else if (track && !track->language.empty()) {
-			s->set_language (dcp::LanguageTag(track->language));
+		if (type == TextType::OPEN_SUBTITLE && lang.first) {
+			s->set_language (*lang.first);
+		} else if (track && track->language) {
+			s->set_language (dcp::LanguageTag(track->language->to_string()));
 		}
 		s->set_edit_rate (dcp::Fraction (film()->video_frame_rate(), 1));
 		s->set_reel_number (_reel_index + 1);
@@ -805,6 +815,29 @@ ReelWriter::empty_text_asset (TextType type, optional<DCPTextTrack> track) const
 		if (film()->encrypted()) {
 			s->set_key (film()->key());
 		}
+		s->add (
+			std::make_shared<dcp::SubtitleString>(
+				optional<string>(),
+				false,
+				false,
+				false,
+				dcp::Colour(),
+				42,
+				1.0,
+				dcp::Time(0, 0, 0, 0, 24),
+				dcp::Time(0, 0, 1, 0, 24),
+				0.5,
+				dcp::HAlign::CENTER,
+				0.5,
+				dcp::VAlign::CENTER,
+				dcp::Direction::LTR,
+				"",
+				dcp::Effect::NONE,
+				dcp::Colour(),
+				dcp::Time(),
+				dcp::Time()
+			)
+		);
 		asset = s;
 	}

@@ -845,19 +878,21 @@ ReelWriter::write (PlayerText subs, TextType type, optional<DCPTextTrack> track,
 		DCPOMATIC_ASSERT (false);
 	}

+	/* timecode rate for subtitles we emit; we might as well stick to ms accuracy here, I think */
+	auto const tcr = 1000;
+
 	for (auto i: subs.string) {
-		/* XXX: couldn't / shouldn't we use period here rather than getting time from the subtitle? */
-		i.set_in (i.in() - dcp::Time (_period.from.seconds(), i.in().tcr));
-		i.set_out (i.out() - dcp::Time (_period.from.seconds(), i.out().tcr));
-		asset->add (shared_ptr<dcp::SubtitleString>(new dcp::SubtitleString(i)));
+		i.set_in (dcp::Time(period.from.seconds() - _period.from.seconds(), tcr));
+		i.set_out (dcp::Time(period.to.seconds() - _period.from.seconds(), tcr));
+		asset->add (make_shared<dcp::SubtitleString>(i));
 	}

 	for (auto i: subs.bitmap) {
 		asset->add (
 			make_shared<dcp::SubtitleImage>(
 				i.image->as_png(),
-				dcp::Time(period.from.seconds() - _period.from.seconds(), film()->video_frame_rate()),
-				dcp::Time(period.to.seconds() - _period.from.seconds(), film()->video_frame_rate()),
+				dcp::Time(period.from.seconds() - _period.from.seconds(), tcr),
+				dcp::Time(period.to.seconds() - _period.from.seconds(), tcr),
 				i.rectangle.x, dcp::HAlign::LEFT, i.rectangle.y, dcp::VAlign::TOP,
 				dcp::Time(), dcp::Time()
 			)