boost::filesystem::path result;
for (boost::filesystem::path::iterator i = p.begin(); i != p.end(); ++i) {
if (*i == "..") {
- if (boost::filesystem::is_symlink (result) || result.filename() == "..") {
+ boost::system::error_code ec;
+ if (boost::filesystem::is_symlink(result, ec) || result.filename() == "..") {
result /= *i;
} else {
result = result.parent_path ();
_isdcf_date = boost::gregorian::from_undelimited_string (f.string_child ("DCIDate"));
}
+
{
optional<string> c = f.optional_string_child ("DCPContentType");
if (c) {
_user_explicit_video_frame_rate = f.optional_bool_child("UserExplicitVideoFrameRate").get_value_or(false);
list<string> notes;
- /* This method is the only one that can return notes (so far) */
_playlist->set_from_xml (shared_from_this(), f.node_child ("Playlist"), _state_version, notes);
/* Write backtraces to this film's directory, until another film is loaded */
if (!dm.audio_language.empty ()) {
d += "_" + dm.audio_language;
- if (!dm.subtitle_language.empty()) {
- /* I'm not clear on the precise details of the convention for CCAP labelling;
- for now I'm just appending -CCAP if we have any closed captions.
- */
+ /* I'm not clear on the precise details of the convention for CCAP labelling;
+ for now I'm just appending -CCAP if we have any closed captions.
+ */
- bool burnt_in = true;
- bool ccap = false;
- BOOST_FOREACH (shared_ptr<Content> i, content()) {
- BOOST_FOREACH (shared_ptr<TextContent> j, i->text) {
- if (j->type() == TEXT_OPEN_SUBTITLE && j->use() && !j->burn()) {
+ optional<string> subtitle_language;
+ bool burnt_in = true;
+ bool ccap = false;
+ BOOST_FOREACH (shared_ptr<Content> i, content()) {
+ BOOST_FOREACH (shared_ptr<TextContent> j, i->text) {
+ if (j->type() == TEXT_OPEN_SUBTITLE && j->use()) {
+ subtitle_language = j->language ();
+ if (!j->burn()) {
burnt_in = false;
- } else if (j->type() == TEXT_CLOSED_CAPTION) {
- ccap = true;
}
+ } else if (j->type() == TEXT_CLOSED_CAPTION && j->use()) {
+ ccap = true;
}
}
+ }
- string language = dm.subtitle_language;
- if (burnt_in && language != "XX") {
- transform (language.begin(), language.end(), language.begin(), ::tolower);
+ if (dm.subtitle_language) {
+ /* Subtitle language is overridden in ISDCF metadata, primarily to handle
+ content with pre-burnt subtitles.
+ */
+ d += "-" + *dm.subtitle_language;
+ if (ccap) {
+ d += "-CCAP";
+ }
+ } else if (subtitle_language) {
+ /* Language is worked out from the content */
+ if (burnt_in && *subtitle_language != "XX") {
+ transform (subtitle_language->begin(), subtitle_language->end(), subtitle_language->begin(), ::tolower);
} else {
- transform (language.begin(), language.end(), language.begin(), ::toupper);
+ transform (subtitle_language->begin(), subtitle_language->end(), subtitle_language->begin(), ::toupper);
}
- d += "-" + language;
+ d += "-" + *subtitle_language;
if (ccap) {
d += "-CCAP";
}
} else {
+ /* No subtitles */
d += "-XX";
}
}
/* Count mapped audio channels */
- pair<int, int> ch = audio_channel_types (mapped_audio_channels(), audio_channels());
+ list<int> mapped = mapped_audio_channels ();
+
+ pair<int, int> ch = audio_channel_types (mapped, audio_channels());
if (!ch.first && !ch.second) {
d += "_MOS";
} else if (ch.first) {
d += String::compose("_%1%2", ch.first, ch.second);
}
- /* XXX: HI/VI */
+ if (audio_channels() > static_cast<int>(dcp::HI) && find(mapped.begin(), mapped.end(), dcp::HI) != mapped.end()) {
+ d += "-HI";
+ }
+ if (audio_channels() > static_cast<int>(dcp::VI) && find(mapped.begin(), mapped.end(), dcp::VI) != mapped.end()) {
+ d += "-VI";
+ }
d += "_" + resolution_to_string (_resolution);
break;
case REELTYPE_BY_VIDEO_CONTENT:
{
- optional<DCPTime> last_split;
- shared_ptr<Content> last_video;
- BOOST_FOREACH (shared_ptr<Content> c, content ()) {
+ /* Collect all reel boundaries */
+ list<DCPTime> split_points;
+ split_points.push_back (DCPTime());
+ split_points.push_back (len);
+ BOOST_FOREACH (shared_ptr<Content> c, content()) {
if (c->video) {
BOOST_FOREACH (DCPTime t, c->reel_split_points(shared_from_this())) {
- if (last_split) {
- p.push_back (DCPTimePeriod (last_split.get(), t));
- }
- last_split = t;
+ split_points.push_back (t);
}
- last_video = c;
+ split_points.push_back (c->end(shared_from_this()));
}
}
- DCPTime video_end = last_video ? last_video->end(shared_from_this()) : DCPTime(0);
- if (last_split) {
- /* Definitely go from the last split to the end of the video content */
- p.push_back (DCPTimePeriod (last_split.get(), video_end));
- }
+ split_points.sort ();
+ split_points.unique ();
- if (video_end < len) {
- /* And maybe go after that as well if there is any non-video hanging over the end */
- p.push_back (DCPTimePeriod (video_end, len));
+ /* Make them into periods */
+ optional<DCPTime> last;
+ BOOST_FOREACH (DCPTime t, split_points) {
+ if (last) {
+ p.push_back (DCPTimePeriod(*last, t));
+ }
+ last = t;
}
break;
}
return tt;
}
+
+shared_ptr<InfoFileHandle>
+Film::info_file_handle (DCPTimePeriod period, bool read) const
+{
+ return shared_ptr<InfoFileHandle> (new InfoFileHandle(_info_file_mutex, info_file(period), read));
+}
+
+InfoFileHandle::InfoFileHandle (boost::mutex& mutex, boost::filesystem::path file, bool read)
+ : _lock (mutex)
+ , _file (file)
+{
+ if (read) {
+ _handle = fopen_boost (file, "rb");
+ if (!_handle) {
+ throw OpenFileError (file, errno, OpenFileError::READ);
+ }
+ } else {
+ bool const exists = boost::filesystem::exists (file);
+ if (exists) {
+ _handle = fopen_boost (file, "r+b");
+ } else {
+ _handle = fopen_boost (file, "wb");
+ }
+
+ if (!_handle) {
+ throw OpenFileError (file, errno, exists ? OpenFileError::READ_WRITE : OpenFileError::WRITE);
+ }
+ }
+}
+
/** Close the info file.  The lock taken in the constructor is released
 *  automatically when the _lock member is destroyed, after the file is
 *  closed.
 */
InfoFileHandle::~InfoFileHandle ()
{
	/* _handle is always valid here: the constructor throws rather than
	   leaving it null, and a throwing constructor means this destructor
	   never runs.
	*/
	fclose (_handle);
}