{
shared_ptr<Player> player (new Player (_film));
player->set_ignore_video ();
-
+
int64_t const len = _film->length().frames (_film->audio_frame_rate());
_samples_per_point = max (int64_t (1), len / _num_points);
_analysis->set_peak (_overall_peak, DCPTime::from_frames (_overall_peak_frame, _film->audio_frame_rate ()));
_analysis->write (_film->audio_analysis_path ());
-
+
set_progress (1);
set_state (FINISHED_OK);
}
if (this == &other) {
return *this;
}
-
+
for (int i = 0; i < COUNT; ++i) {
_data[i] = other._data[i];
}
parent->add_child ("Peak")->add_child_text (raw_convert<string> (_data[PEAK]));
parent->add_child ("RMS")->add_child_text (raw_convert<string> (_data[RMS]));
}
-
+
AudioAnalysis::AudioAnalysis (int channels)
{
_data.resize (channels);
AudioPoint& operator= (AudioPoint const &);
void as_xml (xmlpp::Element *) const;
-
+
float& operator[] (int t) {
return _data[t];
}
_peak = peak;
_peak_time = time;
}
-
+
AudioPoint get_point (int c, int p) const;
int points (int c) const;
int channels () const;
if (this == &other) {
return *this;
}
-
+
deallocate ();
allocate (other._channels, other._frames);
copy_from (&other, other._frames, 0, 0);
_channels = channels;
_frames = frames;
_allocated_frames = frames;
-
+
_data = static_cast<float**> (malloc (_channels * sizeof (float *)));
if (!_data) {
throw bad_alloc ();
}
-
+
for (int i = 0; i < _channels; ++i) {
_data[i] = static_cast<float*> (malloc (frames * sizeof (float)));
if (!_data[i]) {
_data[c][i] = 0;
}
}
-
+
_frames = f;
}
AudioBuffers::make_silent (int c)
{
DCPOMATIC_ASSERT (c >= 0 && c < _channels);
-
+
for (int i = 0; i < _frames; ++i) {
_data[c][i] = 0;
}
/* Prevent the asserts from firing if there is nothing to do */
return;
}
-
+
DCPOMATIC_ASSERT (from->channels() == channels());
DCPOMATIC_ASSERT (from);
* @param to Offset to move to.
* @param frames Number of frames to move.
*/
-
+
void
AudioBuffers::move (int from, int to, int frames)
{
if (frames == 0) {
return;
}
-
+
DCPOMATIC_ASSERT (from >= 0);
DCPOMATIC_ASSERT (from < _frames);
DCPOMATIC_ASSERT (to >= 0);
DCPOMATIC_ASSERT (frames <= _frames);
DCPOMATIC_ASSERT ((from + frames) <= _frames);
DCPOMATIC_ASSERT ((to + frames) <= _allocated_frames);
-
+
for (int i = 0; i < _channels; ++i) {
memmove (_data[i] + to, _data[i] + from, frames * sizeof(float));
}
AudioBuffers::apply_gain (float dB)
{
float const linear = pow (10, dB / 20);
-
+
for (int i = 0; i < _channels; ++i) {
for (int j = 0; j < _frames; ++j) {
_data[i][j] *= linear;
float** data () const {
return _data;
}
-
+
float* data (int) const;
int channels () const {
private:
void allocate (int, int);
void deallocate ();
-
+
/** Number of channels */
int _channels;
/** Number of frames (where a frame is one sample across all channels) */
{
shared_ptr<AudioContent> ref = dynamic_pointer_cast<AudioContent> (c[0]);
DCPOMATIC_ASSERT (ref);
-
+
for (size_t i = 0; i < c.size(); ++i) {
shared_ptr<AudioContent> ac = dynamic_pointer_cast<AudioContent> (c[i]);
boost::mutex::scoped_lock lm (_mutex);
_audio_gain = g;
}
-
+
signal_changed (AudioContentProperty::AUDIO_GAIN);
}
boost::mutex::scoped_lock lm (_mutex);
_audio_delay = d;
}
-
+
signal_changed (AudioContentProperty::AUDIO_DELAY);
}
}
i->set_mapping (stream_mapping);
}
-
+
signal_changed (AudioContentProperty::AUDIO_STREAMS);
}
BOOST_FOREACH (AudioStreamPtr i, audio_streams ()) {
channels += i->channels ();
}
-
+
AudioMapping merged (channels, MAX_DCP_AUDIO_CHANNELS);
-
+
int c = 0;
int s = 0;
BOOST_FOREACH (AudioStreamPtr i, audio_streams ()) {
{
shared_ptr<const Film> film = _film.lock ();
DCPOMATIC_ASSERT (film);
-
+
/* Resample to a DCI-approved sample rate */
double t = has_rate_above_48k() ? 96000 : 48000;
void set_audio_gain (double);
void set_audio_delay (int);
-
+
double audio_gain () const {
boost::mutex::scoped_lock lm (_mutex);
return _audio_gain;
}
std::string processing_description () const;
-
+
private:
/** Gain to apply to audio in dB */
double _audio_gain;
{
public:
AudioDecoder (boost::shared_ptr<const AudioContent>);
-
+
boost::shared_ptr<const AudioContent> audio_content () const {
return _audio_content;
}
void audio (AudioStreamPtr stream, boost::shared_ptr<const AudioBuffers>, ContentTime);
void flush ();
void seek (ContentTime t, bool accurate);
-
-private:
+
+private:
boost::shared_ptr<const AudioContent> _audio_content;
/** An AudioDecoderStream object to manage each stream in _audio_content */
std::map<AudioStreamPtr, boost::shared_ptr<AudioDecoderStream> > _streams;
shared_ptr<ContentAudio> dec;
Frame const end = frame + length - 1;
-
+
if (frame < _decoded.frame || end > (_decoded.frame + length * 4)) {
/* Either we have no decoded data, or what we do have is a long way from what we want: seek */
seek (ContentTime::from_frames (frame, _content->resampled_audio_frame_rate()), accurate);
(to be set up shortly)
*/
Frame decoded_offset = 0;
-
+
/* Now enough pass() calls will either:
* (a) give us what we want, or
* (b) hit the end of the decoder.
!_decoder->pass ()
)
{}
-
+
decoded_offset = frame - _decoded.frame;
} else {
while (
!_decoder->pass ()
)
{}
-
+
/* Use decoded_offset of 0, as we don't really care what frames we return */
}
*/
return;
}
-
+
/* Resize _decoded to fit the new data */
int new_size = 0;
if (_decoded.audio->frames() == 0) {
/* Otherwise we need to extend _decoded to include the new stuff */
new_size = _position.get() + data->frames() - _decoded.frame;
}
-
+
_decoded.audio->ensure_size (new_size);
_decoded.audio->set_frames (new_size);
{
public:
AudioDecoderStream (boost::shared_ptr<const AudioContent>, AudioStreamPtr, AudioDecoder* decoder);
-
+
ContentAudio get (Frame time, Frame length, bool accurate);
void audio (boost::shared_ptr<const AudioBuffers>, ContentTime);
void flush ();
void seek (ContentTime time, bool accurate);
-
+
private:
void reset_decoded ();
AudioFilter::sinc_blackman (float cutoff, bool invert) const
{
vector<float> ir (_M + 1);
-
+
/* Impulse response */
-
+
for (int i = 0; i <= _M; ++i) {
if (i == (_M / 2)) {
ir[i] = 2 * M_PI * cutoff;
ir[i] *= (0.42 - 0.5 * cos (2 * M_PI * i / _M) + 0.08 * cos (4 * M_PI * i / _M));
}
}
-
+
/* Normalise */
-
+
float sum = 0;
for (int i = 0; i <= _M; ++i) {
sum += ir[i];
}
-
+
for (int i = 0; i <= _M; ++i) {
ir[i] /= sum;
}
-
+
/* Frequency inversion (swapping low-pass for high-pass, or whatever) */
-
+
if (invert) {
for (int i = 0; i <= _M; ++i) {
ir[i] = -ir[i];
}
ir[_M / 2] += 1;
}
-
+
return ir;
}
AudioFilter::run (shared_ptr<AudioBuffers> in)
{
shared_ptr<AudioBuffers> out (new AudioBuffers (in->channels(), in->frames()));
-
+
if (!_tail) {
_tail.reset (new AudioBuffers (in->channels(), _M + 1));
_tail->make_silent ();
}
-
+
for (int i = 0; i < in->channels(); ++i) {
for (int j = 0; j < in->frames(); ++j) {
float s = 0;
s += in->data(i)[j - k] * _ir[k];
}
}
-
+
out->data(i)[j] = s;
}
}
-
+
int const amount = min (in->frames(), _tail->frames());
if (amount < _tail->frames ()) {
_tail->move (amount, 0, _tail->frames() - amount);
}
_tail->copy_from (in.get(), amount, in->frames() - amount, _tail->frames () - amount);
-
+
return out;
}
{
vector<float> lpf = sinc_blackman (lower, false);
vector<float> hpf = sinc_blackman (higher, true);
-
+
_ir.resize (_M + 1);
for (int i = 0; i <= _M; ++i) {
_ir[i] = lpf[i] + hpf[i];
}
-
+
/* We now have a band-stop, so invert for band-pass */
for (int i = 0; i <= _M; ++i) {
_ir[i] = -_ir[i];
}
-
+
_ir[_M / 2] += 1;
}
{
_input_channels = input_channels;
_output_channels = output_channels;
-
+
_gain.resize (_input_channels);
for (int i = 0; i < _input_channels; ++i) {
_gain[i].resize (_output_channels);
static float const minus_96_db = 0.000015849;
list<int> mapped;
-
+
for (vector<vector<float> >::const_iterator i = _gain.begin(); i != _gain.end(); ++i) {
for (size_t j = 0; j < i->size(); ++j) {
if (abs ((*i)[j]) > minus_96_db) {
mapped.sort ();
mapped.unique ();
-
+
return mapped;
}
AudioMapping (cxml::ConstNodePtr, int);
/* Default copy constructor is fine */
-
+
void as_xml (xmlpp::Node *) const;
void make_zero ();
int output_channels () const {
return _output_channels;
}
-
+
std::string digest () const;
std::list<int> mapped_output_channels () const;
void unmap_all ();
-
+
private:
void setup (int input_channels, int output_channels);
public:
AudioStream (int frame_rate, int channels);
AudioStream (int frame_rate, AudioMapping mapping);
-
+
void set_mapping (AudioMapping mapping);
void set_frame_rate (int frame_rate);
private:
friend struct audio_sampling_rate_test;
-
+
int _frame_rate;
AudioMapping _mapping;
};
: min (n)
, max (n)
{}
-
+
ChannelCount (int min_, int max_)
: min (min_)
, max (max_)
Screen (cxml::ConstNodePtr);
void as_xml (xmlpp::Element *) const;
-
+
boost::shared_ptr<Cinema> cinema;
std::string name;
boost::optional<dcp::Certificate> certificate;
void add_screen (boost::shared_ptr<Screen>);
void remove_screen (boost::shared_ptr<Screen>);
-
+
std::string name;
std::string email;
std::list<boost::shared_ptr<Screen> > screens () const {
return _screens;
}
-private:
+private:
std::list<boost::shared_ptr<Screen> > _screens;
};
std::string name () const {
return _name;
}
-
+
static std::vector<CinemaSoundProcessor const *> all ();
static void setup_cinema_sound_processors ();
static CinemaSoundProcessor const * from_id (std::string id);
/** Construct a ColourConversion using the default sRGB-to-XYZ conversion */
ColourConversion::ColourConversion ()
	: dcp::ColourConversion (dcp::ColourConversion::srgb_to_xyz ())
{

}
/** Construct a ColourConversion from an existing dcp::ColourConversion.
 *  @param conversion_ Conversion to copy settings from.
 */
ColourConversion::ColourConversion (dcp::ColourConversion conversion_)
	: dcp::ColourConversion (conversion_)
{

}
ColourConversion::ColourConversion (cxml::NodePtr node, int version)
} else {
/* Version 1.x */
-
+
if (node->bool_child ("InputGammaLinearised")) {
_in.reset (new dcp::ModifiedGammaTransferFunction (node->number_child<float> ("InputGamma"), 0.04045, 0.055, 12.92));
} else {
}
_yuv_to_rgb = static_cast<dcp::YUVToRGB> (node->optional_number_child<int>("YUVToRGB").get_value_or (dcp::YUV_TO_RGB_REC601));
-
+
list<cxml::NodePtr> m = node->node_children ("Matrix");
if (!m.empty ()) {
/* Read in old <Matrix> nodes and convert them to chromaticities */
node->number_child<double> ("AdjustedWhiteX"), node->number_child<double> ("AdjustedWhiteY")
);
}
- }
-
+ }
+
_out.reset (new dcp::GammaTransferFunction (node->number_child<double> ("OutputGamma")));
}
}
digester.add (dynamic_pointer_cast<const dcp::GammaTransferFunction> (_out)->gamma ());
-
+
return digester.get ();
}
std::string name;
/** an internal short (non-internationalised) name for this preset */
std::string id;
-
+
static std::vector<PresetColourConversion> all () {
return _presets;
}
static PresetColourConversion from_id (std::string id);
static void setup_colour_conversion_presets ();
-
+
private:
static std::vector<PresetColourConversion> _presets;
};
_check_for_test_updates = false;
_maximum_j2k_bandwidth = 250000000;
_log_types = Log::TYPE_GENERAL | Log::TYPE_WARNING | Log::TYPE_ERROR | Log::TYPE_DEBUG;
-#ifdef DCPOMATIC_WINDOWS
+#ifdef DCPOMATIC_WINDOWS
_win32_console = false;
-#endif
+#endif
_allowed_dcp_frame_rates.clear ();
_allowed_dcp_frame_rates.push_back (24);
_servers.push_back ((*i)->content ());
}
}
-
+
_tms_ip = f.string_child ("TMSIP");
_tms_path = f.string_child ("TMSPath");
_tms_user = f.string_child ("TMSUser");
} else if (f.optional_string_child ("DCPIssuer")) {
_dcp_issuer = f.string_child ("DCPIssuer");
}
-
+
if (version && version.get() >= 2) {
_default_isdcf_metadata = ISDCFMetadata (f.node_child ("ISDCFMetadata"));
} else {
_default_isdcf_metadata = ISDCFMetadata (f.node_child ("DCIMetadata"));
}
-
+
_default_still_length = f.optional_number_child<int>("DefaultStillLength").get_value_or (10);
_default_j2k_bandwidth = f.optional_number_child<int>("DefaultJ2KBandwidth").get_value_or (200000000);
_default_audio_delay = f.optional_number_child<int>("DefaultAudioDelay").get_value_or (0);
_allow_any_dcp_frame_rate = f.optional_bool_child ("AllowAnyDCPFrameRate");
_log_types = f.optional_number_child<int> ("LogTypes").get_value_or (Log::TYPE_GENERAL | Log::TYPE_WARNING | Log::TYPE_ERROR);
-#ifdef DCPOMATIC_WINDOWS
+#ifdef DCPOMATIC_WINDOWS
_win32_console = f.optional_bool_child ("Win32Console").get_value_or (false);
-#endif
+#endif
list<cxml::NodePtr> his = f.node_children ("History");
for (list<cxml::NodePtr>::const_iterator i = his.begin(); i != his.end(); ++i) {
root->add_child("DefaultDirectory")->add_child_text (_default_directory.string ());
root->add_child("ServerPortBase")->add_child_text (raw_convert<string> (_server_port_base));
root->add_child("UseAnyServers")->add_child_text (_use_any_servers ? "1" : "0");
-
+
for (vector<string>::const_iterator i = _servers.begin(); i != _servers.end(); ++i) {
root->add_child("Server")->add_child_text (*i);
}
root->add_child("MaximumJ2KBandwidth")->add_child_text (raw_convert<string> (_maximum_j2k_bandwidth));
root->add_child("AllowAnyDCPFrameRate")->add_child_text (_allow_any_dcp_frame_rate ? "1" : "0");
root->add_child("LogTypes")->add_child_text (raw_convert<string> (_log_types));
-#ifdef DCPOMATIC_WINDOWS
+#ifdef DCPOMATIC_WINDOWS
root->add_child("Win32Console")->add_child_text (_win32_console ? "1" : "0");
-#endif
+#endif
xmlpp::Element* signer = root->add_child ("Signer");
dcp::CertificateChain::List certs = _signer->certificates().root_to_leaf ();
{
/* Remove existing instances of this path in the history */
_history.erase (remove (_history.begin(), _history.end(), p), _history.end ());
-
+
_history.insert (_history.begin (), p);
if (_history.size() > HISTORY_SIZE) {
_history.pop_back ();
std::string tms_ip () const {
return _tms_ip;
}
-
+
/** @return The path on a TMS that we should changed DCPs to */
std::string tms_path () const {
return _tms_path;
std::list<boost::shared_ptr<Cinema> > cinemas () const {
return _cinemas;
}
-
+
std::list<int> allowed_dcp_frame_rates () const {
return _allowed_dcp_frame_rates;
}
bool allow_any_dcp_frame_rate () const {
return _allow_any_dcp_frame_rate;
}
-
+
ISDCFMetadata default_isdcf_metadata () const {
return _default_isdcf_metadata;
}
std::string kdm_bcc () const {
return _kdm_bcc;
}
-
+
std::string kdm_email () const {
return _kdm_email;
}
return _log_types;
}
-#ifdef DCPOMATIC_WINDOWS
+#ifdef DCPOMATIC_WINDOWS
bool win32_console () const {
return _win32_console;
}
-#endif
+#endif
std::vector<boost::filesystem::path> history () const {
return _history;
if (!_language) {
return;
}
-
+
_language = boost::none;
changed ();
}
void set_kdm_bcc (std::string f) {
maybe_set (_kdm_bcc, f);
}
-
+
void set_kdm_email (std::string e) {
maybe_set (_kdm_email, e);
}
maybe_set (_log_types, t);
}
-#ifdef DCPOMATIC_WINDOWS
+#ifdef DCPOMATIC_WINDOWS
void set_win32_console (bool c) {
maybe_set (_win32_console, c);
}
-#endif
+#endif
void clear_history () {
_history.clear ();
}
void add_to_history (boost::filesystem::path p);
-
+
void changed ();
boost::signals2::signal<void ()> Changed;
void write () const;
-
+
static Config* instance ();
static void drop ();
static void restore_defaults ();
/** maximum allowed J2K bandwidth in bits per second */
int _maximum_j2k_bandwidth;
int _log_types;
-#ifdef DCPOMATIC_WINDOWS
+#ifdef DCPOMATIC_WINDOWS
bool _win32_console;
-#endif
+#endif
std::vector<boost::filesystem::path> _history;
-
+
/** Singleton instance, or 0 */
static Config* _instance;
};
if (job) {
job->sub (_("Computing digest"));
}
-
+
boost::mutex::scoped_lock lm (_mutex);
vector<boost::filesystem::path> p = _paths;
lm.unlock ();
if (p == _position) {
return;
}
-
+
_position = p;
}
if (!film) {
return shared_ptr<Content> ();
}
-
+
/* This is a bit naughty, but I can't think of a compelling reason not to do it ... */
xmlpp::Document doc;
xmlpp::Node* node = doc.create_root_node ("Content");
Content::identifier () const
{
SafeStringStream s;
-
+
s << Content::digest()
<< "_" << position().get()
<< "_" << trim_start().get()
* @param job Job to use to report progress, or 0.
*/
virtual void examine (boost::shared_ptr<Job> job);
-
+
/** @return Quick one-line summary of the content, as will be presented in the
* film editor.
*/
virtual std::string summary () const = 0;
-
+
/** @return Technical details of this content; these are written to logs to
* help with debugging.
*/
virtual std::string technical_summary () const;
-
+
virtual void as_xml (xmlpp::Node *) const;
virtual DCPTime full_length () const = 0;
virtual std::string identifier () const;
boost::mutex::scoped_lock lm (_mutex);
return _paths[i];
}
-
+
bool paths_valid () const;
/** @return Digest of the content's file(s). Note: this is
}
void set_trim_end (DCPTime);
-
+
DCPTime trim_end () const {
boost::mutex::scoped_lock lm (_mutex);
return _trim_end;
}
DCPTime length_after_trim () const;
-
+
void set_change_signals_frequent (bool f) {
_change_signals_frequent = f;
}
/** Paths of our data files */
std::vector<boost::filesystem::path> _paths;
-
+
private:
std::string _digest;
DCPTime _position;
: audio (new AudioBuffers (0, 0))
, frame (0)
{}
-
+
ContentAudio (boost::shared_ptr<AudioBuffers> a, Frame f)
: audio (a)
, frame (f)
string const type = node->string_child ("Type");
boost::shared_ptr<Content> content;
-
+
if (type == "FFmpeg") {
content.reset (new FFmpegContent (film, node, version, notes));
} else if (type == "Image") {
{}
ContentTimePeriod period () const;
-
+
std::list<dcp::SubtitleString> subs;
};
, part (p)
, frame (f)
{}
-
+
boost::shared_ptr<const ImageProxy> image;
Eyes eyes;
Part part;
cpu_info ()
{
string info;
-
+
#ifdef DCPOMATIC_LINUX
/* This use of ifstream is ok; the filename can never
be non-Latin
if (sysctlbyname ("machdep.cpu.brand_string", buffer, &N, 0, 0) == 0) {
info = buffer;
}
-#endif
+#endif
#ifdef DCPOMATIC_WINDOWS
HKEY key;
}
info = string (value.begin(), value.end());
-
+
RegCloseKey (key);
-#endif
-
+#endif
+
return info;
}
if (_NSGetExecutablePath (buffer, &size)) {
throw StringError ("_NSGetExecutablePath failed");
}
-
+
boost::filesystem::path path (buffer);
path = boost::filesystem::canonical (path);
path = path.parent_path ();
#endif
#ifdef DCPOMATIC_OSX
return app_contents() / "Resources";
-#endif
+#endif
}
void
CloseHandle (child_stderr_read);
#endif
-#ifdef DCPOMATIC_LINUX
+#ifdef DCPOMATIC_LINUX
string ffprobe = "ffprobe \"" + content.string() + "\" 2> \"" + out.string() + "\"";
LOG_GENERAL (N_("Probing with %1"), ffprobe);
system (ffprobe.c_str ());
boost::filesystem::path path = app_contents();
path /= "MacOS";
path /= "ffprobe";
-
+
string ffprobe = path.string() + " \"" + content.string() + "\" 2> \"" + out.string() + "\"";
LOG_GENERAL (N_("Probing with %1"), ffprobe);
system (ffprobe.c_str ());
mount_info ()
{
list<pair<string, string> > m;
-
+
#ifdef DCPOMATIC_LINUX
FILE* f = setmntent ("/etc/mtab", "r");
if (!f) {
return m;
}
-
+
while (true) {
struct mntent* mnt = getmntent (f);
if (!mnt) {
wchar_t dir[512];
GetModuleFileName (GetModuleHandle (0), dir, sizeof (dir));
PathRemoveFileSpec (dir);
-
+
boost::filesystem::path path = dir;
path /= "openssl.exe";
return path;
-#else
+#else
/* We assume that it's on the path for Linux and OS X */
return "openssl";
#endif
{
#ifdef DCPOMATIC_WINDOWS
return _fseeki64 (stream, offset, whence);
-#else
+#else
return fseek (stream, offset, whence);
-#endif
+#endif
}
void
{
#ifdef DCPOMATIC_WINDOWS
SetThreadExecutionState (ES_SYSTEM_REQUIRED);
-#endif
+#endif
}
Waker::Waker ()
// IOPMAssertionCreateWithName (kIOPMAssertionTypeNoIdleSleep, kIOPMAssertionLevelOn, CFSTR ("Encoding DCP"), &_assertion_id);
/* but it's not available on 10.5, so we use this */
IOPMAssertionCreate (kIOPMAssertionTypeNoIdleSleep, kIOPMAssertionLevelOn, &_assertion_id);
-#endif
+#endif
}
Waker::~Waker ()
{
#ifdef DCPOMATIC_OSX
	/* Release the OS X power-management assertion that was taken out
	   (via IOPMAssertionCreate) so the system may idle-sleep again.
	*/
	IOPMAssertionRelease (_assertion_id);
#endif
}
if (!f) {
throw FileError (_("could not open file for reading"), file);
}
-
+
size_t const r = fread (_data.get(), 1, _size, f);
if (r != size_t (_size)) {
fclose (f);
throw FileError (_("could not read from file"), file);
}
-
+
fclose (f);
}
DCPContent::examine (shared_ptr<Job> job)
{
bool const could_be_played = can_be_played ();
-
+
job->set_progress_unknown ();
Content::examine (job);
-
+
shared_ptr<DCPExaminer> examiner (new DCPExaminer (shared_from_this ()));
take_from_video_examiner (examiner);
take_from_audio_examiner (examiner);
}
DCPTime full_length () const;
-
+
void examine (boost::shared_ptr<Job>);
std::string summary () const;
std::string technical_summary () const;
boost::mutex::scoped_lock lm (_mutex);
return _has_subtitles;
}
-
+
boost::filesystem::path directory () const;
bool encrypted () const {
}
bool can_be_played () const;
-
+
private:
void read_directory (boost::filesystem::path);
-
+
std::string _name;
bool _has_subtitles;
/** true if our DCP is encrypted */
/** All available DCP content types */
static std::vector<DCPContentType const *> _dcp_content_types;
};
-
+
#endif
float const vfr = _dcp_content->video_frame_rate ();
int64_t const frame = _next.frames (vfr);
-
+
if ((*_reel)->main_picture ()) {
shared_ptr<dcp::PictureAsset> asset = (*_reel)->main_picture()->asset ();
shared_ptr<dcp::MonoPictureAsset> mono = dynamic_pointer_cast<dcp::MonoPictureAsset> (asset);
shared_ptr<ImageProxy> (new J2KImageProxy (stereo->get_frame (entry_point + frame), asset->size(), dcp::EYE_LEFT)),
frame
);
-
+
video (
shared_ptr<ImageProxy> (new J2KImageProxy (stereo->get_frame (entry_point + frame), asset->size(), dcp::EYE_RIGHT)),
frame
++_reel;
}
}
-
+
return false;
}
private:
bool pass ();
void seek (ContentTime t, bool accurate);
-
+
std::list<ContentTimePeriod> image_subtitles_during (ContentTimePeriod, bool starting) const;
std::list<ContentTimePeriod> text_subtitles_during (ContentTimePeriod, bool starting) const;
_video_length += (*i)->main_picture()->duration();
}
-
+
if ((*i)->main_sound ()) {
shared_ptr<dcp::SoundAsset> asset = (*i)->main_sound()->asset ();
_encrypted = dcp.encrypted ();
_kdm_valid = true;
-
+
/* Check that we can read the first picture frame */
try {
if (!dcp.cpls().empty () && !dcp.cpls().front()->reels().empty ()) {
shared_ptr<dcp::PictureAsset> asset = dcp.cpls().front()->reels().front()->main_picture()->asset ();
shared_ptr<dcp::MonoPictureAsset> mono = dynamic_pointer_cast<dcp::MonoPictureAsset> (asset);
shared_ptr<dcp::StereoPictureAsset> stereo = dynamic_pointer_cast<dcp::StereoPictureAsset> (asset);
-
+
if (mono) {
mono->get_frame(0)->xyz_image ();
} else {
stereo->get_frame(0)->xyz_image (dcp::EYE_LEFT);
}
-
+
}
} catch (dcp::DCPReadError& e) {
_kdm_valid = false;
{
public:
DCPExaminer (boost::shared_ptr<const DCPContent>);
-
+
boost::optional<float> video_frame_rate () const {
return _video_frame_rate;
}
-
+
dcp::Size video_size () const {
return _video_size.get_value_or (dcp::Size (1998, 1080));
}
-
+
Frame video_length () const {
return _video_length;
}
int audio_channels () const {
return _audio_channels.get_value_or (0);
}
-
+
Frame audio_length () const {
return _audio_length;
}
-
+
int audio_frame_rate () const {
return _audio_frame_rate.get_value_or (48000);
}
DCPSubtitle::load (boost::filesystem::path file) const
{
shared_ptr<dcp::SubtitleAsset> sc;
-
+
try {
sc.reset (new dcp::InteropSubtitleAsset (file));
} catch (...) {
-
+
}
if (!sc) {
: Content (film, path)
, SubtitleContent (film, path)
{
-
+
}
DCPSubtitleContent::DCPSubtitleContent (shared_ptr<const Film> film, cxml::ConstNodePtr node, int version)
/* Default to turning these subtitles on */
set_use_subtitles (true);
-
+
boost::mutex::scoped_lock lm (_mutex);
shared_ptr<dcp::InteropSubtitleAsset> iop = dynamic_pointer_cast<dcp::InteropSubtitleAsset> (sc);
{
return Content::technical_summary() + " - " + _("DCP XML subtitles");
}
-
+
void
DCPSubtitleContent::as_xml (xmlpp::Node* node) const
{
{
return list<ContentTimePeriod> ();
}
-
+
list<ContentTimePeriod>
DCPSubtitleDecoder::text_subtitles_during (ContentTimePeriod p, bool starting) const
{
ContentTime::from_seconds (i->in().as_seconds ()),
ContentTime::from_seconds (i->out().as_seconds ())
);
-
+
if ((starting && p.contains (period.from)) || (!starting && p.overlaps (period))) {
d.push_back (period);
}
, _burn_subtitles (b)
, _log (l)
{
-
+
}
DCPVideo::DCPVideo (shared_ptr<const PlayerVideo> frame, shared_ptr<const cxml::Node> node, shared_ptr<Log> log)
parameters.tile_size_on = false;
parameters.cp_tdx = 1;
parameters.cp_tdy = 1;
-
+
/* Tile part */
parameters.tp_flag = 'C';
parameters.tp_on = 1;
-
+
/* Tile and Image shall be at (0,0) */
parameters.cp_tx0 = 0;
parameters.cp_ty0 = 0;
parameters.cblockw_init = 32;
parameters.cblockh_init = 32;
parameters.csty |= 0x01;
-
+
/* The progression order shall be CPRL */
parameters.prog_order = CPRL;
-
+
/* No ROI */
parameters.roi_compno = -1;
-
+
parameters.subsampling_dx = 1;
parameters.subsampling_dy = 1;
-
+
/* 9-7 transform */
parameters.irreversible = 1;
-
+
parameters.tcp_rates[0] = 0;
parameters.tcp_numlayers++;
parameters.cp_disto_alloc = 1;
if (_resolution == RESOLUTION_4K) {
parameters.numpocs = 2;
parameters.POC[0].tile = 1;
- parameters.POC[0].resno0 = 0;
+ parameters.POC[0].resno0 = 0;
parameters.POC[0].compno0 = 0;
parameters.POC[0].layno1 = 1;
parameters.POC[0].resno1 = parameters.numresolution - 1;
parameters.POC[0].compno1 = 3;
parameters.POC[0].prg1 = CPRL;
parameters.POC[1].tile = 1;
- parameters.POC[1].resno0 = parameters.numresolution - 1;
+ parameters.POC[1].resno0 = parameters.numresolution - 1;
parameters.POC[1].compno0 = 0;
parameters.POC[1].layno1 = 1;
parameters.POC[1].resno1 = parameters.numresolution;
parameters.POC[1].compno1 = 3;
parameters.POC[1].prg1 = CPRL;
}
-
+
parameters.cp_comment = strdup (N_("DCP-o-matic"));
parameters.cp_cinema = _resolution == RESOLUTION_2K ? CINEMA2K_24 : CINEMA4K_24;
/* 3 components, so use MCT */
parameters.tcp_mct = 1;
-
+
/* set max image */
parameters.max_comp_size = max_comp_size;
parameters.tcp_rates[0] = ((float) (3 * xyz->size().width * xyz->size().height * 12)) / (max_cs_len * 8);
add_metadata (root);
LOG_GENERAL (N_("Sending frame %1 to remote"), _index);
-
+
/* Send XML metadata */
string xml = doc.write_to_string ("UTF-8");
socket->write (xml.length() + 1);
socket->read (e.data().get(), e.size());
LOG_GENERAL (N_("Finished remotely-encoded frame %1"), _index);
-
+
return e;
}
Eyes eyes () const;
bool same (boost::shared_ptr<const DCPVideo> other) const;
-
+
private:
void add_metadata (xmlpp::Element *) const;
-
+
boost::shared_ptr<const PlayerVideo> _frame;
int _index; ///< frame index within the DCP's intrinsic duration
int _frames_per_second; ///< Frames per second that we will use for the DCP
boost::system::error_code ec = boost::asio::error::would_block;
boost::asio::async_write (_socket, boost::asio::buffer (data, size), boost::lambda::var(ec) = boost::lambda::_1);
-
+
do {
_io_service.run_one ();
} while (ec == boost::asio::error::would_block);
do {
_io_service.run_one ();
} while (ec == boost::asio::error::would_block);
-
+
if (ec) {
throw NetworkError (String::compose (_("error during async_read (%1)"), ec.value ()));
}
void write (uint32_t n);
void write (uint8_t const * data, int size);
-
+
void read (uint8_t* data, int size);
uint32_t read_uint32 ();
-
+
private:
void check ();
/* Explicit conversion from type O */
Time (Time<O, S> d, FrameRateChange f);
-
+
Type get () const {
return _t;
}
to a frame boundary at the start rather than the end.
*/
int64_t ff = frames (r);
-
+
h = ff / (3600 * r);
ff -= h * 3600 * r;
m = ff / (60 * r);
return o.str ();
}
-
+
static Time<S, O> from_seconds (double s) {
return Time<S, O> (s * HZ);
}
static Time<S, O> min () {
return Time<S, O> (-INT64_MAX);
}
-
+
static Time<S, O> max () {
return Time<S, O> (INT64_MAX);
}
-
+
private:
friend struct dcptime_round_up_test;
-
+
Type _t;
static const int HZ = 96000;
};
{
public:
ContentTimePeriod () {}
-
+
ContentTimePeriod (ContentTime f, ContentTime t)
: from (f)
, to (t)
public:
virtual ~Decoder () {}
-protected:
+protected:
friend class AudioDecoderStream;
-
+
/** Seek so that the next pass() will yield the next thing
* (video/sound frame, subtitle etc.) at or after the requested
* time. Pass accurate = true to try harder to ensure that, at worst,
float const t = min (to, 4.0f);
db += (t - from) * 20;
}
-
+
if (to > 4) {
float const t = max (from, 4.0f);
db += (to - t) * 3.33333333333333333;
}
lock.unlock ();
-
+
terminate_threads ();
LOG_GENERAL (N_("Mopping up %1"), _queue.size());
LOG_ERROR (N_("Local encode failed (%1)"), e.what ());
}
}
-}
+}
/** @return an estimate of the current number of frames we are encoding per second,
* or 0 if not known.
Encoder::frame_done ()
{
boost::mutex::scoped_lock lock (_state_mutex);
-
+
struct timeval tv;
gettimeofday (&tv, 0);
_time_history.push_front (tv);
Encoder::enqueue (shared_ptr<PlayerVideo> pv)
{
_waker.nudge ();
-
+
boost::mutex::scoped_lock lock (_mutex);
/* XXX: discard 3D here if required */
encodings.
*/
int remote_backoff = 0;
-
+
while (true) {
LOG_TIMING ("[%1] encoder thread sleeps", boost::this_thread::get_id());
shared_ptr<DCPVideo> vf = _queue.front ();
LOG_TIMING ("[%1] encoder thread pops frame %2 (%3) from queue", boost::this_thread::get_id(), vf->index(), vf->eyes ());
_queue.pop_front ();
-
+
lock.unlock ();
optional<Data> encoded;
if (server) {
try {
encoded = vf->encode_remotely (server.get ());
-
+
if (remote_backoff > 0) {
LOG_GENERAL ("%1 was lost, but now she is found; removing backoff", server->host_name ());
}
-
+
/* This job succeeded, so remove any backoff */
remote_backoff = 0;
-
+
} catch (std::exception& e) {
if (remote_backoff < 60) {
/* back off more */
vf->index(), server->host_name(), e.what(), remote_backoff
);
}
-
+
} else {
try {
LOG_TIMING ("[%1] encoder thread begins local encode of %2", boost::this_thread::get_id(), vf->index());
int video_frames_out () const;
private:
-
+
void frame_done ();
-
+
void encoder_thread (boost::optional<ServerDescription>);
void terminate_threads ();
void add_worker_threads (ServerDescription);
info.dwOSVersionInfoSize = sizeof (info);
GetVersionEx (&info);
LOG_GENERAL ("Windows version %1.%2.%3 SP %4", info.dwMajorVersion, info.dwMinorVersion, info.dwBuildNumber, info.szCSDVersion);
-#endif
+#endif
#if __GNUC__
#if __x86_64__
LOG_GENERAL_NC ("Built for 32-bit");
#endif
#endif
-
+
LOG_GENERAL ("CPU: %1, %2 processors", cpu_info(), boost::thread::hardware_concurrency ());
list<pair<string, string> > const m = mount_info ();
for (list<pair<string, string> >::const_iterator i = m.begin(); i != m.end(); ++i) {
/** Construct an error describing a failure to write to a file.
 *  @param f File that could not be written to.
 *  @param e errno value describing why the write failed (rendered via strerror).
 */
WriteFileError::WriteFileError (boost::filesystem::path f, int e)
	: FileError (String::compose (_("could not write to file %1 (%2)"), f.string(), strerror (e)), f)
{

}
MissingSettingError::MissingSettingError (string s)
/** Construct an error describing a parse failure in a SubRip subtitle file.
 *  @param saw Text that was actually seen ("[nothing]" is reported if empty).
 *  @param expecting Text that was expected at this point in the file.
 *  @param f File being read.
 */
SubRipError::SubRipError (string saw, string expecting, boost::filesystem::path f)
	: FileError (String::compose (_("Error in SubRip file: saw %1 while expecting %2"), saw.empty() ? "[nothing]" : saw, expecting), f)
{

}
InvalidSignerError::InvalidSignerError ()
}
}
-protected:
-
+protected:
+
void store_current () {
boost::mutex::scoped_lock lm (_mutex);
_exception = boost::current_exception ();
_avio_context = avio_alloc_context (_avio_buffer, _avio_buffer_size, 0, this, avio_read_wrapper, 0, avio_seek_wrapper);
_format_context = avformat_alloc_context ();
_format_context->pb = _avio_context;
-
+
AVDictionary* options = 0;
/* These durations are in microseconds, and represent how far into the content file
we will look for streams.
*/
av_dict_set (&options, "analyzeduration", raw_convert<string> (5 * 60 * 1000000).c_str(), 0);
av_dict_set (&options, "probesize", raw_convert<string> (5 * 60 * 1000000).c_str(), 0);
-
+
if (avformat_open_input (&_format_context, 0, 0, &options) < 0) {
throw OpenFileError (_ffmpeg_content->path(0).string ());
}
*/
if (_video_stream == -1 && video_stream_undefined_frame_rate != -1) {
_video_stream = video_stream_undefined_frame_rate;
- }
-
+ }
+
if (_video_stream < 0) {
throw DecodeError (N_("could not find video stream"));
}
}
}
}
-
+
if (duplicates) {
/* Put in our own IDs */
for (uint32_t i = 0; i < _format_context->nb_streams; ++i) {
for (uint32_t i = 0; i < _format_context->nb_streams; ++i) {
AVCodecContext* context = _format_context->streams[i]->codec;
-
+
AVCodec* codec = avcodec_find_decoder (context->codec_id);
if (codec) {
*/
AVDictionary* options = 0;
av_dict_set (&options, "disable_footer", "1", 0);
-
+
if (avcodec_open2 (context, codec, &options) < 0) {
throw DecodeError (N_("could not open decoder"));
}
if (!_ffmpeg_content->subtitle_stream ()) {
return 0;
}
-
+
return _ffmpeg_content->subtitle_stream()->stream(_format_context)->codec;
}
if (whence == AVSEEK_SIZE) {
return _file_group.length ();
}
-
+
return _file_group.seek (pos, whence);
}
protected:
AVCodecContext* video_codec_context () const;
AVCodecContext* subtitle_codec_context () const;
-
+
boost::shared_ptr<const FFmpegContent> _ffmpeg_content;
uint8_t* _avio_buffer;
int _avio_buffer_size;
AVIOContext* _avio_context;
FileGroup _file_group;
-
+
AVFormatContext* _format_context;
AVPacket _packet;
AVFrame* _frame;
void as_xml (xmlpp::Node *) const;
/* XXX: should probably be locked */
-
+
boost::optional<ContentTime> first_audio;
private:
if (!_subtitle_streams.empty ()) {
_subtitle_stream = _subtitle_streams.front ();
}
-
+
_audio_streams = examiner->audio_streams ();
if (!_audio_streams.empty ()) {
}
string filt = Filter::ffmpeg_string (_filters);
-
+
return Content::technical_summary() + " - "
+ VideoContent::technical_summary() + " - "
+ AudioContent::technical_summary() + " - "
FFmpegContent::audio_streams () const
{
boost::mutex::scoped_lock lm (_mutex);
-
+
vector<AudioStreamPtr> s;
copy (_audio_streams.begin(), _audio_streams.end(), back_inserter (s));
return s;
boost::shared_ptr<FFmpegContent> shared_from_this () {
return boost::dynamic_pointer_cast<FFmpegContent> (Content::shared_from_this ());
}
-
+
void examine (boost::shared_ptr<Job>);
std::string summary () const;
std::string technical_summary () const;
/* VideoContent */
void set_default_colour_conversion ();
-
+
/* AudioContent */
std::vector<AudioStreamPtr> audio_streams () const;
bool has_subtitles () const;
void set_filters (std::vector<Filter const *> const &);
-
+
std::vector<boost::shared_ptr<FFmpegSubtitleStream> > subtitle_streams () const {
boost::mutex::scoped_lock lm (_mutex);
return _subtitle_streams;
private:
friend struct ffmpeg_pts_offset_test;
friend struct audio_sampling_rate_test;
-
+
std::vector<boost::shared_ptr<FFmpegSubtitleStream> > _subtitle_streams;
boost::shared_ptr<FFmpegSubtitleStream> _subtitle_stream;
std::vector<boost::shared_ptr<FFmpegAudioStream> > _audio_streams;
FFmpegDecoder::flush ()
{
/* Get any remaining frames */
-
+
_packet.data = 0;
_packet.size = 0;
-
+
/* XXX: should we reset _packet.data and size after each *_decode_* call? */
-
+
while (decode_video_packet ()) {}
-
+
decode_audio_packet ();
AudioDecoder::flush ();
}
av_strerror (r, buf, sizeof(buf));
LOG_ERROR (N_("error on av_read_frame (%1) (%2)"), buf, r);
}
-
+
flush ();
return true;
}
}
}
break;
-
+
case AV_SAMPLE_FMT_S16:
{
int16_t* p = reinterpret_cast<int16_t *> (data[0]);
}
}
break;
-
+
case AV_SAMPLE_FMT_S32:
{
int32_t* p = reinterpret_cast<int32_t *> (data[0]);
}
}
break;
-
+
case AV_SAMPLE_FMT_FLTP:
{
float** p = reinterpret_cast<float**> (data);
/* XXX: it seems debatable whether PTS should be used here...
http://www.mjbshaw.com/2012/04/seeking-in-ffmpeg-know-your-timestamp.html
*/
-
+
ContentTime u = time - _pts_offset;
if (u < ContentTime ()) {
u = ContentTime ();
avcodec_flush_buffers (video_codec_context());
/* XXX: should be flushing audio buffers? */
-
+
if (subtitle_codec_context ()) {
avcodec_flush_buffers (subtitle_codec_context ());
}
/* Audio packets can contain multiple frames, so we may have to call avcodec_decode_audio4
several times.
*/
-
+
AVPacket copy_packet = _packet;
/* XXX: inefficient */
/* The packet's stream may not be an audio one; just ignore it in this method if so */
return;
}
-
+
while (copy_packet.size > 0) {
int frame_finished;
av_frame_get_best_effort_timestamp (_frame) *
av_q2d ((*stream)->stream (_format_context)->time_base))
+ _pts_offset;
-
+
int const data_size = av_samples_get_buffer_size (
0, (*stream)->stream(_format_context)->codec->channels, _frame->nb_samples, audio_sample_format (*stream), 1
);
audio (*stream, deinterleave_audio (*stream, _frame->data, data_size), ct);
}
-
+
copy_packet.data += decode_result;
copy_packet.size -= decode_result;
}
boost::mutex::scoped_lock lm (_filter_graphs_mutex);
shared_ptr<FilterGraph> graph;
-
+
list<shared_ptr<FilterGraph> >::iterator i = _filter_graphs.begin();
while (i != _filter_graphs.end() && !(*i)->can_process (dcp::Size (_frame->width, _frame->height), (AVPixelFormat) _frame->format)) {
++i;
for (list<pair<shared_ptr<Image>, int64_t> >::iterator i = images.begin(); i != images.end(); ++i) {
shared_ptr<Image> image = i->first;
-
+
if (i->second != AV_NOPTS_VALUE) {
double const pts = i->second * av_q2d (_format_context->streams[_video_stream]->time_base) + _pts_offset.seconds ();
video (
return true;
}
-
+
void
FFmpegDecoder::decode_subtitle_packet ()
{
if (avcodec_decode_subtitle2 (subtitle_codec_context(), &sub, &got_subtitle, &_packet) < 0 || !got_subtitle) {
return;
}
-
+
if (sub.num_rects <= 0) {
/* Sometimes we get an empty AVSubtitle, which is used by some codecs to
indicate that the previous subtitle should stop. We can ignore it here.
/* We have to look up the `to' time in the stream's records */
period.to = ffmpeg_content()->subtitle_stream()->find_subtitle_to (sub_period.from);
}
-
+
AVSubtitleRect const * rect = sub.rects[0];
switch (rect->type) {
cout << "XXX: SUBTITLE_ASS " << rect->ass << "\n";
break;
}
-
+
avsubtitle_free (&sub);
}
G, third B, fourth A.
*/
shared_ptr<Image> image (new Image (PIX_FMT_RGBA, dcp::Size (rect->w, rect->h), true));
-
+
/* Start of the first line in the subtitle */
uint8_t* sub_p = rect->pict.data[0];
/* sub_p looks up into a BGRA palette which is here
uint32_t const * palette = (uint32_t *) rect->pict.data[1];
/* Start of the output data */
uint32_t* out_p = (uint32_t *) image->data()[0];
-
+
for (int y = 0; y < rect->h; ++y) {
uint8_t* sub_line_p = sub_p;
uint32_t* out_line_p = out_p;
sub_p += rect->pict.linesize[0];
out_p += image->stride()[0] / sizeof (uint32_t);
}
-
+
dcp::Size const vs = _ffmpeg_content->video_size ();
dcpomatic::Rect<double> const scaled_rect (
static_cast<double> (rect->x) / vs.width,
static_cast<double> (rect->w) / vs.width,
static_cast<double> (rect->h) / vs.height
);
-
+
image_subtitle (period, image, scaled_rect);
}
std::list<ContentTimePeriod> image_subtitles_during (ContentTimePeriod, bool starting) const;
std::list<ContentTimePeriod> text_subtitles_during (ContentTimePeriod, bool starting) const;
-
+
boost::shared_ptr<Log> _log;
-
+
std::list<boost::shared_ptr<FilterGraph> > _filter_graphs;
boost::mutex _filter_graphs_mutex;
if (s->codec->channel_layout == 0) {
s->codec->channel_layout = av_get_default_channel_layout (s->codec->channels);
}
-
+
_audio_streams.push_back (
shared_ptr<FFmpegAudioStream> (
new FFmpegAudioStream (audio_stream_name (s), s->id, s->codec->sample_rate, s->codec->channels)
if (_packet.stream_index == _video_stream) {
video_packet (context);
}
-
+
for (size_t i = 0; i < _audio_streams.size(); ++i) {
if (_audio_streams[i]->uses_index (_format_context, _packet.stream_index)) {
audio_packet (context, _audio_streams[i]);
FFmpegExaminer::frame_time (AVStream* s) const
{
optional<ContentTime> t;
-
+
int64_t const bet = av_frame_get_best_effort_timestamp (_frame);
if (bet != AV_NOPTS_VALUE) {
t = ContentTime::from_seconds (bet * av_q2d (s->time_base));
if (lang) {
n << lang->value;
}
-
+
AVDictionaryEntry const * title = av_dict_get (s->metadata, "title", 0, 0);
if (title) {
if (!n.str().empty()) {
{
public:
FFmpegExaminer (boost::shared_ptr<const FFmpegContent>, boost::shared_ptr<Job> job = boost::shared_ptr<Job> ());
-
+
boost::optional<float> video_frame_rate () const;
dcp::Size video_size () const;
Frame video_length () const;
std::vector<boost::shared_ptr<FFmpegSubtitleStream> > subtitle_streams () const {
return _subtitle_streams;
}
-
+
std::vector<boost::shared_ptr<FFmpegAudioStream> > audio_streams () const {
return _audio_streams;
}
boost::optional<ContentTime> first_video () const {
return _first_video;
}
-
+
private:
void video_packet (AVCodecContext *);
void audio_packet (AVCodecContext *, boost::shared_ptr<FFmpegAudioStream>);
void subtitle_packet (AVCodecContext *, boost::shared_ptr<FFmpegSubtitleStream>);
-
+
std::string stream_name (AVStream* s) const;
std::string audio_stream_name (AVStream* s) const;
std::string subtitle_stream_name (AVStream* s) const;
: name (n)
, _id (i)
{}
-
+
FFmpegStream (cxml::ConstNodePtr);
void as_xml (xmlpp::Node *) const;
friend bool operator== (FFmpegStream const & a, FFmpegStream const & b);
friend bool operator!= (FFmpegStream const & a, FFmpegStream const & b);
-
+
private:
int _id;
};
_subtitles[period.from] = period.to;
}
-list<ContentTimePeriod>
+list<ContentTimePeriod>
FFmpegSubtitleStream::subtitles_during (ContentTimePeriod period, bool starting) const
{
list<ContentTimePeriod> d;
-
+
/* XXX: inefficient */
for (map<ContentTime, ContentTime>::const_iterator i = _subtitles.begin(); i != _subtitles.end(); ++i) {
if ((starting && period.contains (i->first)) || (!starting && period.overlaps (ContentTimePeriod (i->first, i->second)))) {
FFmpegSubtitleStream (std::string n, int i)
: FFmpegStream (n, i)
{}
-
+
FFmpegSubtitleStream (cxml::ConstNodePtr);
void as_xml (xmlpp::Node *) const;
/* Already open */
return;
}
-
+
if (_current_file) {
fclose (_current_file);
}
}
#ifdef DCPOMATIC_WINDOWS
full_pos += _ftelli64 (_current_file);
-#else
+#else
full_pos += ftell (_current_file);
-#endif
+#endif
full_pos += pos;
break;
case SEEK_END:
_playlist_changed_connection = _playlist->Changed.connect (bind (&Film::playlist_changed, this));
_playlist_content_changed_connection = _playlist->ContentChanged.connect (bind (&Film::playlist_content_changed, this, _1, _2, _3));
-
+
/* Make state.directory a complete path without ..s (where possible)
(Code swiped from Adam Bowen on stackoverflow)
*/
-
+
boost::filesystem::path p (boost::filesystem::system_complete (dir));
boost::filesystem::path result;
for (boost::filesystem::path::iterator i = p.begin(); i != p.end(); ++i) {
for (list<boost::signals2::connection>::const_iterator i = _job_connections.begin(); i != _job_connections.end(); ++i) {
i->disconnect ();
}
-}
+}
string
Film::video_identifier () const
SafeStringStream s;
s.imbue (std::locale::classic ());
-
+
s << container()->id()
<< "_" << resolution_to_string (_resolution)
<< "_" << _playlist->video_identifier()
return s.str ();
}
-
+
/** @return The file to write video frame info to */
boost::filesystem::path
Film::info_file () const
if (!ac) {
continue;
}
-
+
digester.add (ac->digest ());
digester.add (ac->audio_mapping().digest ());
digester.add (ac->audio_gain ());
Film::make_dcp ()
{
set_isdcf_date_today ();
-
+
if (dcp_name().find ("/") != string::npos) {
throw BadSettingError (_("name"), _("cannot contain slashes"));
}
LOG_GENERAL ("DCP video rate %1 fps", video_frame_rate());
LOG_GENERAL ("%1 threads", Config::instance()->num_local_encoding_threads());
LOG_GENERAL ("J2K bandwidth %1", j2k_bandwidth());
-
+
if (container() == 0) {
throw MissingSettingError (_("container"));
}
if (_state_version > current_state_version) {
throw StringError (_("This film was created with a newer version of DCP-o-matic, and it cannot be loaded into this version. Sorry!"));
}
-
+
_name = f.string_child ("Name");
if (_state_version >= 9) {
_use_isdcf_name = f.bool_child ("UseISDCFName");
boost::filesystem::path p;
p /= _directory;
p /= d;
-
+
boost::filesystem::create_directories (p);
-
+
return p;
}
p /= f;
boost::filesystem::create_directories (p.parent_path ());
-
+
return p;
}
split (words, raw_name, is_any_of (" "));
string fixed_name;
-
+
/* Add each word to fixed_name */
for (vector<string>::const_iterator i = words.begin(); i != words.end(); ++i) {
string w = *i;
++caps;
}
}
-
+
/* If w is all caps make the rest of it lower case, otherwise
leave it alone.
*/
if (dm.temp_version) {
d << "-Temp";
}
-
+
if (dm.pre_release) {
d << "-Pre";
}
-
+
if (dm.red_band) {
d << "-RedBand";
}
-
+
if (!dm.chain.empty ()) {
d << "-" << dm.chain;
}
if (video_frame_rate() != 24) {
d << "-" << video_frame_rate();
}
-
+
if (container()) {
d << "_" << container()->isdcf_name();
}
ContentList cl = content ();
-
+
/* XXX: this uses the first bit of content only */
/* The standard says we don't do this for trailers, for some strange reason */
break;
}
}
-
+
if (content_ratio && content_ratio != container()) {
d << "-" << content_ratio->isdcf_name();
}
copy (c.begin(), c.end(), back_inserter (mapped));
}
}
-
+
mapped.sort ();
mapped.unique ();
-
+
/* Count them */
-
+
for (list<int>::const_iterator i = mapped.begin(); i != mapped.end(); ++i) {
if (*i >= audio_channels()) {
/* This channel is mapped but is not included in the DCP */
continue;
}
-
+
if (static_cast<dcp::Channel> (*i) == dcp::LFE) {
++lfe;
} else {
}
}
}
-
+
if (non_lfe) {
d << "_" << non_lfe << lfe;
}
/* XXX: HI/VI */
d << "_" << resolution_to_string (_resolution);
-
+
if (!dm.studio.empty ()) {
d << "_" << dm.studio;
}
} else {
d << "_SMPTE";
}
-
+
if (three_d ()) {
d << "-3D";
}
filtered += unfiltered[i];
}
}
-
+
return filtered;
}
} else if (e == EYES_RIGHT) {
s << ".R";
}
-
+
s << ".j2c";
if (t) {
Film::cpls () const
{
vector<CPLSummary> out;
-
+
boost::filesystem::path const dir = directory ();
for (boost::filesystem::directory_iterator i = boost::filesystem::directory_iterator(dir); i != boost::filesystem::directory_iterator(); ++i) {
if (
}
}
}
-
+
return out;
}
if (dynamic_pointer_cast<FFmpegContent> (c)) {
run_ffprobe (c->path(0), file ("ffprobe.log"), _log);
}
-
+
shared_ptr<Job> j (new ExamineContentJob (shared_from_this(), c));
_job_connections.push_back (
j->Finished.connect (bind (&Film::maybe_add_content, this, boost::weak_ptr<Job> (j), boost::weak_ptr<Content> (c)))
);
-
+
JobManager::instance()->add (j);
}
if (!job || !job->finished_ok ()) {
return;
}
-
+
shared_ptr<Content> content = c.lock ();
if (content) {
add_content (content);
{
signal_changed (CONTENT);
signal_changed (NAME);
-}
+}
int
Film::audio_frame_rate () const
if (!signer->valid ()) {
throw InvalidSignerError ();
}
-
+
return dcp::DecryptedKDM (
cpl, key(), from, until, "DCP-o-matic", cpl->content_title_text(), dcp::LocalTime().as_string()
).encrypt (signer, target, formulation);
Film::subtitle_language () const
{
set<string> languages;
-
+
ContentList cl = content ();
BOOST_FOREACH (shared_ptr<Content>& c, cl) {
shared_ptr<SubtitleContent> sc = dynamic_pointer_cast<SubtitleContent> (c);
if (audio_processor ()) {
return audio_processor()->input_names ();
}
-
+
vector<string> n;
n.push_back (_("L"));
n.push_back (_("R"));
uint64_t required_disk_space () const;
bool should_be_enough_disk_space (double& required, double& available, bool& can_hard_link) const;
-
+
/* Proxies for some Playlist methods */
ContentList content () const;
dcp::LocalTime until,
dcp::Formulation formulation
) const;
-
+
std::list<dcp::EncryptedKDM> make_kdms (
std::list<boost::shared_ptr<Screen> >,
boost::filesystem::path cpl_file,
std::vector<std::string> audio_output_names () const;
void repeat_content (ContentList, int);
-
+
/** Identifiers for the parts of our state;
used for signalling changes.
*/
bool is_signed () const {
return _signed;
}
-
+
bool encrypted () const {
return _encrypted;
}
AudioProcessor const * audio_processor () const {
return _audio_processor;
}
-
+
/* SET */
* must not be relative.
*/
boost::filesystem::path _directory;
-
+
/** Name for DCP-o-matic */
std::string _name;
/** True if a auto-generated ISDCF-compliant name should be used for our DCP */
Filter::setup_filters ()
{
/* Note: "none" is a magic id name, so don't use it here */
-
+
maybe_add (N_("mcdeint"), _("Motion compensating deinterlacer"), _("De-interlacing"), N_("mcdeint"));
maybe_add (N_("kerndeint"), _("Kernel deinterlacer"), _("De-interlacing"), N_("kerndeint"));
maybe_add (N_("yadif"), _("Yet Another Deinterlacing Filter"), _("De-interlacing"), N_("yadif"));
std::string vf () const {
return _vf;
}
-
+
std::string category () const {
return _category;
}
-
+
static std::vector<Filter const *> all ();
static Filter const * from_id (std::string);
static void setup_filters ();
}
_frame = av_frame_alloc ();
-
+
AVFilterGraph* graph = avfilter_graph_alloc();
if (graph == 0) {
throw DecodeError (N_("could not create filter graph."));
pixel_fmts[0] = _pixel_format;
pixel_fmts[1] = PIX_FMT_NONE;
sink_params->pixel_fmts = pixel_fmts;
-
+
if (avfilter_graph_create_filter (&_buffer_sink_context, buffer_sink, N_("out"), 0, sink_params, graph) < 0) {
throw DecodeError (N_("could not create buffer sink."));
}
if (avfilter_graph_parse (graph, filters.c_str(), inputs, outputs, 0) < 0) {
throw DecodeError (N_("could not set up filter graph."));
}
-
+
if (avfilter_graph_config (graph, 0) < 0) {
throw DecodeError (N_("could not configure filter graph."));
}
if (r < 0) {
throw DecodeError (String::compose (N_("could not push buffer into filter chain (%1)."), r));
}
-
+
while (true) {
if (av_buffersink_get_frame (_buffer_sink_context, _frame) < 0) {
break;
}
-
+
images.push_back (make_pair (shared_ptr<Image> (new Image (_frame)), av_frame_get_best_effort_timestamp (_frame)));
av_frame_unref (_frame);
}
}
-
+
return images;
}
: _id (node->string_child ("Id"))
, _file (node->optional_string_child ("File"))
{
-
+
}
void
boost::signals2::signal<void()> Changed;
-private:
+private:
/** Font ID, used to describe it in the subtitle content */
std::string _id;
boost::optional<boost::filesystem::path> _file;
= Ff + Fd - Ff frames
= Fd frames
= Fd/f seconds
-
+
So if we accept a difference of 1 frame, ie 1/f seconds, we can
say that
1/f = Fd/f
ie 1 = Fd
ie d = 1/F
-
+
So for a 3hr film, ie F = 3 * 60 * 60 = 10800, the acceptable
FPS error is 1/F ~= 0.0001 ~= 10-e4
*/
FrameRateChange::description () const
{
string description;
-
+
if (!skip && repeat == 1 && !change_speed) {
description = _("Content and DCP have the same rate.\n");
} else {
if (!d) {
throw PixelFormatError ("lines()", _pixel_format);
}
-
+
return pow (2.0f, d->log2_chroma_h);
}
if ((d->flags & PIX_FMT_PLANAR) == 0) {
return 1;
}
-
+
return d->nb_components;
}
sws_getCoefficients (lut[yuv_to_rgb]), 0,
0, 1 << 16, 1 << 16
);
-
+
/* Prepare input data pointers with crop */
uint8_t* scale_in_data[components()];
for (int c = 0; c < components(); ++c) {
sws_freeContext (scale_context);
- return out;
+ return out;
}
shared_ptr<Image>
sws_getCoefficients (lut[yuv_to_rgb]), 0,
0, 1 << 16, 1 << 16
);
-
+
sws_scale (
scale_context,
data(), stride(),
static uint16_t const ten_bit_uv = (1 << 9) - 1;
/* U/V black value for 16-bit colour */
static uint16_t const sixteen_bit_uv = (1 << 15) - 1;
-
+
switch (_pixel_format) {
case PIX_FMT_YUV420P:
case PIX_FMT_YUV422P:
case PIX_FMT_YUV444P9BE:
yuv_16_black (swap_16 (nine_bit_uv), false);
break;
-
+
case PIX_FMT_YUV422P10LE:
case PIX_FMT_YUV444P10LE:
yuv_16_black (ten_bit_uv, false);
case PIX_FMT_YUV444P16LE:
yuv_16_black (sixteen_bit_uv, false);
break;
-
+
case PIX_FMT_YUV444P10BE:
case PIX_FMT_YUV422P10BE:
yuv_16_black (swap_16 (ten_bit_uv), false);
case AV_PIX_FMT_YUVA444P9BE:
yuv_16_black (swap_16 (nine_bit_uv), true);
break;
-
+
case AV_PIX_FMT_YUVA420P9LE:
case AV_PIX_FMT_YUVA422P9LE:
case AV_PIX_FMT_YUVA444P9LE:
yuv_16_black (nine_bit_uv, true);
break;
-
+
case AV_PIX_FMT_YUVA420P10BE:
case AV_PIX_FMT_YUVA422P10BE:
case AV_PIX_FMT_YUVA444P10BE:
yuv_16_black (swap_16 (ten_bit_uv), true);
break;
-
+
case AV_PIX_FMT_YUVA420P10LE:
case AV_PIX_FMT_YUVA422P10LE:
case AV_PIX_FMT_YUVA444P10LE:
yuv_16_black (ten_bit_uv, true);
break;
-
+
case AV_PIX_FMT_YUVA420P16BE:
case AV_PIX_FMT_YUVA422P16BE:
case AV_PIX_FMT_YUVA444P16BE:
yuv_16_black (swap_16 (sixteen_bit_uv), true);
break;
-
+
case AV_PIX_FMT_YUVA420P16LE:
case AV_PIX_FMT_YUVA422P16LE:
case AV_PIX_FMT_YUVA444P16LE:
tp[0] = op[0] * alpha + tp[0] * (1 - alpha);
tp[1] = op[1] * alpha + tp[1] * (1 - alpha);
tp[2] = op[2] * alpha + tp[2] * (1 - alpha);
-
+
tp += this_bpp;
op += other_bpp;
}
tp[1] = op[1] * alpha + tp[1] * (1 - alpha);
tp[2] = op[2] * alpha + tp[2] * (1 - alpha);
tp[3] = op[3] * alpha + tp[3] * (1 - alpha);
-
+
tp += this_bpp;
op += other_bpp;
}
tp[1] = op[0] * alpha + tp[1] * (1 - alpha);
tp[3] = op[1] * alpha + tp[3] * (1 - alpha);
tp[5] = op[2] * alpha + tp[5] * (1 - alpha);
-
+
tp += this_bpp;
op += other_bpp;
}
DCPOMATIC_ASSERT (false);
}
}
-
+
void
Image::copy (shared_ptr<const Image> other, Position<int> position)
{
uint8_t * const op = other->data()[0] + oy * other->stride()[0];
memcpy (tp, op, N * 3);
}
-}
+}
void
Image::read_from_socket (shared_ptr<Socket> socket)
if (d->nb_components > 3) {
bpp[3] = floor ((d->comp[3].depth_minus1 + 1 + 7) / 8) / pow (2.0f, d->log2_chroma_w);
}
-
+
if ((d->flags & PIX_FMT_PLANAR) == 0) {
/* Not planar; sum them up */
return bpp[0] + bpp[1] + bpp[2] + bpp[3];
{
_data = (uint8_t **) wrapped_av_malloc (4 * sizeof (uint8_t *));
_data[0] = _data[1] = _data[2] = _data[3] = 0;
-
+
_line_size = (int *) wrapped_av_malloc (4 * sizeof (int));
_line_size[0] = _line_size[1] = _line_size[2] = _line_size[3] = 0;
-
+
_stride = (int *) wrapped_av_malloc (4 * sizeof (int));
_stride[0] = _stride[1] = _stride[2] = _stride[3] = 0;
Image (boost::shared_ptr<const Image>, bool);
Image& operator= (Image const &);
~Image ();
-
+
uint8_t * const * data () const;
int * line_size () const;
int const * stride () const;
boost::shared_ptr<Image> scale (dcp::Size, dcp::YUVToRGB yuv_to_rgb, AVPixelFormat, bool aligned) const;
boost::shared_ptr<Image> crop (Crop c, bool aligned) const;
boost::shared_ptr<Image> crop_scale_window (Crop c, dcp::Size, dcp::Size, dcp::YUVToRGB yuv_to_rgb, AVPixelFormat, bool aligned) const;
-
+
void make_black ();
void make_transparent ();
void alpha_blend (boost::shared_ptr<const Image> image, Position<int> pos);
void read_from_socket (boost::shared_ptr<Socket>);
void write_to_socket (boost::shared_ptr<Socket>) const;
-
+
AVPixelFormat pixel_format () const {
return _pixel_format;
}
private:
friend struct pixel_formats_test;
-
+
void allocate ();
void swap (Image &);
float bytes_per_pixel (int) const;
if (_paths.empty()) {
throw FileError (_("No valid image files were found in the folder."), p);
}
-
+
sort (_paths.begin(), _paths.end(), ImageFilenameSorter ());
}
: Content (film, node)
, VideoContent (film, node, version)
{
-
+
}
string
shared_ptr<const Film> film = _film.lock ();
DCPOMATIC_ASSERT (film);
-
+
shared_ptr<ImageExaminer> examiner (new ImageExaminer (film, shared_from_this(), job));
take_from_video_examiner (examiner);
}
if (_video_frame_rate == r) {
return;
}
-
+
_video_frame_rate = r;
}
-
+
signal_changed (VideoContentProperty::VIDEO_FRAME_RATE);
}
/* VideoContent */
void set_default_colour_conversion ();
-
+
void set_video_length (Frame);
bool still () const;
void set_video_frame_rate (float);
_image.reset (new MagickImageProxy (path));
}
}
-
+
video (_image, _video_position);
++_video_position;
return false;
private:
bool pass ();
void seek (ContentTime, bool);
-
+
boost::shared_ptr<const ImageContent> _image_content;
boost::shared_ptr<ImageProxy> _image;
Frame _video_position;
: _film (film)
, _image_content (content)
{
-#ifdef DCPOMATIC_IMAGE_MAGICK
+#ifdef DCPOMATIC_IMAGE_MAGICK
using namespace MagickCore;
#endif
boost::filesystem::path path = content->path(0).string ();
boost::optional<int> extract_number (boost::filesystem::path p)
{
p = p.leaf ();
-
+
std::string number;
for (size_t i = 0; i < p.string().size(); ++i) {
if (isdigit (p.string()[i])) {
: image (i)
, rectangle (r)
{}
-
+
boost::shared_ptr<Image> image;
/** Area that the subtitle covers on its corresponding video, expressed in
* proportions of the image size; e.g. rectangle.x = 0.5 would mean that
/* Download the ZIP file to temp_zip */
CURL* curl = curl_easy_init ();
curl_easy_setopt (curl, CURLOPT_URL, url.c_str ());
-
+
ScopedTemporary temp_zip;
FILE* f = temp_zip.open ("wb");
curl_easy_setopt (curl, CURLOPT_WRITEFUNCTION, get_from_zip_url_data);
}
/* Open the ZIP file and read `file' out of it */
-
+
struct zip* zip = zip_open (temp_zip.c_str(), 0, 0);
if (!zip) {
return optional<string> (_("Could not open downloaded ZIP file"));
}
-
+
struct zip_file* zip_file = zip_fopen (zip, file.c_str(), 0);
if (!zip_file) {
return optional<string> (_("Unexpected ZIP file contents"));
}
-
+
ScopedTemporary temp_cert;
f = temp_cert.open ("wb");
char buffer[4096];
}
}
temp_cert.close ();
-
+
load (temp_cert.file ());
return optional<string> ();
}
, two_d_version_of_three_d (node->optional_bool_child ("TwoDVersionOfThreeD").get_value_or (false))
, mastered_luminance (node->optional_string_child ("MasteredLuminance").get_value_or (""))
{
-
+
}
void
, red_band (false)
, two_d_version_of_three_d (false)
{}
-
+
ISDCFMetadata (cxml::ConstNodePtr);
void as_xml (xmlpp::Node *) const;
} else {
dcp::xyz_to_rgb (oj, dcp::ColourConversion::srgb_to_xyz(), image->data()[0], image->stride()[0], note);
}
-
+
return image;
}
Data j2k () const {
return _data;
}
-
+
dcp::Size size () const {
return _size;
}
-
+
private:
Data _data;
dcp::Size _size;
set_error (e.what(), m);
set_progress (1);
set_state (FINISHED_ERROR);
-
+
} catch (OpenFileError& e) {
set_error (
set_error (_("Out of memory"), _("There was not enough memory to do this."));
set_progress (1);
set_state (FINISHED_ERROR);
-
+
} catch (std::exception& e) {
set_error (
set_progress (1);
set_state (FINISHED_ERROR);
-
+
} catch (...) {
set_error (
boost::mutex::scoped_lock lm (_state_mutex);
return _state == PAUSED;
}
-
+
/** Set the state of this job.
* @param s New state.
*/
if (finished) {
emit (boost::bind (boost::ref (Finished)));
- }
+ }
}
/** @return DCPTime (in seconds) that this sub-job has been running */
if (_start_time == 0) {
return 0;
}
-
+
return time (0) - _start_time;
}
boost::mutex::scoped_lock lm (_progress_mutex);
_sub_name = n;
}
-
+
set_progress (0, true);
}
{
LOG_ERROR_NC (s);
LOG_ERROR_NC (d);
-
+
_film->log()->log (String::compose ("Error in job: %1 (%2)", s, d), Log::TYPE_ERROR);
boost::mutex::scoped_lock lm (_state_mutex);
_error_summary = s;
/* 100% makes it sound like we've finished when we haven't */
pc = 99;
}
-
+
s << pc << N_("%");
-
+
if (t > 10 && r > 0) {
/// TRANSLATORS: remaining here follows an amount of time that is remaining
/// on an operation.
if (progress().get_value_or(0) == 0) {
return elapsed_time ();
}
-
+
return elapsed_time() / progress().get() - elapsed_time();
}
virtual std::string json_name () const = 0;
/** Run this job in the current thread. */
virtual void run () = 0;
-
+
void start ();
void pause ();
void resume ();
FINISHED_ERROR, ///< the job has finished in error
FINISHED_CANCELLED ///< the job was cancelled
};
-
+
void set_state (State);
void set_error (std::string s, std::string d);
, _last_active_jobs (false)
, _scheduler (new boost::thread (boost::bind (&JobManager::scheduler, this)))
{
-
+
}
JobManager::~JobManager ()
}
emit (boost::bind (boost::ref (JobAdded), weak_ptr<Job> (j)));
-
+
return j;
}
}
return false;
-}
+}
void
JobManager::scheduler ()
if (_terminate) {
return;
}
-
+
for (list<shared_ptr<Job> >::iterator i = _jobs.begin(); i != _jobs.end(); ++i) {
if (!(*i)->finished ()) {
active_jobs = true;
}
-
+
if ((*i)->running ()) {
/* Something is already happening */
break;
}
-
+
if ((*i)->is_new()) {
(*i)->start ();
-
+
/* Only start one job at once */
break;
}
private:
/* This function is part of the test suite */
friend void ::wait_for_jobs ();
-
+
JobManager ();
~JobManager ();
void scheduler ();
-
+
mutable boost::mutex _mutex;
std::list<boost::shared_ptr<Job> > _jobs;
bool _terminate;
JSONServer::request (string url, shared_ptr<tcp::socket> socket)
{
cout << "request: " << url << "\n";
-
+
map<string, string> r = split_get_request (url);
for (map<string, string>::iterator i = r.begin(); i != r.end(); ++i) {
cout << i->first << " => " << i->second << "\n";
}
-
+
string action;
if (r.find ("action") != r.end ()) {
action = r["action"];
}
-
+
stringstream json;
if (action == "status") {
-
+
list<shared_ptr<Job> > jobs = JobManager::instance()->get ();
-
+
json << "{ \"jobs\": [";
for (list<shared_ptr<Job> >::iterator i = jobs.begin(); i != jobs.end(); ++i) {
if ((*i)->film()) {
json << "\"dcp\": \"" << (*i)->film()->dcp_name() << "\", ";
}
-
+
json << "\"name\": \"" << (*i)->json_name() << "\", ";
if ((*i)->progress ()) {
json << "\"progress\": " << (*i)->progress().get() << ", ";
}
json << "\"status\": \"" << (*i)->json_status() << "\"";
json << " }";
-
+
list<shared_ptr<Job> >::iterator j = i;
++j;
if (j != jobs.end ()) {
}
}
json << "] }";
-
+
if (json.str().empty ()) {
json << "{ }";
}
}
-
+
stringstream reply;
reply << "HTTP/1.1 200 OK\r\n"
<< "Content-Length: " << json.str().length() << "\r\n"
void request (std::string url, boost::shared_ptr<boost::asio::ip::tcp::socket> socket);
};
-
+
: screen (s)
, kdm (k)
{}
-
+
shared_ptr<Screen> screen;
dcp::EncryptedKDM kdm;
};
}
throw FileError ("could not create ZIP file", zip_file);
}
-
+
list<shared_ptr<string> > kdm_strings;
-
+
for (list<ScreenKDM>::const_iterator i = screen_kdms.begin(); i != screen_kdms.end(); ++i) {
shared_ptr<string> kdm (new string (i->kdm.as_xml ()));
kdm_strings.push_back (kdm);
-
+
struct zip_source* source = zip_source_buffer (zip, kdm->c_str(), kdm->length(), 0);
if (!source) {
throw StringError ("could not create ZIP source");
}
-
+
if (zip_add (zip, kdm_filename (film, *i).c_str(), source) == -1) {
throw StringError ("failed to add KDM to ZIP archive");
}
}
-
+
if (zip_close (zip) == -1) {
throw StringError ("failed to close ZIP archive");
}
)
{
list<dcp::EncryptedKDM> kdms = film->make_kdms (screens, cpl, from, to, formulation);
-
+
list<ScreenKDM> screen_kdms;
-
+
list<shared_ptr<Screen> >::iterator i = screens.begin ();
list<dcp::EncryptedKDM>::iterator j = kdms.begin ();
while (i != screens.end() && j != kdms.end ()) {
list<CinemaKDMs> cinema_kdms;
while (!screen_kdms.empty ()) {
-
+
/* Get all the screens from a single cinema */
CinemaKDMs ck;
-
+
list<ScreenKDM>::iterator i = screen_kdms.begin ();
ck.cinema = i->screen->cinema;
ck.screen_kdms.push_back (*i);
list<ScreenKDM>::iterator j = i;
++i;
screen_kdms.remove (*j);
-
+
while (i != screen_kdms.end ()) {
if (i->screen->cinema == ck.cinema) {
ck.screen_kdms.push_back (*i);
list<CinemaKDMs> cinema_kdms = make_cinema_kdms (film, screens, cpl, from, to, formulation);
for (list<CinemaKDMs>::const_iterator i = cinema_kdms.begin(); i != cinema_kdms.end(); ++i) {
-
+
boost::filesystem::path zip_file = boost::filesystem::temp_directory_path ();
zip_file /= boost::filesystem::unique_path().string() + ".zip";
i->make_zip_file (film, zip_file);
-
+
/* Send email */
-
+
quickmail_initialize ();
SafeStringStream start;
start << from.date() << " " << from.time_of_day();
SafeStringStream end;
end << to.date() << " " << to.time_of_day();
-
+
string subject = Config::instance()->kdm_subject();
boost::algorithm::replace_all (subject, "$CPL_NAME", film->dcp_name ());
boost::algorithm::replace_all (subject, "$START_TIME", start.str ());
boost::algorithm::replace_all (subject, "$END_TIME", end.str ());
boost::algorithm::replace_all (subject, "$CINEMA_NAME", i->cinema->name);
quickmail mail = quickmail_create (Config::instance()->kdm_from().c_str(), subject.c_str ());
-
+
quickmail_add_to (mail, i->cinema->email.c_str ());
if (!Config::instance()->kdm_cc().empty ()) {
quickmail_add_cc (mail, Config::instance()->kdm_cc().c_str ());
}
quickmail_add_header (mail, "Content-Type: text/plain; charset=UTF-8");
-
+
string body = Config::instance()->kdm_email().c_str();
boost::algorithm::replace_all (body, "$CPL_NAME", film->dcp_name ());
boost::algorithm::replace_all (body, "$START_TIME", start.str ());
boost::algorithm::replace_all (body, "$END_TIME", end.str ());
boost::algorithm::replace_all (body, "$CINEMA_NAME", i->cinema->name);
-
+
SafeStringStream screens;
for (list<ScreenKDM>::const_iterator j = i->screen_kdms.begin(); j != i->screen_kdms.end(); ++j) {
screens << j->screen->name << ", ";
Config::instance()->mail_user().c_str(),
Config::instance()->mail_password().c_str()
);
-
+
if (error) {
quickmail_destroy (mail);
throw KDMError (String::compose ("Failed to send KDM email (%1)", error));
if (type & TYPE_WARNING) {
s << "WARNING: ";
}
-
+
s << message;
do_log (s.str ());
}
head_amount = size;
tail_amount = 0;
}
-
+
FILE* f = fopen_boost (_file, "r");
if (!f) {
return "";
string out;
char* buffer = new char[max(head_amount, tail_amount) + 1];
-
+
int N = fread (buffer, 1, head_amount, f);
buffer[N] = '\0';
out += string (buffer);
out += "\n .\n .\n .\n";
fseek (f, - tail_amount - 1, SEEK_END);
-
+
N = fread (buffer, 1, tail_amount, f);
buffer[N] = '\0';
out += string (buffer) + "\n";
virtual std::string head_and_tail (int amount = 1024) const = 0;
protected:
-
+
/** mutex to protect the log */
mutable boost::mutex _mutex;
-
+
private:
virtual void do_log (std::string m) = 0;
void config_changed ();
-
+
/** bit-field of log types which should be put into the log (others are ignored) */
int _types;
boost::signals2::scoped_connection _config_connection;
return "";
}
-private:
+private:
void do_log (std::string) {}
};
MagickImageProxy::MagickImageProxy (boost::filesystem::path path)
{
/* Read the file into a Blob */
-
+
boost::uintmax_t const size = boost::filesystem::file_size (path);
FILE* f = fopen_boost (path, "rb");
if (!f) {
throw OpenFileError (path);
}
-
+
uint8_t* data = new uint8_t[size];
if (fread (data, 1, size, f) != size) {
delete[] data;
throw ReadFileError (path);
}
-
+
fclose (f);
_blob.update (data, size);
delete[] data;
MagickImageProxy::image (optional<dcp::NoteHandler>) const
{
boost::mutex::scoped_lock lm (_mutex);
-
+
if (_image) {
return _image;
}
using namespace MagickCore;
#else
using namespace MagickLib;
-#endif
+#endif
magick_image->write (0, i, size.width, 1, "RGB", CharPixel, p);
p += _image->stride()[0];
}
if (_blob.length() != mp->_blob.length()) {
return false;
}
-
+
return memcmp (_blob.data(), mp->_blob.data(), _blob.length()) == 0;
}
void send_binary (boost::shared_ptr<Socket>) const;
bool same (boost::shared_ptr<const ImageProxy> other) const;
-private:
+private:
Magick::Blob _blob;
mutable boost::shared_ptr<Image> _image;
mutable boost::mutex _mutex;
if (!_digest) {
unsigned char digest[MD5_DIGEST_LENGTH];
MD5_Final (digest, &_context);
-
+
SafeStringStream s;
for (int i = 0; i < MD5_DIGEST_LENGTH; ++i) {
s << hex << setfill('0') << setw(2) << ((int) digest[i]);
}
-
+
_digest = s.str ();
}
-
+
return _digest.get ();
}
}
void add (std::string const & s);
-
+
std::string get () const;
private:
, decoder (d)
, frc (f)
{}
-
+
boost::shared_ptr<Content> content;
boost::shared_ptr<Decoder> decoder;
FrameRateChange frc;
if (!(*i)->paths_valid ()) {
continue;
}
-
+
shared_ptr<Decoder> decoder;
optional<FrameRateChange> frc;
if (!vc) {
continue;
}
-
+
DCPTime const overlap = max (vc->position(), (*i)->position()) - min (vc->end(), (*i)->end());
if (overlap > best_overlap_t) {
best_overlap = vc;
property == VideoContentProperty::VIDEO_FRAME_TYPE ||
property == DCPContentProperty::CAN_BE_PLAYED
) {
-
+
_have_valid_pieces = false;
Changed (frequent);
property == VideoContentProperty::VIDEO_FADE_IN ||
property == VideoContentProperty::VIDEO_FADE_OUT
) {
-
+
Changed (frequent);
}
}
Player::transform_image_subtitles (list<ImageSubtitle> subs) const
{
list<PositionImage> all;
-
+
for (list<ImageSubtitle>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
if (!i->image) {
continue;
/* We will scale the subtitle up to fit _video_container_size */
dcp::Size scaled_size (i->rectangle.width * _video_container_size.width, i->rectangle.height * _video_container_size.height);
-
+
/* Then we need a corrective translation, consisting of two parts:
*
* 1. that which is the result of the scaling of the subtitle by _video_container_size; this will be
pvf.push_back (black_player_video_frame (time));
return pvf;
}
-
+
dcp::Size image_size = content->scale().size (content, _video_container_size, _film->frame_size ());
for (list<ContentVideo>::const_iterator i = content_video.begin(); i != content_video.end(); ++i) {
for (list<shared_ptr<PlayerVideo> >::const_iterator i = pvf.begin(); i != pvf.end(); ++i) {
(*i)->set_subtitle (merge (sub_images));
}
- }
-
+ }
+
return pvf;
}
shared_ptr<AudioBuffers> audio (new AudioBuffers (_film->audio_channels(), length_frames));
audio->make_silent ();
-
+
list<shared_ptr<Piece> > ov = overlaps<AudioContent> (time, time + length);
if (ov.empty ()) {
return audio;
Frame const content_frame = dcp_to_content_audio (*i, request);
BOOST_FOREACH (AudioStreamPtr j, content->audio_streams ()) {
-
+
/* Audio from this piece's decoder stream (which might be more or less than what we asked for) */
ContentAudio all = decoder->get_audio (j, content_frame, request_frames, accurate);
if (_audio_processor) {
dcp_mapped = _audio_processor->run (dcp_mapped);
}
-
+
all.audio = dcp_mapped;
audio->accumulate_frames (
list<ContentImageSubtitle> image = subtitle_decoder->get_image_subtitles (ContentTimePeriod (from, to), starting);
for (list<ContentImageSubtitle>::iterator i = image.begin(); i != image.end(); ++i) {
-
+
/* Apply content's subtitle offsets */
i->sub.rectangle.x += subtitle_content->subtitle_x_offset ();
i->sub.rectangle.y += subtitle_content->subtitle_y_offset ();
/* Apply a corrective translation to keep the subtitle centred after that scale */
i->sub.rectangle.x -= i->sub.rectangle.width * (subtitle_content->subtitle_x_scale() - 1);
i->sub.rectangle.y -= i->sub.rectangle.height * (subtitle_content->subtitle_y_scale() - 1);
-
+
ps.image.push_back (i->sub);
}
class PlayerVideo;
class ImageProxy;
class Font;
-
+
class PlayerStatistics
{
public:
, good (0)
, skip (0)
{}
-
+
int black;
int repeat;
int good;
, good (0)
, skip (0)
{}
-
+
DCPTime silence;
int64_t good;
int64_t skip;
void set_burn_subtitles (bool burn);
PlayerStatistics const & statistics () const;
-
+
/** Emitted when something has changed such that if we went back and emitted
* the last frame again it would look different. This is not emitted after
* a seek.
overlaps.push_back (*i);
}
}
-
+
return overlaps;
}
-
+
boost::shared_ptr<const Film> _film;
/** Our pieces are ready to go; if this is false the pieces must be (re-)created before they are used */
: from (f)
, to (t)
{}
-
+
DCPTime from;
DCPTime to;
_in = image_proxy_factory (node->node_child ("In"), socket);
if (node->optional_number_child<int> ("SubtitleX")) {
-
+
_subtitle.position = Position<int> (node->number_child<int> ("SubtitleX"), node->number_child<int> ("SubtitleY"));
_subtitle.image.reset (
new Image (PIX_FMT_RGBA, dcp::Size (node->number_child<int> ("SubtitleWidth"), node->number_child<int> ("SubtitleHeight")), true)
);
-
+
_subtitle.image->read_from_socket (socket);
}
}
PlayerVideo::image (AVPixelFormat pixel_format, bool burn_subtitle, dcp::NoteHandler note) const
{
shared_ptr<Image> im = _in->image (optional<dcp::NoteHandler> (note));
-
+
Crop total_crop = _crop;
switch (_part) {
case PART_LEFT_HALF:
if (_colour_conversion) {
yuv_to_rgb = _colour_conversion.get().yuv_to_rgb();
}
-
+
shared_ptr<Image> out = im->crop_scale_window (total_crop, _inter_size, _out_size, yuv_to_rgb, pixel_format, true);
if (burn_subtitle && _subtitle.image) {
PlayerVideo::has_j2k () const
{
/* XXX: burnt-in subtitle; maybe other things */
-
+
shared_ptr<const J2KImageProxy> j2k = dynamic_pointer_cast<const J2KImageProxy> (_in);
if (!j2k) {
return false;
}
-
+
return _crop == Crop () && _inter_size == j2k->size();
}
Part,
boost::optional<ColourConversion>
);
-
+
PlayerVideo (boost::shared_ptr<cxml::Node>, boost::shared_ptr<Socket>);
void set_subtitle (PositionImage);
-
+
boost::shared_ptr<Image> image (AVPixelFormat pix_fmt, bool burn_subtitle, dcp::NoteHandler note) const;
void add_metadata (xmlpp::Node* node, bool send_subtitles) const;
- any other position changes will be timeline drags which should not result in content
being sequenced.
*/
-
+
if (property == ContentProperty::LENGTH || property == VideoContentProperty::VIDEO_FRAME_TYPE) {
maybe_sequence_video ();
}
-
+
ContentChanged (content, property, frequent);
}
if (!_sequence_video || _sequencing_video) {
return;
}
-
+
_sequencing_video = true;
-
+
DCPTime next_left;
DCPTime next_right;
for (ContentList::iterator i = _content.begin(); i != _content.end(); ++i) {
if (!vc) {
continue;
}
-
+
if (vc->video_frame_type() == VIDEO_FRAME_TYPE_3D_RIGHT) {
vc->set_position (next_right);
next_right = vc->end() + DCPTime::delta ();
}
/* This won't change order, so it does not need a sort */
-
+
_sequencing_video = false;
}
Playlist::video_identifier () const
{
string t;
-
+
for (ContentList::const_iterator i = _content.begin(); i != _content.end(); ++i) {
shared_ptr<const VideoContent> vc = dynamic_pointer_cast<const VideoContent> (*i);
if (vc) {
while (i != _content.end() && *i != c) {
++i;
}
-
+
if (i != _content.end ()) {
_content.erase (i);
Changed ();
while (j != _content.end() && *j != *i) {
++j;
}
-
+
if (j != _content.end ()) {
_content.erase (j);
}
}
/* This won't change order, so it does not need a sort */
-
+
Changed ();
}
if (!best) {
return 24;
}
-
+
return best->dcp;
}
}
_content_connections.clear ();
-
+
for (ContentList::iterator i = _content.begin(); i != _content.end(); ++i) {
_content_connections.push_back ((*i)->Changed.connect (bind (&Playlist::content_changed, this, _1, _2, _3)));
}
}
sort (_content.begin(), _content.end(), ContentSorter ());
-
+
reconnect ();
Changed ();
}
Playlist::move_earlier (shared_ptr<Content> c)
{
sort (_content.begin(), _content.end(), ContentSorter ());
-
+
ContentList::iterator previous = _content.end ();
ContentList::iterator i = _content.begin();
while (i != _content.end() && *i != c) {
return;
}
-
+
DCPTime const p = (*previous)->position ();
(*previous)->set_position (p + c->length_after_trim ());
c->set_position (p);
Playlist::move_later (shared_ptr<Content> c)
{
sort (_content.begin(), _content.end(), ContentSorter ());
-
+
ContentList::iterator i = _content.begin();
while (i != _content.end() && *i != c) {
++i;
std::string video_identifier () const;
DCPTime length () const;
-
+
int best_dcp_frame_rate () const;
DCPTime video_end () const;
FrameRateChange active_frame_rate_change (DCPTime, int dcp_frame_rate) const;
* Third parameter is true if signals are currently being emitted frequently.
*/
mutable boost::signals2::signal<void (boost::weak_ptr<Content>, int, bool)> ContentChanged;
-
+
private:
void content_changed (boost::weak_ptr<Content>, int, bool);
void reconnect ();
{
public:
PositionImage () {}
-
+
PositionImage (boost::shared_ptr<Image> i, Position<int> p)
: image (i)
, position (p)
{}
-
+
boost::shared_ptr<Image> image;
Position<int> position;
if (i == "137") {
i = "138";
}
-
+
vector<Ratio const *>::iterator j = _ratios.begin ();
while (j != _ratios.end() && (*j)->id() != i) {
++j;
return *j;
}
-
+
Ratio const *
Ratio::nearest_from_ratio (float r)
{
Ratio const * nearest = 0;
float distance = FLT_MAX;
-
+
for (vector<Ratio const *>::iterator i = _ratios.begin (); i != _ratios.end(); ++i) {
float const d = fabs ((*i)->ratio() - r);
if (d < distance) {
std::string _nickname;
std::string _isdcf_name;
- static std::vector<Ratio const *> _ratios;
+ static std::vector<Ratio const *> _ratios;
};
#endif
public:
RawImageProxy (boost::shared_ptr<Image>);
RawImageProxy (boost::shared_ptr<cxml::Node> xml, boost::shared_ptr<Socket> socket);
-
+
boost::shared_ptr<Image> image (boost::optional<dcp::NoteHandler> note = boost::optional<dcp::NoteHandler> ()) const;
void add_metadata (xmlpp::Node *) const;
void send_binary (boost::shared_ptr<Socket>) const;
bool same (boost::shared_ptr<const ImageProxy>) const;
-
+
private:
boost::shared_ptr<Image> _image;
};
namespace dcpomatic
{
-
+
/** @struct Rect
* @brief A rectangle.
*/
-template <class T>
+template <class T>
class Rect
{
public:
-
+
Rect ()
: x (0)
, y (0)
{
T const tx = max (x, other.x);
T const ty = max (y, other.y);
-
+
return Rect (
tx, ty,
min (x + width, other.x + other.width) - tx,
image->size().height,
Cairo::ImageSurface::format_stride_for_width (Cairo::FORMAT_ARGB32, image->size().width)
);
-
+
Cairo::RefPtr<Cairo::Context> context = Cairo::Context::create (surface);
Glib::RefPtr<Pango::Layout> layout = Pango::Layout::create (context);
}
layout->set_font_description (font);
layout->set_text (subtitle.text ());
-
+
/* Compute fade factor */
/* XXX */
float fade_factor = 1;
layout->update_from_cairo_context (context);
-
+
context->scale (xscale, yscale);
if (subtitle.effect() == dcp::SHADOW) {
layout->add_to_cairo_context (context);
context->stroke ();
}
-
+
int layout_width;
int layout_height;
layout->get_size (layout_width, layout_height);
extern "C" {
#include "libavutil/channel_layout.h"
#include "libavutil/opt.h"
-}
+}
#include "resampler.h"
#include "audio_buffers.h"
#include "exceptions.h"
/* Sample rates */
av_opt_set_int (_swr_context, "isr", _in_rate, 0);
av_opt_set_int (_swr_context, "osr", _out_rate, 0);
-
+
swr_init (_swr_context);
}
int const resampled_frames = swr_convert (
_swr_context, (uint8_t **) resampled->data(), max_resampled_frames, (uint8_t const **) in->data(), in->frames()
);
-
+
if (resampled_frames < 0) {
char buf[256];
av_strerror (resampled_frames, buf, sizeof(buf));
throw EncodeError (String::compose (_("could not run sample-rate converter for %1 samples (%2) (%3)"), in->frames(), resampled_frames, buf));
}
-
+
resampled->set_frames (resampled_frames);
return resampled;
-}
+}
shared_ptr<const AudioBuffers>
Resampler::flush ()
while (true) {
int const frames = swr_convert (_swr_context, (uint8_t **) pass->data(), pass_size, 0, 0);
-
+
if (frames < 0) {
throw EncodeError (_("could not run sample-rate converter"));
}
-
+
if (frames == 0) {
break;
}
boost::shared_ptr<const AudioBuffers> run (boost::shared_ptr<const AudioBuffers>);
boost::shared_ptr<const AudioBuffers> flush ();
-private:
+private:
SwrContext* _swr_context;
int _in_rate;
int _out_rate;
public:
SafeStringStream ()
{}
-
+
SafeStringStream (std::string s)
: _stream (s)
{}
-
+
template <class T>
std::ostream& operator<< (T val)
{
{
_stream.fill (f);
}
-
+
void precision (int p)
{
_stream.precision (p);
/** Close and delete the temporary file */
ScopedTemporary::~ScopedTemporary ()
{
- close ();
+ close ();
boost::system::error_code ec;
boost::filesystem::remove (_file, ec);
}
ssh_session session;
-private:
+private:
bool _connected;
};
SCPDCPJob::run ()
{
LOG_GENERAL_NC (N_("SCP DCP job starting"));
-
+
SSHSession ss;
-
+
set_status (_("connecting"));
-
+
ssh_options_set (ss.session, SSH_OPTIONS_HOST, Config::instance()->tms_ip().c_str ());
ssh_options_set (ss.session, SSH_OPTIONS_USER, Config::instance()->tms_user().c_str ());
int const port = 22;
ssh_options_set (ss.session, SSH_OPTIONS_PORT, &port);
-
+
int r = ss.connect ();
if (r != SSH_OK) {
throw NetworkError (String::compose (_("Could not connect to server %1 (%2)"), Config::instance()->tms_ip(), ssh_get_error (ss.session)));
}
-
+
int const state = ssh_is_server_known (ss.session);
if (state == SSH_SERVER_ERROR) {
throw NetworkError (String::compose (_("SSH error (%1)"), ssh_get_error (ss.session)));
}
-
+
r = ssh_userauth_password (ss.session, 0, Config::instance()->tms_password().c_str ());
if (r != SSH_AUTH_SUCCESS) {
throw NetworkError (String::compose (_("Failed to authenticate with server (%1)"), ssh_get_error (ss.session)));
}
-
+
SSHSCP sc (ss.session);
-
+
r = ssh_scp_init (sc.scp);
if (r != SSH_OK) {
throw NetworkError (String::compose (_("Could not start SCP session (%1)"), ssh_get_error (ss.session)));
}
-
+
r = ssh_scp_push_directory (sc.scp, _film->dcp_name().c_str(), S_IRWXU);
if (r != SSH_OK) {
throw NetworkError (String::compose (_("Could not create remote directory %1 (%2)"), _film->dcp_name(), ssh_get_error (ss.session)));
}
-
+
boost::filesystem::path const dcp_dir = _film->dir (_film->dcp_name());
-
+
boost::uintmax_t bytes_to_transfer = 0;
for (boost::filesystem::directory_iterator i = boost::filesystem::directory_iterator (dcp_dir); i != boost::filesystem::directory_iterator(); ++i) {
bytes_to_transfer += boost::filesystem::file_size (*i);
}
-
+
boost::uintmax_t buffer_size = 64 * 1024;
char buffer[buffer_size];
boost::uintmax_t bytes_transferred = 0;
-
+
for (boost::filesystem::directory_iterator i = boost::filesystem::directory_iterator (dcp_dir); i != boost::filesystem::directory_iterator(); ++i) {
-
+
string const leaf = boost::filesystem::path(*i).leaf().generic_string ();
-
+
set_status (String::compose (_("copying %1"), leaf));
-
+
boost::uintmax_t to_do = boost::filesystem::file_size (*i);
ssh_scp_push_file (sc.scp, leaf.c_str(), to_do, S_IRUSR | S_IWUSR);
fclose (f);
throw ReadFileError (boost::filesystem::path (*i).string());
}
-
+
r = ssh_scp_write (sc.scp, buffer, t);
if (r != SSH_OK) {
fclose (f);
fclose (f);
}
-
+
set_progress (1);
set_status (N_(""));
set_state (FINISHED_OK);
SendKDMEmailJob::run ()
{
try {
-
+
set_progress_unknown ();
email_kdms (_film, _screens, _dcp, _from, _to, _formulation);
set_progress (1);
set_state (FINISHED_OK);
-
+
} catch (std::exception& e) {
set_progress (1);
set_progress_unknown ();
quickmail mail = quickmail_create (_from.c_str(), "DCP-o-matic problem report");
-
+
quickmail_add_to (mail, "carl@dcpomatic.com");
-
+
string body = _summary + "\n\n";
-
+
body += "log head and tail:\n";
body += "---<8----\n";
body += _film->log()->head_and_tail ();
add_file (body, "metadata.xml");
quickmail_set_body (mail, body.c_str());
-
+
char const* error = quickmail_send (mail, "main.carlh.net", 2525, 0, 0);
-
+
if (error) {
set_state (FINISHED_ERROR);
set_error (error, "");
} else {
set_state (FINISHED_OK);
}
-
+
quickmail_destroy (mail);
set_progress (1);
if (!f) {
return;
}
-
+
body += file.string() + ":\n";
body += "---<8----\n";
uintmax_t const size = boost::filesystem::file_size (_film->file (file));
DCPVideo dcp_video_frame (pvf, xml, _log);
gettimeofday (&after_read, 0);
-
+
Data encoded = dcp_video_frame.encode_locally (boost::bind (&Log::dcp_log, _log.get(), _1, _2));
gettimeofday (&after_encode, 0);
-
+
try {
socket->write (encoded.size ());
socket->write (encoded.data ().get (), encoded.size ());
shared_ptr<Socket> socket = _queue.front ();
_queue.pop_front ();
-
+
lock.unlock ();
int frame = -1;
struct timeval after_read;
struct timeval after_encode;
struct timeval end;
-
+
gettimeofday (&start, 0);
-
+
try {
frame = process (socket, after_read, after_encode);
ip = socket->socket().remote_endpoint().address().to_string();
gettimeofday (&end, 0);
socket.reset ();
-
+
lock.lock ();
if (frame >= 0) {
<< "receive " << (seconds(after_read) - seconds(start)) << "s "
<< "encode " << (seconds(after_encode) - seconds(after_read)) << "s "
<< "send " << (seconds(end) - seconds(after_encode)) << "s.";
-
+
if (_verbose) {
cout << message.str() << "\n";
}
LOG_GENERAL_NC (message.str ());
}
-
+
_full_condition.notify_all ();
}
}
if (_verbose) {
cout << "DCP-o-matic server starting with " << num_threads << " threads.\n";
}
-
+
for (int i = 0; i < num_threads; ++i) {
_worker_threads.push_back (new thread (bind (&Server::worker_thread, this)));
}
_broadcast.thread = new thread (bind (&Server::broadcast_thread, this));
-
+
start_accept ();
_io_service.run ();
}
}
}
-
+
_broadcast.socket->async_receive_from (
boost::asio::buffer (_broadcast.buffer, sizeof (_broadcast.buffer)),
_broadcast.send_endpoint, boost::bind (&Server::broadcast_received, this)
}
boost::mutex::scoped_lock lock (_worker_mutex);
-
+
/* Wait until the queue has gone down a bit */
while (_queue.size() >= _worker_threads.size() * 2 && !_terminate) {
_full_condition.wait (lock);
}
-
+
_queue.push_back (socket);
_empty_condition.notify_all ();
start_accept ();
}
-
+
: _host_name ("")
, _threads (1)
{}
-
+
/** @param h Server host name or IP address in string form.
* @param t Number of threads to use on the server.
*/
{}
/* Default copy constructor is fine */
-
+
/** @return server's host name or IP address in string form */
std::string host_name () const {
return _host_name;
: thread (0)
, socket (0)
{}
-
+
boost::thread* thread;
boost::asio::ip::udp::socket* socket;
char buffer[64];
boost::asio::ip::udp::endpoint send_endpoint;
boost::asio::io_service io_service;
-
+
} _broadcast;
};
socket.set_option (boost::asio::socket_base::broadcast (true));
string const data = DCPOMATIC_HELLO;
-
+
while (!_stop) {
if (Config::instance()->use_any_servers ()) {
/* Broadcast to look for servers */
start_accept ();
return;
}
-
+
uint32_t length;
socket->read (reinterpret_cast<uint8_t*> (&length), sizeof (uint32_t));
length = ntohl (length);
-
+
scoped_array<char> buffer (new char[length]);
socket->read (reinterpret_cast<uint8_t*> (buffer.get()), length);
-
+
string s (buffer.get());
shared_ptr<cxml::Document> xml (new cxml::Document ("ServerAvailable"));
xml->read_string (s);
-
+
string const ip = socket->socket().remote_endpoint().address().to_string ();
if (!server_found (ip) && xml->optional_number_child<int>("Version").get_value_or (0) == SERVER_LINK_VERSION) {
ServerDescription sd (ip, xml->number_child<int> ("Threads"));
boost::signals2::signal<void (ServerDescription)> ServerFound;
bool _disabled;
-
+
/** Thread to periodically issue broadcasts to find encoding servers */
boost::thread* _broadcast_thread;
/** Thread to listen to the responses from servers */
}
friend class Signaller;
-
+
/** A io_service which is used as the conduit for messages */
boost::asio::io_service _service;
/** Object required to keep io_service from stopping when it has nothing to do */
if (signal_manager) {
signal_manager->emit (boost::bind (&Wrapper<T>::signal, w));
}
-
+
boost::mutex::scoped_lock lm (_signaller_mutex);
/* Clean up finished Wrappers */
{
shared_ptr<const Film> film = _film.lock ();
DCPOMATIC_ASSERT (film);
-
+
{
boost::mutex::scoped_lock lm (_mutex);
_audio_stream.reset (new AudioStream (examiner->audio_frame_rate(), examiner->audio_channels ()));
AudioStreamPtr audio_stream () const {
return _audio_stream;
}
-
+
void take_from_audio_examiner (boost::shared_ptr<AudioExaminer>);
protected:
/* Here be monsters. See fopen_boost for similar shenanigans */
#ifdef DCPOMATIC_WINDOWS
_sndfile = sf_wchar_open (_sndfile_content->path(0).c_str(), SFM_READ, &_info);
-#else
+#else
_sndfile = sf_open (_sndfile_content->path(0).string().c_str(), SFM_READ, &_info);
#endif
-
+
if (!_sndfile) {
throw DecodeError (_("could not open audio file for reading"));
}
Sndfile (boost::shared_ptr<const SndfileContent> content);
virtual ~Sndfile ();
-protected:
+protected:
boost::shared_ptr<const SndfileContent> _sndfile_content;
SNDFILE* _sndfile;
SF_INFO _info;
boost::shared_ptr<SndfileContent> shared_from_this () {
return boost::dynamic_pointer_cast<SndfileContent> (Content::shared_from_this ());
}
-
+
DCPTime full_length () const;
-
+
void examine (boost::shared_ptr<Job>);
std::string summary () const;
std::string technical_summary () const;
void as_xml (xmlpp::Node *) const;
void take_from_audio_examiner (boost::shared_ptr<AudioExaminer>);
-
+
static bool valid_file (boost::filesystem::path);
private:
boost::mutex::scoped_lock lm (_mutex);
return _audio_length;
}
-
+
Frame _audio_length;
};
, _remaining (_info.frames)
, _deinterleave_buffer (0)
{
-
+
}
SndfileDecoder::~SndfileDecoder ()
if (_remaining == 0) {
return true;
}
-
+
/* Do things in half second blocks as I think there may be limits
to what FFmpeg (and in particular the resampler) can cope with.
*/
sf_count_t const this_time = min (block, _remaining);
int const channels = _sndfile_content->audio_stream()->channels ();
-
+
shared_ptr<AudioBuffers> data (new AudioBuffers (channels, this_time));
if (_sndfile_content->audio_stream()->channels() == 1) {
}
}
}
-
+
data->set_frames (this_time);
audio (_sndfile_content->audio_stream (), data, ContentTime::from_frames (_done, _info.samplerate));
_done += this_time;
private:
bool pass ();
void seek (ContentTime, bool);
-
+
int64_t _done;
int64_t _remaining;
float* _deinterleave_buffer;
{
public:
SndfileExaminer (boost::shared_ptr<const SndfileContent> content);
-
+
int audio_channels () const;
Frame audio_length () const;
int audio_frame_rate () const;
shared_ptr<const Film> film = _film.lock ();
DCPOMATIC_ASSERT (film);
-
+
DCPTime len (s.length (), film->active_frame_rate_change (position ()));
/* Default to turning these subtitles on */
boost::shared_ptr<SubRipContent> shared_from_this () {
return boost::dynamic_pointer_cast<SubRipContent> (Content::shared_from_this ());
}
-
+
/* Content */
void examine (boost::shared_ptr<Job>);
std::string summary () const;
}
static std::string const font_id;
-
+
private:
DCPTime _length;
};
SubRipDecoder::seek (ContentTime time, bool accurate)
{
SubtitleDecoder::seek (time, accurate);
-
+
_next = 0;
while (_next < _subtitles.size() && ContentTime::from_seconds (_subtitles[_next].from.all_as_seconds ()) < time) {
++_next;
}
/* XXX: we are ignoring positioning specified in the file */
-
+
list<dcp::SubtitleString> out;
for (list<sub::Line>::const_iterator i = _subtitles[_next].lines.begin(); i != _subtitles[_next].lines.end(); ++i) {
for (list<sub::Block>::const_iterator j = i->blocks.begin(); j != i->blocks.end(); ++j) {
private:
std::list<ContentTimePeriod> image_subtitles_during (ContentTimePeriod, bool starting) const;
std::list<ContentTimePeriod> text_subtitles_during (ContentTimePeriod, bool starting) const;
-
+
size_t _next;
};
, italic (false)
, underline (false)
{}
-
+
std::string text;
bool bold;
bool italic;
} else {
_use_subtitles = false;
}
-
+
if (version >= 7) {
_subtitle_x_offset = node->number_child<float> ("SubtitleXOffset");
_subtitle_y_offset = node->number_child<float> ("SubtitleYOffset");
shared_ptr<SubtitleContent> ref = dynamic_pointer_cast<SubtitleContent> (c[0]);
DCPOMATIC_ASSERT (ref);
list<shared_ptr<Font> > ref_fonts = ref->fonts ();
-
+
for (size_t i = 0; i < c.size(); ++i) {
shared_ptr<SubtitleContent> sc = dynamic_pointer_cast<SubtitleContent> (c[i]);
if (sc->subtitle_x_offset() != ref->subtitle_x_offset()) {
throw JoinError (_("Content to be joined must have the same subtitle X offset."));
}
-
+
if (sc->subtitle_y_offset() != ref->subtitle_y_offset()) {
throw JoinError (_("Content to be joined must have the same subtitle Y offset."));
}
SubtitleContent::as_xml (xmlpp::Node* root) const
{
boost::mutex::scoped_lock lm (_mutex);
-
+
root->add_child("UseSubtitles")->add_child_text (raw_convert<string> (_use_subtitles));
root->add_child("SubtitleXOffset")->add_child_text (raw_convert<string> (_subtitle_x_offset));
root->add_child("SubtitleYOffset")->add_child_text (raw_convert<string> (_subtitle_y_offset));
}
signal_changed (SubtitleContentProperty::USE_SUBTITLES);
}
-
+
void
SubtitleContent::set_subtitle_x_offset (double o)
{
protected:
/** subtitle language (e.g. "German") or empty if it is not known */
std::string _subtitle_language;
-
+
private:
friend struct ffmpeg_pts_offset_test;
void font_changed ();
/* Now look for what we wanted in the data we have collected */
/* XXX: inefficient */
-
+
list<T> out;
for (typename list<T>::const_iterator i = subs.begin(); i != subs.end(); ++i) {
if ((starting && period.contains (i->period().from)) || (!starting && period.overlaps (i->period ()))) {
}
/* Discard anything in _decoded_image_subtitles that is outside 5 seconds either side of period */
-
+
list<ContentImageSubtitle>::iterator i = _decoded_image_subtitles.begin();
while (i != _decoded_image_subtitles.end()) {
list<ContentImageSubtitle>::iterator tmp = i;
protected:
void seek (ContentTime, bool);
-
+
void image_subtitle (ContentTimePeriod period, boost::shared_ptr<Image>, dcpomatic::Rect<double>);
void text_subtitle (std::list<dcp::SubtitleString>);
*/
virtual std::list<ContentTimePeriod> image_subtitles_during (ContentTimePeriod period, bool starting) const = 0;
virtual std::list<ContentTimePeriod> text_subtitles_during (ContentTimePeriod period, bool starting) const = 0;
-
+
boost::shared_ptr<const SubtitleContent> _subtitle_content;
};
return;
}
-
+
set_state (N_(""));
cout << _name << N_(":\n");
public:
PeriodTimer (std::string n);
~PeriodTimer ();
-
+
private:
/** name to use when giving output */
TranscodeJob::TranscodeJob (shared_ptr<const Film> film)
: Job (film)
{
-
+
}
string
/// TRANSLATORS: fps here is an abbreviation for frames per second
s << "; " << fixed << setprecision (1) << fps << " " << _("fps");
}
-
+
return s.str ();
}
{
/* _transcoder might be destroyed by the job-runner thread */
shared_ptr<Transcoder> t = _transcoder;
-
+
if (!t) {
return 0;
}
-
+
float fps = t->current_encoding_rate ();
if (fps == 0) {
{
public:
TranscodeJob (boost::shared_ptr<const Film>);
-
+
std::string name () const;
std::string json_name () const;
void run ();
if (!_film->burn_subtitles ()) {
_writer->write (_player->get_subtitle_fonts ());
}
-
+
for (DCPTime t; t < length; t += frame) {
list<shared_ptr<PlayerVideo> > v = _player->get_video (t, true);
for (list<shared_ptr<PlayerVideo> >::const_iterator i = v.begin(); i != v.end(); ++i) {
if (s.height < minimum) {
s.height = minimum;
}
-
+
return s;
}
, cpl_annotation_text (a)
, cpl_file (f)
{}
-
+
std::string dcp_directory;
std::string cpl_id;
std::string cpl_annotation_text;
curl_easy_setopt (_curl, CURLOPT_WRITEFUNCTION, write_callback_wrapper);
curl_easy_setopt (_curl, CURLOPT_WRITEDATA, this);
curl_easy_setopt (_curl, CURLOPT_TIMEOUT, 20);
-
+
string const agent = "dcpomatic/" + string (dcpomatic_version);
curl_easy_setopt (_curl, CURLOPT_USERAGENT, agent.c_str ());
UpdateChecker::~UpdateChecker ()
{
/* We are not cleaning up our thread, but hey well */
-
+
curl_easy_cleanup (_curl);
curl_global_cleanup ();
delete[] _buffer;
}
--_to_do;
lock.unlock ();
-
+
try {
_offset = 0;
/* Perform the request */
-
+
int r = curl_easy_perform (_curl);
if (r != CURLE_OK) {
set_state (FAILED);
}
/* Parse the reply */
-
+
_buffer[_offset] = '\0';
string s (_buffer);
cxml::Document doc ("Update");
if (version_less_than (dcpomatic_version, stable)) {
_stable = stable;
}
-
+
if (Config::instance()->check_for_test_updates() && version_less_than (dcpomatic_version, test)) {
_test = test;
}
}
}
}
-
+
size_t
UpdateChecker::write_callback (void* data, size_t size, size_t nmemb)
{
} else {
am = raw_convert<int> (ap[2]);
}
-
+
float bm;
if (ends_with (bp[2], "devel")) {
bm = raw_convert<int> (bp[2].substr (0, bp[2].length() - 5)) + 0.5;
} else {
bm = raw_convert<int> (bp[2]);
}
-
+
return am < bm;
}
boost::mutex::scoped_lock lm (_data_mutex);
return _state;
}
-
+
/** @return new stable version, if there is one */
boost::optional<std::string> stable () {
boost::mutex::scoped_lock lm (_data_mutex);
boost::mutex::scoped_lock lm (_data_mutex);
return _test;
}
-
+
/** @return true if the last signal emission was the first */
bool last_emit_was_first () const {
boost::mutex::scoped_lock lm (_data_mutex);
private:
friend struct update_checker_test;
-
+
static UpdateChecker* _instance;
static bool version_less_than (std::string const & a, std::string const & b);
{
public:
UpmixerA (int sampling_rate);
-
+
std::string name () const;
std::string id () const;
ChannelCount in_channels () const;
addr2line (void const * const addr)
{
char addr2line_cmd[512] = { 0 };
- sprintf (addr2line_cmd, "addr2line -f -p -e %.256s %p > %s", program_name.c_str(), addr, backtrace_file.string().c_str());
+ sprintf (addr2line_cmd, "addr2line -f -p -e %.256s %p > %s", program_name.c_str(), addr, backtrace_file.string().c_str());
return system(addr2line_cmd);
}
FILE* f = fopen_boost (backtrace_file, "w");
fprintf (f, "C-style exception %d\n", info->ExceptionRecord->ExceptionCode);
fclose(f);
-
+
if (info->ExceptionRecord->ExceptionCode != EXCEPTION_STACK_OVERFLOW) {
CONTEXT* context = info->ContextRecord;
SymInitialize (GetCurrentProcess (), 0, true);
-
+
STACKFRAME frame = { 0 };
-
+
/* setup initial stack frame */
#if _WIN64
frame.AddrPC.Offset = context->Rip;
frame.AddrStack.Offset = context->Rsp;
frame.AddrFrame.Offset = context->Rbp;
-#else
+#else
frame.AddrPC.Offset = context->Eip;
frame.AddrStack.Offset = context->Esp;
frame.AddrFrame.Offset = context->Ebp;
frame.AddrPC.Mode = AddrModeFlat;
frame.AddrStack.Mode = AddrModeFlat;
frame.AddrFrame.Mode = AddrModeFlat;
-
+
while (
StackWalk (
IMAGE_FILE_MACHINE_I386,
addr2line((void *) frame.AddrPC.Offset);
}
} else {
-#ifdef _WIN64
+#ifdef _WIN64
addr2line ((void *) info->ContextRecord->Rip);
-#else
+#else
addr2line ((void *) info->ContextRecord->Eip);
-#endif
+#endif
}
-
+
return EXCEPTION_CONTINUE_SEARCH;
}
#endif
<< e.what() << std::endl;
}
catch (...) {
- std::cerr << __FUNCTION__ << " caught unknown/unhandled exception."
+ std::cerr << __FUNCTION__ << " caught unknown/unhandled exception."
<< std::endl;
}
*/
std::locale::global (boost::locale::generator().generate (""));
boost::filesystem::path::imbue (std::locale ());
-#endif
-
+#endif
+
avfilter_register_all ();
#ifdef DCPOMATIC_OSX
Pango::init ();
dcp::init ();
-
+
Ratio::setup_ratios ();
PresetColourConversion::setup_colour_conversion_presets ();
VideoContentScale::setup_scales ();
#if defined(DCPOMATIC_WINDOWS) || defined(DCPOMATIC_OSX)
bindtextdomain ("libdcpomatic2", mo_path().string().c_str());
bind_textdomain_codeset ("libdcpomatic2", "UTF8");
-#endif
+#endif
#ifdef DCPOMATIC_LINUX
bindtextdomain ("libdcpomatic2", LINUX_LOCALE_PREFIX);
fclose (f);
--i;
- }
+ }
digester.add (buffer.get(), size - to_do);
return digester.get ();
if (boost::starts_with (f.leaf().string(), "._")) {
return false;
}
-
+
string ext = f.extension().string();
transform (ext.begin(), ext.end(), ext.begin(), ::tolower);
return (
if (ratio < full_frame.ratio ()) {
return dcp::Size (rint (full_frame.height * ratio), full_frame.height);
}
-
+
return dcp::Size (full_frame.width, rint (full_frame.width / ratio));
}
/* End time is not known */
return FFmpegSubtitlePeriod (packet_time + ContentTime::from_seconds (sub.start_display_time / 1e3));
}
-
+
return FFmpegSubtitlePeriod (
packet_time + ContentTime::from_seconds (sub.start_display_time / 1e3),
packet_time + ContentTime::from_seconds (sub.end_display_time / 1e3)
KEY,
VALUE
} state = AWAITING_QUESTION_MARK;
-
+
map<string, string> r;
string k;
string v;
_scale = VideoContentScale (node->node_child ("Scale"));
}
-
+
if (node->optional_node_child ("ColourConversion")) {
_colour_conversion = ColourConversion (node->node_child ("ColourConversion"), version);
}
if (vc->fade_in() != ref->fade_in() || vc->fade_out() != ref->fade_out()) {
throw JoinError (_("Content to be joined must have the same fades."));
}
-
+
_video_length += vc->video_length ();
}
LOG_GENERAL ("Video length obtained from header as %1 frames", _video_length);
set_default_colour_conversion ();
-
+
signal_changed (VideoContentProperty::VIDEO_SIZE);
signal_changed (VideoContentProperty::VIDEO_FRAME_RATE);
signal_changed (VideoContentProperty::VIDEO_SCALE);
{
{
boost::mutex::scoped_lock lm (_mutex);
-
+
if (_crop.left == c) {
return;
}
-
+
_crop.left = c;
}
-
+
signal_changed (VideoContentProperty::VIDEO_CROP);
}
if (_crop.right == c) {
return;
}
-
+
_crop.right = c;
}
-
+
signal_changed (VideoContentProperty::VIDEO_CROP);
}
if (_crop.top == c) {
return;
}
-
+
_crop.top = c;
}
-
+
signal_changed (VideoContentProperty::VIDEO_CROP);
}
if (_crop.bottom == c) {
return;
}
-
+
_crop.bottom = c;
}
if (_video_frame_rate == r) {
return;
}
-
+
_video_frame_rate = r;
}
-
+
signal_changed (VideoContentProperty::VIDEO_FRAME_RATE);
}
VideoContent::fade (Frame f) const
{
DCPOMATIC_ASSERT (f >= 0);
-
+
if (f < fade_in()) {
return float (f) / fade_in();
}
d << " (" << fixed << setprecision(2) << scaled.ratio() << ":1)\n";
}
-
+
if (scaled != container_size) {
d << String::compose (
_("Padded with black to fit container %1 (%2x%3)"),
d << _("Content frame rate");
d << " " << fixed << setprecision(4) << video_frame_rate() << "\n";
-
+
FrameRateChange frc (video_frame_rate(), film->video_frame_rate ());
d << frc.description () << "\n";
virtual std::string identifier () const;
virtual void set_default_colour_conversion ();
-
+
Frame video_length () const {
boost::mutex::scoped_lock lm (_mutex);
return _video_length;
if (_video_frame_type == VIDEO_FRAME_TYPE_3D_ALTERNATE) {
return _video_length / 2;
}
-
+
return _video_length;
}
boost::mutex::scoped_lock lm (_mutex);
return _video_size;
}
-
+
float video_frame_rate () const {
boost::mutex::scoped_lock lm (_mutex);
return _video_frame_rate;
void set_fade_in (Frame);
void set_fade_out (Frame);
-
+
VideoFrameType video_frame_type () const {
boost::mutex::scoped_lock lm (_mutex);
return _video_frame_type;
boost::mutex::scoped_lock lm (_mutex);
return _fade_out;
}
-
+
dcp::Size video_size_after_3d_split () const;
dcp::Size video_size_after_crop () const;
friend struct audio_sampling_rate_test;
void setup_default_colour_conversion ();
-
+
dcp::Size _video_size;
VideoFrameType _video_frame_type;
Crop _crop;
VideoContentScale::id () const
{
SafeStringStream s;
-
+
if (_ratio) {
s << _ratio->id ();
} else {
s << (_scale ? "S1" : "S0");
}
-
+
return s.str ();
}
return VideoContentScale (true);
}
-
+
/** @param display_container Size of the container that we are displaying this content in.
* @param film_container The size of the film's image.
*/
float (display_container.width) / film_container.width,
float (display_container.height) / film_container.height
);
-
+
size.width = rint (size.width * scale);
size.height = rint (size.height * scale);
}
VideoDecoder::decoded_video (Frame frame)
{
list<ContentVideo> output;
-
+
for (list<ContentVideo>::const_iterator i = _decoded_video.begin(); i != _decoded_video.end(); ++i) {
if (i->frame == frame) {
output.push_back (*i);
}
}
}
-
+
/** Called by subclasses when they have a video frame ready */
void
VideoDecoder::video (shared_ptr<const ImageProxy> image, Frame frame)
boost::optional<Frame> from;
boost::optional<Frame> to;
-
+
if (_decoded_video.empty() && _last_seek_time && _last_seek_accurate) {
from = _last_seek_time->frames (_video_content->video_frame_rate ());
to = to_push.front().frame;
job->sub (_("Checking existing image data"));
check_existing_picture_asset ();
-
+
_picture_asset_writer = _picture_asset->start_write (
_film->internal_video_asset_dir() / _film->internal_video_asset_filename(),
_film->interop() ? dcp::INTEROP : dcp::SMPTE,
if (_film->encrypted ()) {
_sound_asset->set_key (_film->key ());
}
-
+
/* Write the sound asset into the film directory so that we leave the creation
of the DCP directory until the last minute.
*/
/* The queue is too big; wait until that is sorted out */
_full_condition.wait (lock);
}
-
+
FILE* file = fopen_boost (_film->info_file (), "rb");
if (!file) {
throw ReadFileError (_film->info_file ());
}
dcp::FrameInfo info = read_frame_info (file, frame, eyes);
fclose (file);
-
+
QueueItem qi;
qi.type = QueueItem::FAKE;
qi.size = info.size;
bool done_something = false;
while (true) {
-
+
if (_finish || _queued_full_in_memory > _maximum_frames_in_memory || have_sequenced_image_at_queue_head ()) {
/* We've got something to do: go and do it */
break;
LOG_WARNING (N_("- type FULL, frame %1, eyes %2"), i->frame, i->eyes);
} else {
LOG_WARNING (N_("- type FAKE, size %1, frame %2, eyes %3"), i->size, i->frame, i->eyes);
- }
+ }
}
LOG_WARNING (N_("Last written frame %1, last written eyes %2"), _last_written_frame, _last_written_eyes);
}
_last_written_frame = qi.frame;
_last_written_eyes = qi.eyes;
-
+
shared_ptr<Job> job = _job.lock ();
DCPOMATIC_ASSERT (job);
int64_t total = _film->length().frames (_film->video_frame_rate ());
);
i->encoded->write_via_temp (_film->j2c_path (i->frame, i->eyes, true), _film->j2c_path (i->frame, i->eyes, false));
-
+
lock.lock ();
i->encoded.reset ();
--_queued_full_in_memory;
if (_thread == 0) {
return;
}
-
+
_finish = true;
_empty_condition.notify_all ();
_full_condition.notify_all ();
if (can_throw) {
rethrow ();
}
-
+
delete _thread;
_thread = 0;
-}
+}
void
Writer::finish ()
if (!_thread) {
return;
}
-
+
terminate_thread (true);
_picture_asset_writer->finalize ();
if (_sound_asset_writer) {
_sound_asset_writer->finalize ();
}
-
+
/* Hard-link the video asset into the DCP */
boost::filesystem::path video_from = _picture_asset->file ();
-
+
boost::filesystem::path video_to;
video_to /= _film->dir (_film->dcp_name());
video_to /= video_asset_filename (_picture_asset);
boost::filesystem::path audio_to;
audio_to /= _film->dir (_film->dcp_name ());
audio_to /= audio_asset_filename (_sound_asset);
-
+
boost::filesystem::rename (_film->file (audio_asset_filename (_sound_asset)), audio_to, ec);
if (ec) {
throw FileError (
_film->dcp_content_type()->libdcp_kind ()
)
);
-
+
dcp.add (cpl);
shared_ptr<dcp::Reel> reel (new dcp::Reel ());
_film->dir (_film->dcp_name ()) / ("sub_" + _subtitle_asset->id() + ".mxf")
);
}
-
+
reel->add (shared_ptr<dcp::ReelSubtitleAsset> (
new dcp::ReelSubtitleAsset (
_subtitle_asset,
)
));
}
-
+
cpl->add (reel);
shared_ptr<Job> job = _job.lock ();
LOG_GENERAL ("Existing frame %1 has no info file", f);
return false;
}
-
+
dcp::FrameInfo info = read_frame_info (file, f, eyes);
fclose (file);
if (info.size == 0) {
LOG_GENERAL ("Existing frame %1 has no info file", f);
return false;
}
-
+
/* Read the data from the asset and hash it */
dcpomatic_fseek (asset, info.offset, SEEK_SET);
Data data (info.size);
s->set_edit_rate (dcp::Fraction (_film->video_frame_rate (), 1));
s->set_time_code_rate (_film->video_frame_rate ());
_subtitle_asset = s;
- }
+ }
}
-
+
for (list<dcp::SubtitleString>::const_iterator i = subs.text.begin(); i != subs.text.end(); ++i) {
_subtitle_asset->add (*i);
}
Writer::frame_info_position (int frame, Eyes eyes) const
{
static int const info_size = 48;
-
+
switch (eyes) {
case EYES_BOTH:
return frame * info_size;
default:
DCPOMATIC_ASSERT (false);
}
-
+
DCPOMATIC_ASSERT (false);
}
dcpomatic_fseek (file, frame_info_position (frame, eyes), SEEK_SET);
fread (&info.offset, sizeof (info.offset), 1, file);
fread (&info.size, sizeof (info.size), 1, file);
-
+
char hash_buffer[33];
fread (hash_buffer, 1, 32, file);
hash_buffer[32] = '\0';
QueueItem ()
: size (0)
{}
-
+
enum Type {
/** a normal frame with some JPEG200 data */
FULL,
~Writer ();
bool can_fake_write (int) const;
-
+
void write (Data, int, Eyes);
void fake_write (int, Eyes);
void repeat (int, Eyes);
* ordering
*/
int _maximum_frames_in_memory;
-
+
/** number of FULL written frames */
int _full_written;
/** number of FAKE written frames */
due to the limit of frames to be held in memory.
*/
int _pushed_to_disk;
-
+
boost::shared_ptr<dcp::PictureAsset> _picture_asset;
boost::shared_ptr<dcp::PictureAssetWriter> _picture_asset_writer;
boost::shared_ptr<dcp::SoundAsset> _sound_asset;
private:
/* Not defined */
FilmChangedDialog (FilmChangedDialog const &);
-
+
wxMessageDialog* _dialog;
};
#define NEEDS_SELECTED_VIDEO_CONTENT 0x8
map<wxMenuItem*, int> menu_items;
-
+
enum {
ID_file_new = 1,
ID_file_open,
#if defined(DCPOMATIC_WINDOWS)
if (Config::instance()->win32_console ()) {
AllocConsole();
-
+
HANDLE handle_out = GetStdHandle(STD_OUTPUT_HANDLE);
int hCrt = _open_osfhandle((intptr_t) handle_out, _O_TEXT);
FILE* hf_out = _fdopen(hCrt, "w");
setvbuf(hf_out, NULL, _IONBF, 1);
*stdout = *hf_out;
-
+
HANDLE handle_in = GetStdHandle(STD_INPUT_HANDLE);
hCrt = _open_osfhandle((intptr_t) handle_in, _O_TEXT);
FILE* hf_in = _fdopen(hCrt, "r");
try
{
maybe_save_then_delete_film ();
-
+
shared_ptr<Film> film (new Film (file));
list<string> const notes = film->read_metadata ();
"in this version. Please check the film's settings carefully.")
);
}
-
+
for (list<string>::const_iterator i = notes.begin(); i != notes.end(); ++i) {
error_dialog (0, std_to_wx (*i));
}
-
+
set_film (film);
}
catch (std::exception& e) {
shared_ptr<Film> film () const {
return _film;
}
-
+
private:
void file_changed (boost::filesystem::path f)
if (!f.empty ()) {
s += " - " + f.string ();
}
-
+
SetTitle (std_to_wx (s));
}
-
+
void file_new ()
{
NewFilmDialog* d = new NewFilmDialog (this);
int const r = d->ShowModal ();
-
+
if (r == wxID_OK) {
if (boost::filesystem::is_directory (d->get_path()) && !boost::filesystem::is_empty(d->get_path())) {
);
return;
}
-
+
maybe_save_then_delete_film ();
new_film (d->get_path ());
}
-
+
d->Destroy ();
}
std_to_wx (Config::instance()->default_directory_or (wx_to_std (wxStandardPaths::Get().GetDocumentsDir())).string ()),
wxDEFAULT_DIALOG_STYLE | wxDD_DIR_MUST_EXIST
);
-
+
int r;
while (true) {
r = c->ShowModal ();
break;
}
}
-
+
if (r == wxID_OK) {
load_film (wx_to_std (c->GetPath ()));
}
load_film (history[n]);
}
}
-
+
void file_exit ()
{
/* false here allows the close handler to veto the close request */
if (!_film) {
return;
}
-
+
KDMDialog* d = new KDMDialog (this, _film);
if (d->ShowModal () != wxID_OK) {
d->Destroy ();
} catch (...) {
error_dialog (this, _("An unknown exception occurred."));
}
-
+
d->Destroy ();
}
(*i)->scale_and_crop_to_fit_height ();
}
}
-
+
void jobs_send_dcp_to_tms ()
{
_film->send_dcp_to_tms ();
}
}
}
-#endif
+#endif
#ifdef DCPOMATIC_OSX
int r = system (string ("open -R " + _film->dir (_film->dcp_name (false)).string ()).c_str ());
if (WEXITSTATUS (r)) {
error_dialog (this, _("Could not show DCP"));
}
-#endif
+#endif
}
void tools_hints ()
d->Destroy ();
return r;
}
-
+
void close (wxCloseEvent& ev)
{
if (!should_close ()) {
now (without, as far as I can see, any way for us to find out).
*/
_config_changed_connection.disconnect ();
-
+
maybe_save_then_delete_film ();
ev.Skip ();
}
bool const dcp_creation = (i != jobs.end ()) && !(*i)->finished ();
bool const have_cpl = _film && !_film->cpls().empty ();
bool const have_selected_video_content = !_film_editor->content_panel()->selected_video().empty();
-
+
for (map<wxMenuItem*, int>::iterator j = menu_items.begin(); j != menu_items.end(); ++j) {
-
+
bool enabled = true;
-
+
if ((j->second & NEEDS_FILM) && !_film) {
enabled = false;
}
-
+
if ((j->second & NOT_DURING_DCP_CREATION) && dcp_creation) {
enabled = false;
}
-
+
if ((j->second & NEEDS_CPL) && !have_cpl) {
enabled = false;
}
-
+
if ((j->second & NEEDS_SELECTED_VIDEO_CONTENT) && !have_selected_video_content) {
enabled = false;
}
-
+
j->first->Enable (enabled);
}
}
if (!_film) {
return;
}
-
+
if (_film->dirty ()) {
FilmChangedDialog d (_film->name ());
switch (d.run ()) {
break;
}
}
-
+
_film.reset ();
}
wxMenuItem* item = menu->Append (id, text);
menu_items.insert (make_pair (item, sens));
}
-
+
void setup_menu (wxMenuBar* m)
{
_file_menu = new wxMenu;
_history_position = _file_menu->GetMenuItems().GetCount();
-#ifndef __WXOSX__
+#ifndef __WXOSX__
_file_menu->AppendSeparator ();
#endif
-
-#ifdef __WXOSX__
+
+#ifdef __WXOSX__
add_item (_file_menu, _("&Exit"), wxID_EXIT, ALWAYS);
#else
add_item (_file_menu, _("&Quit"), wxID_EXIT, ALWAYS);
-#endif
-
-#ifdef __WXOSX__
+#endif
+
+#ifdef __WXOSX__
add_item (_file_menu, _("&Preferences...\tCtrl-P"), wxID_PREFERENCES, ALWAYS);
#else
wxMenu* edit = new wxMenu;
wxMenu* content = new wxMenu;
add_item (content, _("Scale to fit &width"), ID_content_scale_to_fit_width, NEEDS_FILM | NEEDS_SELECTED_VIDEO_CONTENT);
add_item (content, _("Scale to fit &height"), ID_content_scale_to_fit_height, NEEDS_FILM | NEEDS_SELECTED_VIDEO_CONTENT);
-
+
wxMenu* jobs_menu = new wxMenu;
add_item (jobs_menu, _("&Make DCP\tCtrl-M"), ID_jobs_make_dcp, NEEDS_FILM | NOT_DURING_DCP_CREATION);
add_item (jobs_menu, _("Make &KDMs...\tCtrl-K"), ID_jobs_make_kdms, NEEDS_FILM);
add_item (tools, _("Check for updates"), ID_tools_check_for_updates, 0);
tools->AppendSeparator ();
add_item (tools, _("Restore default preferences"), ID_tools_restore_default_preferences, ALWAYS);
-
+
wxMenu* help = new wxMenu;
-#ifdef __WXOSX__
+#ifdef __WXOSX__
add_item (help, _("About DCP-o-matic"), wxID_ABOUT, ALWAYS);
-#else
+#else
add_item (help, _("About"), wxID_ABOUT, ALWAYS);
-#endif
+#endif
add_item (help, _("Report a problem..."), ID_help_report_a_problem, ALWAYS);
-
+
m->Append (_file_menu, _("&File"));
-#ifndef __WXOSX__
+#ifndef __WXOSX__
m->Append (edit, _("&Edit"));
#endif
m->Append (content, _("&Content"));
}
delete _history_separator;
_history_separator = 0;
-
+
int pos = _history_position;
-
+
vector<boost::filesystem::path> history = Config::instance()->history ();
-
+
if (!history.empty ()) {
_history_separator = _file_menu->InsertSeparator (pos++);
}
-
+
for (size_t i = 0; i < history.size(); ++i) {
SafeStringStream s;
if (i < 9) {
_history_items = history.size ();
}
-
+
FilmEditor* _film_editor;
FilmViewer* _film_viewer;
HintsDialog* _hints_dialog;
, _frame (0)
{}
-private:
-
+private:
+
bool OnInit ()
try
{
SetAppName (_("DCP-o-matic"));
-
+
if (!wxApp::OnInit()) {
return false;
}
-
-#ifdef DCPOMATIC_LINUX
+
+#ifdef DCPOMATIC_LINUX
unsetenv ("UBUNTU_MENUPROXY");
#endif
-#ifdef __WXOSX__
+#ifdef __WXOSX__
ProcessSerialNumber serial;
GetCurrentProcess (&serial);
TransformProcessType (&serial, kProcessTransformToForegroundApplication);
-#endif
+#endif
wxInitAllImageHandlers ();
/* This will terminate the program */
return false;
}
-
+
void OnUnhandledException ()
{
error_dialog (0, _("An unknown exception occurred.") + " " + REPORT_PROBLEM);
_sizer->Layout ();
ev.Skip ();
}
-
+
bool should_close ()
{
if (!JobManager::instance()->work_to_do ()) {
d->Destroy ();
return r;
}
-
+
void close (wxCloseEvent& ev)
{
if (!should_close ()) {
{
add_film ();
}
-
+
void file_quit ()
{
if (should_close ()) {
if (_last_parent) {
c->SetPath (std_to_wx (_last_parent.get().string ()));
}
-
+
int r;
while (true) {
r = c->ShowModal ();
break;
}
}
-
+
if (r == wxID_OK) {
try {
shared_ptr<Film> film (new Film (wx_to_std (c->GetPath ())));
if (!wxApp::OnInit()) {
return false;
}
-
-#ifdef DCPOMATIC_LINUX
+
+#ifdef DCPOMATIC_LINUX
unsetenv ("UBUNTU_MENUPROXY");
-#endif
+#endif
/* Enable i18n; this will create a Config object
to look for a force-configured language. This Config
} else {
cout << "\tno colour conversion\n";
}
-
+
}
shared_ptr<AudioContent> audio = dynamic_pointer_cast<AudioContent> (c);
}
film_dir = argv[optind];
-
+
dcpomatic_setup ();
signal_manager = new SignalManager ();
-
+
if (no_remote) {
ServerFinder::instance()->disable ();
}
print_dump (film);
exit (EXIT_SUCCESS);
}
-
+
ContentList content = film->content ();
for (ContentList::const_iterator i = content.begin(); i != content.end(); ++i) {
vector<boost::filesystem::path> paths = (*i)->paths ();
for (list<shared_ptr<Job> >::iterator i = jobs.begin(); i != jobs.end(); ++i) {
if (progress) {
cout << (*i)->name() << ": ";
-
+
if ((*i)->progress ()) {
cout << (*i)->status() << " \n";
} else {
JobManager::drop ();
ServerFinder::drop ();
-
+
return error ? EXIT_FAILURE : EXIT_SUCCESS;
}
-
+
Ratio const * content_ratio = 0;
int still_length = 10;
boost::filesystem::path output;
-
+
int option_index = 0;
while (true) {
static struct option long_options[] = {
film->set_container (container_ratio);
film->set_dcp_content_type (dcp_content_type);
-
+
for (int i = optind; i < argc; ++i) {
shared_ptr<Content> c = content_factory (film, argv[i]);
shared_ptr<VideoContent> vc = dynamic_pointer_cast<VideoContent> (c);
}
film->examine_and_add_content (c);
}
-
+
JobManager* jm = JobManager::instance ();
while (jm->work_to_do ()) {}
cerr << argv[0] << ": " << e.what() << "\n";
exit (EXIT_FAILURE);
}
-
+
return 0;
}
dcp::Formulation formulation = dcp::MODIFIED_TRANSITIONAL_1;
program_name = argv[0];
-
+
int option_index = 0;
while (true) {
static struct option long_options[] = {
}
string const film_dir = argv[optind];
-
+
dcpomatic_setup ();
shared_ptr<Film> film;
if (output.empty ()) {
error ("you must specify --output");
}
-
+
dcp::Certificate certificate (dcp::file_to_string (certificate_file));
dcp::EncryptedKDM kdm = film->make_kdm (certificate, cpl, valid_from.get(), valid_to.get(), formulation);
kdm.as_xml (output);
write_kdm_zip_files (
film, (*i)->screens(), cpl, dcp::LocalTime (valid_from.get()), dcp::LocalTime (valid_to.get()), formulation, output
);
-
+
if (verbose) {
cout << "Wrote ZIP files to " << output << "\n";
}
write_kdm_files (
film, (*i)->screens(), cpl, dcp::LocalTime (valid_from.get()), dcp::LocalTime (valid_to.get()), formulation, output
);
-
+
if (verbose) {
cout << "Wrote KDM files to " << output << "\n";
}
_log = m;
}
- string _log;
+ string _log;
};
static shared_ptr<MemoryLog> memory_log (new MemoryLog);
public:
TaskBarIcon ()
{
-#ifdef __WXMSW__
+#ifdef __WXMSW__
wxIcon icon (std_to_wx ("taskbar_icon"));
#endif
#ifdef __WXGTK__
#ifndef __WXOSX__
/* XXX: fix this for OS X */
SetIcon (icon, std_to_wx ("DCP-o-matic encode server"));
-#endif
+#endif
Bind (wxEVT_COMMAND_MENU_SELECTED, boost::bind (&TaskBarIcon::status, this), ID_status);
Bind (wxEVT_COMMAND_MENU_SELECTED, boost::bind (&TaskBarIcon::quit, this), ID_quit);
}
-
+
wxMenu* CreatePopupMenu ()
{
wxMenu* menu = new wxMenu;
, _icon (0)
{}
-private:
-
+private:
+
bool OnInit ()
{
if (!wxApp::OnInit ()) {
return false;
}
-
+
dcpomatic_setup ();
_icon = new TaskBarIcon;
Bind (wxEVT_TIMER, boost::bind (&App::check, this));
_timer.reset (new wxTimer (this));
_timer->Start (1000);
-
+
return true;
}
} else {
log.reset (new NullLog);
}
-
+
Server server (log, verbose);
-
+
try {
server.run (num_threads);
} catch (boost::system::system_error& e) {
cout << "\033[0;31msizes differ\033[0m\n";
return;
}
-
+
uint8_t* p = local_encoded.data().get ();
uint8_t* q = remote_encoded.data().get ();
for (int i = 0; i < local_encoded.size(); ++i) {
break;
}
}
-
+
if (server_host.empty() || film_dir.empty()) {
help (argv[0]);
exit (EXIT_FAILURE);
server = new ServerDescription (server_host, 1);
film.reset (new Film (film_dir));
film->read_metadata ();
-
+
shared_ptr<Player> player (new Player (film));
DCPTime const frame = DCPTime::from_frames (1, film->video_frame_rate ());
{
wxBoxSizer* overall_sizer = new wxBoxSizer (wxVERTICAL);
wxBoxSizer* sizer = new wxBoxSizer (wxVERTICAL);
-
+
wxFont title_font (*wxNORMAL_FONT);
title_font.SetPointSize (title_font.GetPointSize() + 12);
title_font.SetWeight (wxFONTWEIGHT_BOLD);
wxFont version_font (*wxNORMAL_FONT);
version_font.SetWeight (wxFONTWEIGHT_BOLD);
-
+
wxStaticText* t = new wxStaticText (this, wxID_ANY, _("DCP-o-matic"));
t->SetFont (title_font);
sizer->Add (t, wxSizerFlags().Centre().Border(wxALL, 16));
wxDefaultPosition, wxDefaultSize, wxALIGN_CENTER
);
t->SetFont (subtitle_font);
-
+
sizer->Add (t, wxSizerFlags().Centre().Border(wxALL, 8));
wxHyperlinkCtrl* h = new wxHyperlinkCtrl (
_("(C) 2012-2015 Carl Hetherington, Terrence Meiczinger\n Ole Laursen, Brecht Sanders"),
wxDefaultPosition, wxDefaultSize, wxALIGN_CENTER
);
-
+
sizer->Add (t, wxSizerFlags().Centre().Border(wxLEFT | wxRIGHT, 16));
_notebook = new wxNotebook (this, wxID_ANY);
tested_by.Add (wxT ("Paul Willmott"));
tested_by.Add (wxT ("Wolfgang Woehl"));
add_section (_("Tested by"), tested_by);
-
+
sizer->Add (_notebook, wxSizerFlags().Centre().Border(wxALL, 16).Expand());
overall_sizer->Add (sizer);
if (buttons) {
overall_sizer->Add (buttons, 1, wxEXPAND | wxALL, 4);
}
-
+
SetSizerAndFit (overall_sizer);
}
wxSizer* overall_sizer = new wxBoxSizer (wxHORIZONTAL);
vector<wxSizer*> sizers;
-
+
for (int i = 0; i < N; ++i) {
sizers.push_back (new wxBoxSizer (wxVERTICAL));
overall_sizer->Add (sizers.back (), 1, wxEXPAND | wxALL, 6);
wxBoxSizer* overall_sizer = new wxBoxSizer (wxVERTICAL);
wxBoxSizer* lr_sizer = new wxBoxSizer (wxHORIZONTAL);
-
+
wxBoxSizer* left = new wxBoxSizer (wxVERTICAL);
_plot = new AudioPlot (this);
m->SetFont (subheading_font);
right->Add (m, 1, wxALIGN_CENTER_VERTICAL | wxTOP, 16);
}
-
+
wxString const types[] = {
_("Peak"),
_("RMS")
m->SetFont (subheading_font);
right->Add (m, 1, wxALIGN_CENTER_VERTICAL | wxTOP, 16);
}
-
+
_smoothing = new wxSlider (this, wxID_ANY, AudioPlot::max_smoothing / 2, 1, AudioPlot::max_smoothing);
_smoothing->Bind (wxEVT_SCROLL_THUMBTRACK, boost::bind (&AudioDialog::smoothing_changed, this));
right->Add (_smoothing, 0, wxEXPAND);
overall_sizer->Add (lr_sizer);
-#ifdef DCPOMATIC_LINUX
+#ifdef DCPOMATIC_LINUX
wxSizer* buttons = CreateSeparatedButtonSizer (wxCLOSE);
if (buttons) {
overall_sizer->Add (buttons, wxSizerFlags().Expand().DoubleBorder());
}
-#endif
+#endif
SetSizer (overall_sizer);
overall_sizer->Layout ();
JobManager::instance()->add (job);
return;
}
-
+
_plot->set_analysis (_analysis);
setup_peak_time ();
/* Set up some defaults if no check boxes are checked */
-
+
int i = 0;
while (i < MAX_DCP_AUDIO_CHANNELS && (!_channel_checkbox[i] || !_channel_checkbox[i]->GetValue ())) {
++i;
{
shared_ptr<const Film> film = _film.lock ();
DCPOMATIC_ASSERT (film);
-
+
if (!boost::filesystem::exists (film->audio_analysis_path ())) {
/* We analysed and still nothing showed up, so maybe it was cancelled or it failed.
Give up.
if (!_analysis || !_analysis->peak ()) {
return;
}
-
+
shared_ptr<Film> film = _film.lock ();
if (!film) {
return;
}
-
+
float peak_dB = 20 * log10 (_analysis->peak().get());
-
+
_peak_time->SetLabel (
wxString::Format (
_("Peak is %.2fdB at %s"),
time_to_timecode (_analysis->peak_time().get(), film->video_frame_rate ()).data ()
)
);
-
+
if (peak_dB > -3) {
_peak_time->SetForegroundColour (wxColour (255, 0, 0));
} else {
return r;
}
-
+
if (_gain->GetValue() <= -144) {
return 0;
}
-
+
return pow (10, _gain->GetValue () / 20);
}
AudioGainDialog (wxWindow *, int, int, float);
float value () const;
-
+
private:
wxSpinCtrlDouble* _gain;
};
}
height = max (0, height);
-
+
if (value > 0) {
/* Make sure we get a little bit of the marker if there is any gain */
height = max (3, height);
{
return wxSize (INDICATOR_SIZE + 4, INDICATOR_SIZE + 4);
}
-
+
wxGridCellRenderer* Clone () const
{
return new ValueRenderer;
update_cells ();
Changed (_map);
_last_tooltip_column = -1;
-}
+}
void
AudioMappingView::left_click (wxGridEvent& ev)
}
int const d = ev.GetCol() - 1;
-
+
if (_map.get (ev.GetRow(), d) > 0) {
_map.set (ev.GetRow(), d, 0);
} else {
AudioMappingView::edit ()
{
int const d = _menu_column - 1;
-
+
AudioGainDialog* dialog = new AudioGainDialog (this, _menu_row, _menu_column - 1, _map.get (_menu_row, d));
if (dialog->ShowModal () == wxID_OK) {
_map.set (_menu_row, d, dialog->value ());
map_values_changed ();
}
-
+
dialog->Destroy ();
}
for (int i = 0; i < _grid->GetNumberRows (); ++i) {
row_names.push_back (wx_to_std (_grid->GetCellValue (i, 0)));
}
-
+
if (_grid->GetNumberRows ()) {
_grid->DeleteRows (0, _grid->GetNumberRows ());
}
_grid->SetCellRenderer (i, j + 1, new ValueRenderer);
}
}
-
+
for (int i = 0; i < _map.input_channels(); ++i) {
if (i < int (row_names.size ())) {
_grid->SetCellValue (i, 0, std_to_wx (row_names[i]));
float const dB = 20 * log10 (gain);
s = wxString::Format (_("Audio will be passed from content channel %d to DCP channel %d with gain %.1fdB."), row + 1, column, dB);
}
-
+
_grid->GetGridWindow()->SetToolTip (s + " " + _("Right click to change gain."));
_last_tooltip_row = row;
_last_tooltip_column = column;
boost::mem_fn (&AudioContent::audio_gain),
boost::mem_fn (&AudioContent::set_audio_gain)
);
-
+
_gain->add (grid, wxGBPosition (r, 1));
add_label_to_grid_bag_sizer (grid, this, _("dB"), false, wxGBPosition (r, 2));
_gain_calculate_button = new wxButton (this, wxID_ANY, _("Calculate..."));
boost::mem_fn (&AudioContent::audio_delay),
boost::mem_fn (&AudioContent::set_audio_delay)
);
-
+
_delay->add (grid, wxGBPosition (r, 1));
/// TRANSLATORS: this is an abbreviation for milliseconds, the unit of time
add_label_to_grid_bag_sizer (grid, this, _("ms"), false, wxGBPosition (r, 2));
d->Destroy ();
return;
}
-
+
_gain->wrapped()->SetValue (
Config::instance()->cinema_sound_processor()->db_for_fader_change (
d->wanted_fader (),
I think.
*/
_gain->view_changed ();
-
+
d->Destroy ();
}
void film_changed (Film::Property);
void film_content_changed (int);
void content_selection_changed ();
-
+
private:
void gain_calculate_button_clicked ();
void mapping_changed (AudioMapping);
: wxPanel (parent, wxID_ANY, wxDefaultPosition, wxDefaultSize, wxFULL_REPAINT_ON_RESIZE)
, _smoothing (max_smoothing / 2)
{
-#ifndef __WXOSX__
+#ifndef __WXOSX__
SetDoubleBuffered (true);
-#endif
+#endif
for (int i = 0; i < MAX_DCP_AUDIO_CHANNELS; ++i) {
_channel_visible[i] = false;
#if MAX_DCP_AUDIO_CHANNELS != 12
#warning AudioPlot::AudioPlot is expecting the wrong MAX_DCP_AUDIO_CHANNELS
-#endif
-
+#endif
+
Bind (wxEVT_PAINT, boost::bind (&AudioPlot::paint, this));
-
+
SetMinSize (wxSize (640, 512));
}
gc->GetTextExtent (wxT ("-80dB"), &metrics.db_label_width, &db_label_height, &db_label_descent, &db_label_leading);
metrics.db_label_width += 8;
-
+
int const data_width = GetSize().GetWidth() - metrics.db_label_width;
/* Assume all channels have the same number of points */
metrics.x_scale = data_width / float (_analysis->points (0));
gc->StrokePath (grid);
gc->DrawText (_("Time"), data_width, metrics.height - metrics.y_origin + db_label_height / 2);
-
+
if (_type_visible[AudioPoint::PEAK]) {
for (int c = 0; c < MAX_DCP_AUDIO_CHANNELS; ++c) {
wxGraphicsPath p = gc->CreatePath ();
if (p < 1e-4) {
p = 1e-4;
}
-
+
return metrics.height - (20 * log10(p) - _minimum) * metrics.y_scale - metrics.y_origin;
}
if (_analysis->points (channel) == 0) {
return;
}
-
+
path.MoveToPoint (metrics.db_label_width, y_for_linear (_analysis->get_point(channel, 0)[AudioPoint::PEAK], metrics));
float peak = 0;
if (_analysis->points (channel) == 0) {
return;
}
-
+
path.MoveToPoint (metrics.db_label_width, y_for_linear (_analysis->get_point(channel, 0)[AudioPoint::RMS], metrics));
list<float> smoothing;
int const before = _smoothing / 2;
int const after = _smoothing - before;
-
+
/* Pre-load the smoothing list */
for (int i = 0; i < before; ++i) {
smoothing.push_back (first);
std::string name () const;
std::string email () const;
-
+
private:
wxTextCtrl* _name;
wxTextCtrl* _email;
_input_B = new wxTextCtrl (this, wxID_ANY, wxT (""));
s->Add (_input_B, 1, wxEXPAND | wxRIGHT, DCPOMATIC_SIZER_GAP);
table->Add (s, wxGBPosition (r, 1), wxGBSpan (1, 3));
- }
+ }
++r;
-
+
wxClientDC dc (parent);
wxSize size = dc.GetTextExtent (wxT ("-0.12345678901"));
size.SetHeight (-1);
/* YUV to RGB conversion */
subhead (table, this, _("YUV to RGB conversion"), r);
-
+
add_label_to_grid_bag_sizer (table, this, _("YUV to RGB matrix"), true, wxGBPosition (r, 0));
_yuv_to_rgb = new wxChoice (this, wxID_ANY);
_yuv_to_rgb->Append (_("Rec. 601"));
size = dc.GetTextExtent (wxT ("0.12345678"));
size.SetHeight (-1);
-
+
wxFlexGridSizer* rgb_to_xyz_sizer = new wxFlexGridSizer (3, DCPOMATIC_SIZER_X_GAP, DCPOMATIC_SIZER_Y_GAP);
for (int i = 0; i < 3; ++i) {
for (int j = 0; j < 3; ++j) {
size = dc.GetTextExtent (wxT ("0.12345678"));
size.SetHeight (-1);
-
+
wxFlexGridSizer* bradford_sizer = new wxFlexGridSizer (3, DCPOMATIC_SIZER_X_GAP, DCPOMATIC_SIZER_Y_GAP);
for (int i = 0; i < 3; ++i) {
for (int j = 0; j < 3; ++j) {
table->Add (bradford_sizer, wxGBPosition (r - 2, 3), wxGBSpan (2, 1));
subhead (table, this, _("Output gamma correction"), r);
-
+
add_label_to_grid_bag_sizer (table, this, _("Output gamma"), true, wxGBPosition (r, 0));
wxBoxSizer* output_sizer = new wxBoxSizer (wxHORIZONTAL);
/// TRANSLATORS: this means the mathematical reciprocal operation, i.e. we are dividing 1 by the control that
update_rgb_to_xyz ();
update_bradford ();
-
+
set_spin_ctrl (_output_gamma, dynamic_pointer_cast<const dcp::GammaTransferFunction> (conversion.out ())->gamma ());
}
_input_threshold->Enable (lin);
_input_A->Enable (lin);
_input_B->Enable (lin);
-
+
Changed ();
}
{
_adjusted_white_x->Enable (_adjust_white->GetValue ());
_adjusted_white_y->Enable (_adjust_white->GetValue ());
-
+
boost::numeric::ublas::matrix<double> m = get().bradford ();
for (int i = 0; i < 3; ++i) {
for (int j = 0; j < 3; ++j) {
if (fabs (_last_spin_ctrl_value[sc] - sc->GetValue()) < 1e-3) {
return;
}
-
+
Changed ();
}
void set_spin_ctrl (wxSpinCtrlDouble *, double);
std::map<wxSpinCtrlDouble*, double> _last_spin_ctrl_value;
-
+
wxSpinCtrlDouble* _input_gamma;
wxSpinCtrlDouble* _input_power;
wxTextCtrl* _input_threshold;
config_changed ();
_panel->Bind (wxEVT_DESTROY, boost::bind (&Page::window_destroyed, this));
-
+
return _panel;
}
-
+
int _border;
wxPanel* _panel;
: StockPage (Kind_General, panel_size, border)
{}
-private:
+private:
void setup ()
{
wxFlexGridSizer* table = new wxFlexGridSizer (2, DCPOMATIC_SIZER_X_GAP, DCPOMATIC_SIZER_Y_GAP);
table->AddGrowableCol (1, 1);
_panel->GetSizer()->Add (table, 1, wxALL | wxEXPAND, _border);
-
+
_set_language = new wxCheckBox (_panel, wxID_ANY, _("Set language"));
table->Add (_set_language, 1);
_language = new wxChoice (_panel, wxID_ANY);
_language->Append (wxT ("Svenska"));
_language->Append (wxT ("Русский"));
table->Add (_language);
-
+
wxStaticText* restart = add_label_to_sizer (table, _panel, _("(restart DCP-o-matic to see language changes)"), false);
wxFont font = restart->GetFont();
font.SetStyle (wxFONTSTYLE_ITALIC);
font.SetPointSize (font.GetPointSize() - 1);
restart->SetFont (font);
table->AddSpacer (0);
-
+
add_label_to_sizer (table, _panel, _("Threads to use for encoding on this host"), true);
_num_local_encoding_threads = new wxSpinCtrl (_panel);
table->Add (_num_local_encoding_threads, 1);
_check_for_updates = new wxCheckBox (_panel, wxID_ANY, _("Check for updates on startup"));
table->Add (_check_for_updates, 1, wxEXPAND | wxALL);
table->AddSpacer (0);
-
+
_check_for_test_updates = new wxCheckBox (_panel, wxID_ANY, _("Check for testing updates as well as stable ones"));
table->Add (_check_for_test_updates, 1, wxEXPAND | wxALL);
table->AddSpacer (0);
_set_language->Bind (wxEVT_COMMAND_CHECKBOX_CLICKED, boost::bind (&GeneralPage::set_language_changed, this));
_language->Bind (wxEVT_COMMAND_CHOICE_SELECTED, boost::bind (&GeneralPage::language_changed, this));
-
+
_num_local_encoding_threads->SetRange (1, 128);
_num_local_encoding_threads->Bind (wxEVT_COMMAND_SPINCTRL_UPDATED, boost::bind (&GeneralPage::num_local_encoding_threads_changed, this));
void config_changed ()
{
Config* config = Config::instance ();
-
+
checked_set (_set_language, config->language ());
-
+
if (config->language().get_value_or ("") == "fr") {
_language->SetSelection (3);
} else if (config->language().get_value_or ("") == "it") {
}
setup_language_sensitivity ();
-
+
checked_set (_num_local_encoding_threads, config->num_local_encoding_threads ());
checked_set (_check_for_updates, config->check_for_updates ());
checked_set (_check_for_test_updates, config->check_for_test_updates ());
break;
}
}
-
+
void check_for_updates_changed ()
{
Config::instance()->set_check_for_updates (_check_for_updates->GetValue ());
}
-
+
void check_for_test_updates_changed ()
{
Config::instance()->set_check_for_test_updates (_check_for_test_updates->GetValue ());
DefaultsPage (wxSize panel_size, int border)
: StandardPage (panel_size, border)
{}
-
+
wxString GetName () const
{
return _("Defaults");
}
-#ifdef DCPOMATIC_OSX
+#ifdef DCPOMATIC_OSX
wxBitmap GetLargeIcon () const
{
return wxBitmap ("defaults", wxBITMAP_TYPE_PNG_RESOURCE);
}
-#endif
+#endif
-private:
+private:
void setup ()
{
wxFlexGridSizer* table = new wxFlexGridSizer (2, DCPOMATIC_SIZER_X_GAP, DCPOMATIC_SIZER_Y_GAP);
table->AddGrowableCol (1, 1);
_panel->GetSizer()->Add (table, 1, wxALL | wxEXPAND, _border);
-
+
{
add_label_to_sizer (table, _panel, _("Default duration of still images"), true);
wxBoxSizer* s = new wxBoxSizer (wxHORIZONTAL);
add_label_to_sizer (s, _panel, _("s"), false);
table->Add (s, 1);
}
-
+
add_label_to_sizer (table, _panel, _("Default directory for new films"), true);
#ifdef DCPOMATIC_USE_OWN_DIR_PICKER
_directory = new DirPickerCtrl (_panel);
-#else
+#else
_directory = new wxDirPickerCtrl (_panel, wxDD_DIR_MUST_EXIST);
#endif
table->Add (_directory, 1, wxEXPAND);
-
+
add_label_to_sizer (table, _panel, _("Default ISDCF name details"), true);
_isdcf_metadata_button = new wxButton (_panel, wxID_ANY, _("Edit..."));
table->Add (_isdcf_metadata_button);
add_label_to_sizer (table, _panel, _("Default container"), true);
_container = new wxChoice (_panel, wxID_ANY);
table->Add (_container);
-
+
add_label_to_sizer (table, _panel, _("Default content type"), true);
_dcp_content_type = new wxChoice (_panel, wxID_ANY);
table->Add (_dcp_content_type);
-
+
{
add_label_to_sizer (table, _panel, _("Default JPEG2000 bandwidth"), true);
wxBoxSizer* s = new wxBoxSizer (wxHORIZONTAL);
add_label_to_sizer (s, _panel, _("Mbit/s"), false);
table->Add (s, 1);
}
-
+
{
add_label_to_sizer (table, _panel, _("Default audio delay"), true);
wxBoxSizer* s = new wxBoxSizer (wxHORIZONTAL);
_still_length->SetRange (1, 3600);
_still_length->Bind (wxEVT_COMMAND_SPINCTRL_UPDATED, boost::bind (&DefaultsPage::still_length_changed, this));
-
+
_directory->Bind (wxEVT_COMMAND_DIRPICKER_CHANGED, boost::bind (&DefaultsPage::directory_changed, this));
-
+
_isdcf_metadata_button->Bind (wxEVT_COMMAND_BUTTON_CLICKED, boost::bind (&DefaultsPage::edit_isdcf_metadata_clicked, this));
-
+
vector<Ratio const *> ratios = Ratio::all ();
for (size_t i = 0; i < ratios.size(); ++i) {
_container->Append (std_to_wx (ratios[i]->nickname ()));
}
-
+
_container->Bind (wxEVT_COMMAND_CHOICE_SELECTED, boost::bind (&DefaultsPage::container_changed, this));
-
+
vector<DCPContentType const *> const ct = DCPContentType::all ();
for (size_t i = 0; i < ct.size(); ++i) {
_dcp_content_type->Append (std_to_wx (ct[i]->pretty_name ()));
}
-
+
_dcp_content_type->Bind (wxEVT_COMMAND_CHOICE_SELECTED, boost::bind (&DefaultsPage::dcp_content_type_changed, this));
-
+
_j2k_bandwidth->SetRange (50, 250);
_j2k_bandwidth->Bind (wxEVT_COMMAND_SPINCTRL_UPDATED, boost::bind (&DefaultsPage::j2k_bandwidth_changed, this));
-
+
_audio_delay->SetRange (-1000, 1000);
_audio_delay->Bind (wxEVT_COMMAND_SPINCTRL_UPDATED, boost::bind (&DefaultsPage::audio_delay_changed, this));
_dcp_content_type->SetSelection (i);
}
}
-
+
checked_set (_still_length, config->default_still_length ());
_directory->SetPath (std_to_wx (config->default_directory_or (wx_to_std (wxStandardPaths::Get().GetDocumentsDir())).string ()));
checked_set (_j2k_bandwidth, config->default_j2k_bandwidth() / 1000000);
checked_set (_audio_delay, config->default_audio_delay ());
checked_set (_issuer, config->dcp_issuer ());
}
-
+
void j2k_bandwidth_changed ()
{
Config::instance()->set_default_j2k_bandwidth (_j2k_bandwidth->GetValue() * 1000000);
}
-
+
void audio_delay_changed ()
{
Config::instance()->set_default_audio_delay (_audio_delay->GetValue());
vector<Ratio const *> ratio = Ratio::all ();
Config::instance()->set_default_container (ratio[_container->GetSelection()]);
}
-
+
void dcp_content_type_changed ()
{
vector<DCPContentType const *> ct = DCPContentType::all ();
{
Config::instance()->set_dcp_issuer (wx_to_std (_issuer->GetValue ()));
}
-
+
wxSpinCtrl* _j2k_bandwidth;
wxSpinCtrl* _audio_delay;
wxButton* _isdcf_metadata_button;
EncodingServersPage (wxSize panel_size, int border)
: StandardPage (panel_size, border)
{}
-
+
wxString GetName () const
{
return _("Servers");
}
-#ifdef DCPOMATIC_OSX
+#ifdef DCPOMATIC_OSX
wxBitmap GetLargeIcon () const
{
return wxBitmap ("servers", wxBITMAP_TYPE_PNG_RESOURCE);
}
-#endif
+#endif
-private:
+private:
void setup ()
{
_use_any_servers = new wxCheckBox (_panel, wxID_ANY, _("Use all servers"));
_panel->GetSizer()->Add (_use_any_servers, 0, wxALL, _border);
-
+
vector<string> columns;
columns.push_back (wx_to_std (_("IP address / host name")));
_servers_list = new EditableList<string, ServerDialog> (
boost::bind (&Config::set_servers, Config::instance(), _1),
boost::bind (&EncodingServersPage::server_column, this, _1)
);
-
+
_panel->GetSizer()->Add (_servers_list, 1, wxEXPAND | wxALL, _border);
-
+
_use_any_servers->Bind (wxEVT_COMMAND_CHECKBOX_CLICKED, boost::bind (&EncodingServersPage::use_any_servers_changed, this));
}
checked_set (_use_any_servers, Config::instance()->use_any_servers ());
_servers_list->refresh ();
}
-
+
void use_any_servers_changed ()
{
Config::instance()->set_use_any_servers (_use_any_servers->GetValue ());
{
return wxBitmap ("keys", wxBITMAP_TYPE_PNG_RESOURCE);
}
-#endif
+#endif
private:
void setup ()
{
wxStaticText* m = new wxStaticText (_panel, wxID_ANY, _("Certificate chain for signing DCPs and KDMs:"));
_panel->GetSizer()->Add (m, 0, wxALL, _border);
-
+
wxBoxSizer* certificates_sizer = new wxBoxSizer (wxHORIZONTAL);
_panel->GetSizer()->Add (certificates_sizer, 0, wxLEFT | wxRIGHT, _border);
-
+
_certificates = new wxListCtrl (_panel, wxID_ANY, wxDefaultPosition, wxSize (400, 200), wxLC_REPORT | wxLC_SINGLE_SEL);
{
wxFont font = ip.GetFont ();
font.SetFamily (wxFONTFAMILY_TELETYPE);
ip.SetFont (font);
-
+
_certificates->InsertColumn (1, ip);
}
_panel->GetSizer()->Add (table, 1, wxALL | wxEXPAND, _border);
int r = 0;
-
+
_remake_certificates = new wxButton (_panel, wxID_ANY, _("Re-make certificates..."));
table->Add (_remake_certificates, wxGBPosition (r, 0), wxGBSpan (1, 3));
++r;
_export_decryption_certificate = new wxButton (_panel, wxID_ANY, _("Export DCP decryption certificate..."));
table->Add (_export_decryption_certificate, wxGBPosition (r, 0), wxGBSpan (1, 3));
++r;
-
+
_add_certificate->Bind (wxEVT_COMMAND_BUTTON_CLICKED, boost::bind (&KeysPage::add_certificate, this));
_remove_certificate->Bind (wxEVT_COMMAND_BUTTON_CLICKED, boost::bind (&KeysPage::remove_certificate, this));
_certificates->Bind (wxEVT_COMMAND_LIST_ITEM_SELECTED, boost::bind (&KeysPage::update_sensitivity, this));
update_decryption_private_key ();
update_sensitivity ();
}
-
+
void add_certificate ()
{
wxFileDialog* d = new wxFileDialog (_panel, _("Select Certificate File"));
-
+
if (d->ShowModal() == wxID_OK) {
try {
dcp::Certificate c (dcp::file_to_string (wx_to_std (d->GetPath ())));
error_dialog (_panel, wxString::Format (_("Could not read certificate file (%s)"), e.what ()));
}
}
-
+
d->Destroy ();
update_sensitivity ();
if (i == -1) {
return;
}
-
+
_certificates->DeleteItem (i);
_signer->certificates().remove (i);
Config::instance()->set_signer (_signer);
update_certificate_list ();
update_signer_private_key ();
}
-
+
d->Destroy ();
}
void update_signer_private_key ()
{
checked_set (_signer_private_key, dcp::private_key_fingerprint (_signer->key ()));
- }
+ }
void load_signer_private_key ()
{
error_dialog (_panel, wxString::Format (_("Could not read key file (%s)"), std_to_wx (p.string ())));
return;
}
-
+
_signer->set_key (dcp::file_to_string (p));
Config::instance()->set_signer (_signer);
update_signer_private_key ();
error_dialog (_panel, wxString::Format (_("Could not read certificate file (%s)"), e.what ()));
}
}
-
+
d->Destroy ();
update_sensitivity ();
void load_decryption_certificate ()
{
wxFileDialog* d = new wxFileDialog (_panel, _("Select Certificate File"));
-
+
if (d->ShowModal() == wxID_OK) {
try {
dcp::Certificate c (dcp::file_to_string (wx_to_std (d->GetPath ())));
error_dialog (_panel, wxString::Format (_("Could not read certificate file (%s)"), e.what ()));
}
}
-
+
d->Destroy ();
}
error_dialog (_panel, wxString::Format (_("Could not read key file (%s)"), e.what ()));
}
}
-
+
d->Destroy ();
}
_panel, _("Select Certificate File"), wxEmptyString, wxEmptyString, wxT ("PEM files (*.pem)|*.pem"),
wxFD_SAVE | wxFD_OVERWRITE_PROMPT
);
-
+
if (d->ShowModal () == wxID_OK) {
FILE* f = fopen_boost (wx_to_std (d->GetPath ()), "w");
if (!f) {
return _("TMS");
}
-#ifdef DCPOMATIC_OSX
+#ifdef DCPOMATIC_OSX
wxBitmap GetLargeIcon () const
{
return wxBitmap ("tms", wxBITMAP_TYPE_PNG_RESOURCE);
}
-#endif
+#endif
-private:
+private:
void setup ()
{
wxFlexGridSizer* table = new wxFlexGridSizer (2, DCPOMATIC_SIZER_X_GAP, DCPOMATIC_SIZER_Y_GAP);
table->AddGrowableCol (1, 1);
_panel->GetSizer()->Add (table, 1, wxALL | wxEXPAND, _border);
-
+
add_label_to_sizer (table, _panel, _("IP address"), true);
_tms_ip = new wxTextCtrl (_panel, wxID_ANY);
table->Add (_tms_ip, 1, wxEXPAND);
-
+
add_label_to_sizer (table, _panel, _("Target path"), true);
_tms_path = new wxTextCtrl (_panel, wxID_ANY);
table->Add (_tms_path, 1, wxEXPAND);
-
+
add_label_to_sizer (table, _panel, _("User name"), true);
_tms_user = new wxTextCtrl (_panel, wxID_ANY);
table->Add (_tms_user, 1, wxEXPAND);
-
+
add_label_to_sizer (table, _panel, _("Password"), true);
_tms_password = new wxTextCtrl (_panel, wxID_ANY);
table->Add (_tms_password, 1, wxEXPAND);
-
+
_tms_ip->Bind (wxEVT_COMMAND_TEXT_UPDATED, boost::bind (&TMSPage::tms_ip_changed, this));
_tms_path->Bind (wxEVT_COMMAND_TEXT_UPDATED, boost::bind (&TMSPage::tms_path_changed, this));
_tms_user->Bind (wxEVT_COMMAND_TEXT_UPDATED, boost::bind (&TMSPage::tms_user_changed, this));
void config_changed ()
{
Config* config = Config::instance ();
-
+
checked_set (_tms_ip, config->tms_ip ());
checked_set (_tms_path, config->tms_path ());
checked_set (_tms_user, config->tms_user ());
checked_set (_tms_password, config->tms_password ());
}
-
+
void tms_ip_changed ()
{
Config::instance()->set_tms_ip (wx_to_std (_tms_ip->GetValue ()));
}
-
+
void tms_path_changed ()
{
Config::instance()->set_tms_path (wx_to_std (_tms_path->GetValue ()));
}
-
+
void tms_user_changed ()
{
Config::instance()->set_tms_user (wx_to_std (_tms_user->GetValue ()));
}
-
+
void tms_password_changed ()
{
Config::instance()->set_tms_password (wx_to_std (_tms_password->GetValue ()));
public:
KDMEmailPage (wxSize panel_size, int border)
-#ifdef DCPOMATIC_OSX
+#ifdef DCPOMATIC_OSX
/* We have to force both width and height of this one */
: StandardPage (wxSize (480, 128), border)
#else
: StandardPage (panel_size, border)
-#endif
+#endif
{}
-
+
wxString GetName () const
{
return _("KDM Email");
}
-#ifdef DCPOMATIC_OSX
+#ifdef DCPOMATIC_OSX
wxBitmap GetLargeIcon () const
{
return wxBitmap ("kdm_email", wxBITMAP_TYPE_PNG_RESOURCE);
}
-#endif
+#endif
-private:
+private:
void setup ()
{
wxFlexGridSizer* table = new wxFlexGridSizer (2, DCPOMATIC_SIZER_X_GAP, DCPOMATIC_SIZER_Y_GAP);
add_label_to_sizer (table, _panel, _("Outgoing mail server"), true);
_mail_server = new wxTextCtrl (_panel, wxID_ANY);
table->Add (_mail_server, 1, wxEXPAND | wxALL);
-
+
add_label_to_sizer (table, _panel, _("Mail user name"), true);
_mail_user = new wxTextCtrl (_panel, wxID_ANY);
table->Add (_mail_user, 1, wxEXPAND | wxALL);
-
+
add_label_to_sizer (table, _panel, _("Mail password"), true);
_mail_password = new wxTextCtrl (_panel, wxID_ANY);
table->Add (_mail_password, 1, wxEXPAND | wxALL);
-
+
wxStaticText* plain = add_label_to_sizer (table, _panel, _("(password will be stored on disk in plaintext)"), false);
wxFont font = plain->GetFont();
font.SetStyle (wxFONTSTYLE_ITALIC);
add_label_to_sizer (table, _panel, _("Subject"), true);
_kdm_subject = new wxTextCtrl (_panel, wxID_ANY);
table->Add (_kdm_subject, 1, wxEXPAND | wxALL);
-
+
add_label_to_sizer (table, _panel, _("From address"), true);
_kdm_from = new wxTextCtrl (_panel, wxID_ANY);
table->Add (_kdm_from, 1, wxEXPAND | wxALL);
add_label_to_sizer (table, _panel, _("BCC address"), true);
_kdm_bcc = new wxTextCtrl (_panel, wxID_ANY);
table->Add (_kdm_bcc, 1, wxEXPAND | wxALL);
-
+
_kdm_email = new wxTextCtrl (_panel, wxID_ANY, wxEmptyString, wxDefaultPosition, wxSize (480, 128), wxTE_MULTILINE);
_panel->GetSizer()->Add (_kdm_email, 1, wxEXPAND | wxALL, _border);
void config_changed ()
{
Config* config = Config::instance ();
-
+
checked_set (_mail_server, config->mail_server ());
checked_set (_mail_user, config->mail_user ());
checked_set (_mail_password, config->mail_password ());
checked_set (_kdm_bcc, config->kdm_bcc ());
checked_set (_kdm_email, Config::instance()->kdm_email ());
}
-
+
void mail_server_changed ()
{
Config::instance()->set_mail_server (wx_to_std (_mail_server->GetValue ()));
}
-
+
void mail_user_changed ()
{
Config::instance()->set_mail_user (wx_to_std (_mail_user->GetValue ()));
}
-
+
void mail_password_changed ()
{
Config::instance()->set_mail_password (wx_to_std (_mail_password->GetValue ()));
{
Config::instance()->set_kdm_subject (wx_to_std (_kdm_subject->GetValue ()));
}
-
+
void kdm_from_changed ()
{
Config::instance()->set_kdm_from (wx_to_std (_kdm_from->GetValue ()));
{
Config::instance()->set_kdm_bcc (wx_to_std (_kdm_bcc->GetValue ()));
}
-
+
void kdm_email_changed ()
{
if (_kdm_email->GetValue().IsEmpty ()) {
, _log_timing (0)
{}
-private:
+private:
void setup ()
{
wxFlexGridSizer* table = new wxFlexGridSizer (2, DCPOMATIC_SIZER_X_GAP, DCPOMATIC_SIZER_Y_GAP);
#ifdef __WXOSX__
wxStaticText* m = new wxStaticText (_panel, wxID_ANY, _("Log:"));
table->Add (m, 0, wxALIGN_TOP | wxLEFT | wxRIGHT | wxEXPAND | wxALL | wxALIGN_RIGHT, 6);
-#else
+#else
wxStaticText* m = new wxStaticText (_panel, wxID_ANY, _("Log"));
table->Add (m, 0, wxALIGN_TOP | wxLEFT | wxRIGHT | wxEXPAND | wxALL, 6);
-#endif
-
+#endif
+
{
wxBoxSizer* t = new wxBoxSizer (wxVERTICAL);
_log_general = new wxCheckBox (_panel, wxID_ANY, _("General"));
table->Add (t, 0, wxALL, 6);
}
-#ifdef DCPOMATIC_WINDOWS
+#ifdef DCPOMATIC_WINDOWS
_win32_console = new wxCheckBox (_panel, wxID_ANY, _("Open console window"));
table->Add (_win32_console, 1, wxEXPAND | wxALL);
table->AddSpacer (0);
-#endif
-
+#endif
+
_maximum_j2k_bandwidth->SetRange (1, 1000);
_maximum_j2k_bandwidth->Bind (wxEVT_COMMAND_SPINCTRL_UPDATED, boost::bind (&AdvancedPage::maximum_j2k_bandwidth_changed, this));
_allow_any_dcp_frame_rate->Bind (wxEVT_COMMAND_CHECKBOX_CLICKED, boost::bind (&AdvancedPage::allow_any_dcp_frame_rate_changed, this));
_log_timing->Bind (wxEVT_COMMAND_CHECKBOX_CLICKED, boost::bind (&AdvancedPage::log_changed, this));
#ifdef DCPOMATIC_WINDOWS
_win32_console->Bind (wxEVT_COMMAND_CHECKBOX_CLICKED, boost::bind (&AdvancedPage::win32_console_changed, this));
-#endif
+#endif
}
void config_changed ()
{
Config* config = Config::instance ();
-
+
checked_set (_maximum_j2k_bandwidth, config->maximum_j2k_bandwidth() / 1000000);
checked_set (_allow_any_dcp_frame_rate, config->allow_any_dcp_frame_rate ());
checked_set (_log_general, config->log_types() & Log::TYPE_GENERAL);
Config::instance()->set_log_types (types);
}
-#ifdef DCPOMATIC_WINDOWS
+#ifdef DCPOMATIC_WINDOWS
void win32_console_changed ()
{
Config::instance()->set_win32_console (_win32_console->GetValue ());
}
-#endif
-
+#endif
+
wxSpinCtrl* _maximum_j2k_bandwidth;
wxCheckBox* _allow_any_dcp_frame_rate;
wxCheckBox* _log_general;
wxCheckBox* _log_warning;
wxCheckBox* _log_error;
wxCheckBox* _log_timing;
-#ifdef DCPOMATIC_WINDOWS
+#ifdef DCPOMATIC_WINDOWS
wxCheckBox* _win32_console;
-#endif
+#endif
};
-
+
wxPreferencesEditor*
create_config_dialog ()
{
wxSize ps = wxSize (-1, -1);
int const border = 8;
#endif
-
+
e->AddPage (new GeneralPage (ps, border));
e->AddPage (new DefaultsPage (ps, border));
e->AddPage (new EncodingServersPage (ps, border));
if (_setting) {
return;
}
-
+
optional<size_t> preset = _editor->get().preset ();
_preset_check->SetValue (preset);
}
}
-
+
void check_for_preset ();
void preset_check_clicked ();
void preset_choice_changed ();
-
+
wxCheckBox* _preset_check;
wxChoice* _preset_choice;
ColourConversionEditor* _editor;
++n;
}
}
-
+
_join->Enable (n > 1);
-
+
_find_missing->Enable (_content.size() == 1 && !_content.front()->paths_valid ());
_properties->Enable (_content.size() == 1);
_re_examine->Enable (!_content.empty ());
} else {
_kdm->Enable (false);
}
-
+
_remove->Enable (!_content.empty ());
_parent->PopupMenu (_menu, p);
}
if (_content.empty ()) {
return;
}
-
+
RepeatDialog* d = new RepeatDialog (_parent);
if (d->ShowModal() != wxID_OK) {
d->Destroy ();
if (!fc) {
continue;
}
-
+
shared_ptr<TimelineVideoContentView> video;
shared_ptr<TimelineAudioContentView> audio;
if (!film) {
return;
}
-
+
shared_ptr<Content> content;
/* XXX: a bit nasty */
}
shared_ptr<Job> j (new ExamineContentJob (film, content));
-
+
_job_connection = j->Finished.connect (
bind (
&ContentMenu::maybe_found_missing,
boost::weak_ptr<Content> (content)
)
);
-
+
JobManager::instance()->add (j);
}
DCPOMATIC_ASSERT (!_content.empty ());
shared_ptr<DCPContent> dcp = dynamic_pointer_cast<DCPContent> (_content.front ());
DCPOMATIC_ASSERT (dcp);
-
+
wxFileDialog* d = new wxFileDialog (_parent, _("Select KDM"));
-
+
if (d->ShowModal() == wxID_OK) {
dcp->add_kdm (dcp::EncryptedKDM (dcp::file_to_string (wx_to_std (d->GetPath ()))));
shared_ptr<Film> film = _film.lock ();
DCPOMATIC_ASSERT (film);
film->examine_content (dcp);
}
-
+
d->Destroy ();
}
public:
ContentMenu (wxWindow* p);
~ContentMenu ();
-
+
void popup (boost::weak_ptr<Film>, ContentList, TimelineContentViewList, wxPoint);
private:
void kdm ();
void remove ();
void maybe_found_missing (boost::weak_ptr<Job>, boost::weak_ptr<Content>, boost::weak_ptr<Content>);
-
+
wxMenu* _menu;
/** Film that we are working with; set up by popup() */
boost::weak_ptr<Film> _film;
{
wxBoxSizer* s = new wxBoxSizer (wxHORIZONTAL);
-
+
_content = new wxListCtrl (_panel, wxID_ANY, wxDefaultPosition, wxSize (320, 160), wxLC_REPORT | wxLC_NO_HEADER);
_content->DragAcceptFiles (true);
s->Add (_content, 1, wxEXPAND | wxTOP | wxBOTTOM, 6);
_content->SetColumnWidth (0, 512);
wxBoxSizer* b = new wxBoxSizer (wxVERTICAL);
-
+
_add_file = new wxButton (_panel, wxID_ANY, _("Add file(s)..."));
_add_file->SetToolTip (_("Add video, image or sound files to the film."));
b->Add (_add_file, 0, wxEXPAND | wxALL, DCPOMATIC_BUTTON_STACK_GAP);
-
+
_add_folder = new wxButton (_panel, wxID_ANY, _("Add folder..."));
_add_folder->SetToolTip (_("Add a folder of image files (which will be used as a moving image sequence) or a DCP."));
b->Add (_add_folder, 1, wxEXPAND | wxALL, DCPOMATIC_BUTTON_STACK_GAP);
-
+
_remove = new wxButton (_panel, wxID_ANY, _("Remove"));
_remove->SetToolTip (_("Remove the selected piece of content from the film."));
b->Add (_remove, 0, wxEXPAND | wxALL, DCPOMATIC_BUTTON_STACK_GAP);
-
+
_earlier = new wxButton (_panel, wxID_ANY, _("Up"));
_earlier->SetToolTip (_("Move the selected piece of content earlier in the film."));
b->Add (_earlier, 0, wxEXPAND | wxALL, DCPOMATIC_BUTTON_STACK_GAP);
-
+
_later = new wxButton (_panel, wxID_ANY, _("Down"));
_later->SetToolTip (_("Move the selected piece of content later in the film."));
b->Add (_later, 0, wxEXPAND | wxALL, DCPOMATIC_BUTTON_STACK_GAP);
-
+
_timeline = new wxButton (_panel, wxID_ANY, _("Timeline..."));
_timeline->SetToolTip (_("Open the timeline for the film."));
b->Add (_timeline, 0, wxEXPAND | wxALL, DCPOMATIC_BUTTON_STACK_GAP);
{
ContentList c = selected ();
VideoContentList vc;
-
+
for (ContentList::iterator i = c.begin(); i != c.end(); ++i) {
shared_ptr<VideoContent> t = dynamic_pointer_cast<VideoContent> (*i);
if (t) {
{
ContentList c = selected ();
AudioContentList ac;
-
+
for (ContentList::iterator i = c.begin(); i != c.end(); ++i) {
shared_ptr<AudioContent> t = dynamic_pointer_cast<AudioContent> (*i);
if (t) {
{
ContentList c = selected ();
SubtitleContentList sc;
-
+
for (ContentList::iterator i = c.begin(); i != c.end(); ++i) {
shared_ptr<SubtitleContent> t = dynamic_pointer_cast<SubtitleContent> (*i);
if (t) {
{
ContentList c = selected ();
FFmpegContentList sc;
-
+
for (ContentList::iterator i = c.begin(); i != c.end(); ++i) {
shared_ptr<FFmpegContent> t = dynamic_pointer_cast<FFmpegContent> (*i);
if (t) {
for (list<ContentSubPanel*>::iterator i = _panels.begin(); i != _panels.end(); ++i) {
(*i)->film_changed (p);
}
-}
+}
void
ContentPanel::selection_changed ()
int r = d->ShowModal ();
boost::filesystem::path const path (wx_to_std (d->GetPath ()));
d->Destroy ();
-
+
if (r != wxID_OK) {
return;
}
error_dialog (_panel, _("Could not find a DCP nor a set of images in that folder."));
}
} else {
-
+
ImageSequenceDialog* e = new ImageSequenceDialog (_panel);
r = e->ShowModal ();
float const frame_rate = e->frame_rate ();
}
shared_ptr<Content> content;
-
+
try {
shared_ptr<ImageContent> content (new ImageContent (_film, path));
content->set_video_frame_rate (frame_rate);
_timeline_dialog->Destroy ();
_timeline_dialog = 0;
}
-
+
_timeline_dialog = new TimelineDialog (this, _film);
_timeline_dialog->Show ();
}
if (property == ContentProperty::PATH || property == ContentProperty::POSITION || property == DCPContentProperty::CAN_BE_PLAYED) {
setup ();
}
-
+
for (list<ContentSubPanel*>::iterator i = _panels.begin(); i != _panels.end(); ++i) {
(*i)->film_content_changed (property);
}
{
ContentList content = _film->content ();
sort (content.begin(), content.end(), ContentSorter ());
-
+
/* First, check to see if anything has changed and bail if not; this avoids
flickering on OS X.
*/
if (!valid) {
s = _("MISSING: ") + s;
}
-
+
proposed.push_back (s);
}
-
+
if (existing == proposed) {
return;
}
-
+
/* Something has changed: set up the control */
-
+
string selected_summary;
int const s = _content->GetNextItem (-1, wxLIST_NEXT_ALL, wxLIST_STATE_SELECTED);
if (s != -1) {
selected_summary = wx_to_std (_content->GetItemText (s));
}
-
+
_content->DeleteAllItems ();
for (ContentList::iterator i = content.begin(); i != content.end(); ++i) {
bool const needs_kdm = dcp && !dcp->can_be_played ();
string s = (*i)->summary ();
-
+
if (!valid) {
s = _("MISSING: ") + s;
}
if (!_film) {
return;
}
-
+
wxString* paths = event.GetFiles ();
list<boost::filesystem::path> path_list;
for (int i = 0; i < event.GetNumberOfFiles(); i++) {
/* It has been reported that the paths returned from e.g. wxFileDialog are not always sorted;
I can't reproduce that, but sort them anyway.
*/
-
+
paths.sort (ImageFilenameSorter ());
/* XXX: check for lots of files here and do something */
FFmpegContentList selected_ffmpeg ();
void add_file_clicked ();
-
-private:
+
+private:
void selection_changed ();
void add_folder_clicked ();
void remove_clicked ();
}
/* XXX: this could be better wrt audio streams */
-
+
shared_ptr<SingleStreamAudioContent> single = dynamic_pointer_cast<SingleStreamAudioContent> (content);
if (single) {
add_property (
std_to_wx (raw_convert<string> (single->audio_stream()->channels ()))
);
}
-
+
layout ();
}
}
_connections.clear ();
-
+
_content = content;
_wrapped->Enable (!_content.empty ());
}
_ignore_model_changes = false;
}
-
+
private:
-
+
void set_single ()
{
if (_wrapped->IsShown ()) {
if (_button->IsShown ()) {
return;
}
-
+
_wrapped->Hide ();
_sizer->Detach (_wrapped);
_button->Show ();
update_from_model ();
}
}
-
+
T* _wrapped;
wxGridBagSizer* _sizer;
wxGBPosition _position;
_sizer->Add (grid, 0, wxEXPAND | wxALL, 8);
int r = 0;
-
+
add_label_to_grid_bag_sizer (grid, _panel, _("Name"), true, wxGBPosition (r, 0));
_name = new wxTextCtrl (_panel, wxID_ANY);
grid->Add (_name, wxGBPosition(r, 1), wxDefaultSpan, wxEXPAND | wxLEFT | wxRIGHT);
++r;
-
+
int flags = wxALIGN_CENTER_VERTICAL;
#ifdef __WXOSX__
flags |= wxALIGN_RIGHT;
-#endif
+#endif
_use_isdcf_name = new wxCheckBox (_panel, wxID_ANY, _("Use ISDCF name"));
grid->Add (_use_isdcf_name, wxGBPosition (r, 0), wxDefaultSpan, flags);
_notebook->AddPage (make_video_panel (), _("Video"), false);
_notebook->AddPage (make_audio_panel (), _("Audio"), false);
-
+
_signed = new wxCheckBox (_panel, wxID_ANY, _("Signed"));
grid->Add (_signed, wxGBPosition (r, 0), wxGBSpan (1, 2));
++r;
-
+
_encrypted = new wxCheckBox (_panel, wxID_ANY, _("Encrypted"));
grid->Add (_encrypted, wxGBPosition (r, 0), wxGBSpan (1, 2));
++r;
grid->Add (s, wxGBPosition (r, 1));
++r;
}
-
+
add_label_to_grid_bag_sizer (grid, _panel, _("Standard"), true, wxGBPosition (r, 0));
_standard = new wxChoice (_panel, wxID_ANY);
grid->Add (_standard, wxGBPosition (r, 1), wxDefaultSpan, wxALIGN_CENTER_VERTICAL);
if (!_film) {
return;
}
-
+
_film->set_j2k_bandwidth (_j2k_bandwidth->GetValue() * 1000000);
}
_film->set_encrypted (_encrypted->GetValue ());
}
-
+
/** Called when the frame rate choice widget has been changed */
void
DCPPanel::frame_rate_choice_changed ()
++i;
++n;
}
-
+
if (i == ratios.end()) {
checked_set (_container, -1);
checked_set (_container_size, wxT (""));
dcp::Size const size = fit_ratio_within (_film->container()->ratio(), _film->full_frame ());
checked_set (_container_size, wxString::Format ("%dx%d", size.width, size.height));
}
-
+
setup_dcp_name ();
-}
+}
/** Called when the container widget has been changed */
void
DCPPanel::set_film (shared_ptr<Film> film)
{
_film = film;
-
+
film_changed (Film::NAME);
film_changed (Film::USE_ISDCF_NAME);
film_changed (Film::CONTENT);
}
_burn_subtitles->Enable (s);
_signed->Enable (si);
-
+
_encrypted->Enable (s);
_key->Enable (s && _film && _film->encrypted ());
_edit_key->Enable (s && _film && _film->encrypted ());
if (!_film) {
return;
}
-
+
_film->set_video_frame_rate (_film->best_video_frame_rate ());
}
panel->SetSizer (sizer);
int r = 0;
-
+
add_label_to_grid_bag_sizer (grid, panel, _("Container"), true, wxGBPosition (r, 0));
{
wxBoxSizer* s = new wxBoxSizer (wxHORIZONTAL);
}
return min;
-}
+}
void
DCPPanel::setup_audio_channels_choice ()
panel->SetSizer (sizer);
int r = 0;
-
+
add_label_to_grid_bag_sizer (grid, panel, _("Channels"), true, wxGBPosition (r, 0));
_audio_channels = new wxChoice (panel, wxID_ANY);
setup_audio_channels_choice ();
_audio_dialog->Destroy ();
_audio_dialog = 0;
}
-
+
AudioDialog* d = new AudioDialog (_panel, _film);
d->Show ();
}
void edit_key_clicked ();
void audio_processor_changed ();
void show_audio_clicked ();
-
+
void setup_frame_rate_widget ();
void setup_container ();
void setup_dcp_name ();
DirPickerCtrl::SetPath (wxString p)
{
_path = p;
-
+
if (_path == wxStandardPaths::Get().GetDocumentsDir()) {
_folder->SetLabel (_("My Documents"));
} else {
private:
void browse_clicked ();
-
+
wxStaticText* _folder;
wxButton* _browse;
wxString _path;
add (_("Country"), true);
_country = add (new wxChoice (this, wxID_ANY));
_country->Append (N_("Hashemite Kingdom of Jordan"));
-
+
add (_("Cinema"), true);
_cinema = add (new wxChoice (this, wxID_ANY));
_cinema->Append (N_("Motion Picture Solutions London Mobile & QC"));
/* Already set up */
return;
}
-
+
_country->Append (_("Fetching..."));
_country->SetSelection (0);
#ifdef DCPOMATIC_OSX
wxMilliSleep (200);
-#endif
+#endif
signal_manager->when_idle (boost::bind (&DolbyCertificateDialog::finish_country_selected, this));
}
downloaded (false);
_message->SetLabel (_("Downloading certificate"));
-#ifdef DCPOMATIC_OSX
+#ifdef DCPOMATIC_OSX
/* This is necessary on OS X, otherwise the SetLabel() above has no visible effect */
wxMilliSleep (200);
-#endif
+#endif
signal_manager->when_idle (boost::bind (&DoremiCertificateDialog::finish_download, this, serial));
}
font.SetStyle (wxFONTSTYLE_ITALIC);
font.SetPointSize (font.GetPointSize() - 1);
_message->SetFont (font);
-
+
_download->Bind (wxEVT_COMMAND_BUTTON_CLICKED, boost::bind (&DownloadCertificateDialog::download, this));
_download->Enable (false);
ok->Enable (done);
}
-
+
protected:
void add_common_widgets ();
void downloaded (bool done);
-
+
boost::function<void (boost::filesystem::path)> _load;
wxStaticText* _message;
wxButton* _download;
void refresh ()
{
_list->DeleteAllItems ();
-
+
std::vector<T> current = _get ();
for (typename std::vector<T>::iterator i = current.begin (); i != current.end(); ++i) {
add_to_control (*i);
}
- }
+ }
-private:
+private:
void add_to_control (T item)
{
dialog->ShowModal ();
add_to_control (dialog->get ());
-
+
std::vector<T> all = _get ();
all.push_back (dialog->get ());
_set (all);
-
+
dialog->Destroy ();
}
T copy (all[item]);
add_to_control (copy);
-
+
all.push_back (copy);
_set (all);
}
dialog->ShowModal ();
all[item] = dialog->get ();
dialog->Destroy ();
-
+
for (int i = 0; i < _columns; ++i) {
_list->SetItem (item, i, std_to_wx (_column (all[item], i)));
}
if (i == -1) {
return;
}
-
+
_list->DeleteItem (i);
std::vector<T> all = _get ();
all.erase (all.begin() + i);
_main_notebook->AddPage (_content_panel->panel (), _("Content"), true);
_dcp_panel = new DCPPanel (_main_notebook, _film);
_main_notebook->AddPage (_dcp_panel->panel (), _("DCP"), false);
-
+
JobManager::instance()->ActiveJobsChanged.connect (
bind (&FilmEditor::active_jobs_changed, this, _1)
);
FilmEditor::film_changed (Film::Property p)
{
ensure_ui_thread ();
-
+
if (!_film) {
return;
}
FilmEditor::film_content_changed (int property)
{
ensure_ui_thread ();
-
+
if (!_film) {
/* We call this method ourselves (as well as using it as a signal handler)
so _film can be 0.
if (_film == film) {
return;
}
-
+
_film = film;
_content_panel->set_film (_film);
Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
-
+
/** @file src/wx/film_editor.h
* @brief FilmEditor class.
*/
ContentPanel* content_panel () const {
return _content_panel;
}
-
+
boost::shared_ptr<Film> film () const {
return _film;
}
#ifndef __WXOSX__
_panel->SetDoubleBuffered (true);
#endif
-
+
_panel->SetBackgroundStyle (wxBG_STYLE_PAINT);
-
+
_v_sizer = new wxBoxSizer (wxVERTICAL);
SetSizer (_v_sizer);
_forward_button->Bind (wxEVT_COMMAND_BUTTON_CLICKED, boost::bind (&FilmViewer::forward_clicked, this));
set_film (shared_ptr<Film> ());
-
+
JobManager::instance()->ActiveJobsChanged.connect (
bind (&FilmViewer::active_jobs_changed, this, _1)
);
_film = film;
_frame.reset ();
-
+
update_position_slider ();
update_position_label ();
-
+
if (!_film) {
return;
}
in the preview.
*/
_player->set_burn_subtitles (true);
-
+
_film_connection = _film->Changed.connect (boost::bind (&FilmViewer::film_changed, this, _1));
_player_connection = _player->Changed.connect (boost::bind (&FilmViewer::player_changed, this, _1));
} catch (exception& e) {
error_dialog (this, wxString::Format (_("Could not get video for view (%s)"), std_to_wx(e.what()).data()));
}
-
+
if (!pvf.empty ()) {
try {
_frame = pvf.front()->image (PIX_FMT_RGB24, true, boost::bind (&Log::dcp_log, _film->log().get(), _1, _2));
if (pvf.front()->colour_conversion()) {
yuv_to_rgb = pvf.front()->colour_conversion().get().yuv_to_rgb();
}
-
+
_frame = _frame->scale (_frame->size(), yuv_to_rgb, PIX_FMT_RGB24, false);
_position = pvf.front()->time ();
_inter_position = pvf.front()->inter_position ();
} else {
get (_position + frame, true);
}
-
+
update_position_label ();
update_position_slider ();
}
}
Ratio const * container = _film->container ();
-
+
float const panel_ratio = _panel_size.ratio ();
float const film_ratio = container ? container->ratio () : 1.78;
-
+
if (panel_ratio < film_ratio) {
/* panel is less widscreen than the film; clamp width */
_out_size.width = _panel_size.width;
if (!_film || _film->video_frame_rate() == 0) {
return;
}
-
+
if (_play_button->GetValue()) {
_timer.Start (1000 / _film->video_frame_rate());
} else {
_slider->SetValue (0);
return;
}
-
+
DCPTime const len = _film->length ();
if (len.get ()) {
{
if (a) {
list<shared_ptr<Job> > jobs = JobManager::instance()->get ();
- list<shared_ptr<Job> >::iterator i = jobs.begin ();
+ list<shared_ptr<Job> >::iterator i = jobs.begin ();
while (i != jobs.end() && boost::dynamic_pointer_cast<ExamineContentJob> (*i) == 0) {
++i;
}
-
+
if (i == jobs.end() || (*i)->finished()) {
/* no examine content job running, so we're ok to use the viewer */
a = false;
}
}
-
+
_slider->Enable (!a);
_play_button->Enable (!a);
}
FilmViewer::setup_sensitivity ()
{
bool const c = _film && !_film->content().empty ();
-
+
_slider->Enable (c);
_back_button->Enable (c);
_forward_button->Enable (c);
sizer->Layout ();
sizer->SetSizeHints (this);
}
-
+
void
FilterDialog::active_changed ()
private:
void active_changed ();
-
+
FilterEditor* _filters;
};
{
wxBoxSizer* sizer = new wxBoxSizer (wxVERTICAL);
SetSizer (sizer);
-
+
vector<Filter const *> filters = Filter::all ();
typedef map<string, list<Filter const *> > CategoryMap;
ip.SetWidth (100);
_fonts->InsertColumn (0, ip);
}
-
+
{
wxListItem ip;
ip.SetId (1);
if (!content) {
return;
}
-
+
_fonts->DeleteAllItems ();
list<shared_ptr<Font> > fonts = content->fonts ();
size_t n = 0;
if (item == -1) {
return;
}
-
+
/* The wxFD_CHANGE_DIR here prevents a `could not set working directory' error 123 on Windows when using
non-Latin filenames or paths.
*/
void set_file_clicked ();
void update_sensitivity ();
void selection_changed ();
-
+
boost::weak_ptr<SubtitleContent> _content;
wxListCtrl* _fonts;
wxButton* _set_file;
if (_wanted->GetValue().IsEmpty()) {
return 0;
}
-
+
return lexical_cast<float> (wx_to_std (_wanted->GetValue ()));
}
{
_text->Clear ();
bool hint = false;
-
+
boost::shared_ptr<Film> film = _film.lock ();
if (!film) {
return;
}
ContentList content = film->content ();
-
+
_text->BeginStandardBullet (N_("standard/circle"), 1, 50);
bool big_font_files = false;
_text->WriteText (_("All of your content is at 1.85:1 or narrower but your DCP's container is Scope (2.39:1). This will pillar-box your content inside a Flat (1.85:1) frame. You may prefer to set your DCP's container to Flat (1.85:1) in the \"DCP\" tab."));
_text->Newline ();
}
-
+
if (film->video_frame_rate() != 24 && film->video_frame_rate() != 48) {
hint = true;
_text->WriteText (wxString::Format (_("Your DCP frame rate (%d fps) may cause problems in a few (mostly older) projectors. Use 24 or 48 frames per second to be on the safe side."), film->video_frame_rate()));
private:
void film_changed ();
-
+
boost::weak_ptr<Film> _film;
wxRichTextCtrl* _text;
add (_("Subtitle Language (e.g. FR)"), true);
_subtitle_language = add (new wxTextCtrl (this, wxID_ANY));
-
+
add (_("Territory (e.g. UK)"), true);
_territory = add (new wxTextCtrl (this, wxID_ANY));
add (_("Mastered luminance (e.g. 14fl)"), true);
_mastered_luminance = add (new wxTextCtrl (this, wxID_ANY));
-
+
_content_version->SetRange (1, 1024);
_content_version->SetValue (dm.content_version);
gauge_message->Add (_message, 1, wxEXPAND | wxALIGN_CENTER_VERTICAL | wxALL, 6);
table->Insert (n, gauge_message, 1, wxEXPAND | wxLEFT | wxRIGHT);
++n;
-
+
_cancel = new wxButton (panel, wxID_ANY, _("Cancel"));
_cancel->Bind (wxEVT_COMMAND_BUTTON_CLICKED, &JobRecord::cancel_clicked, this);
table->Insert (n, _cancel, 1, wxALIGN_CENTER_VERTICAL | wxALL, 3);
++n;
-
+
_pause = new wxButton (_panel, wxID_ANY, _("Pause"));
_pause->Bind (wxEVT_COMMAND_BUTTON_CLICKED, &JobRecord::pause_clicked, this);
table->Insert (n, _pause, 1, wxALIGN_CENTER_VERTICAL | wxALL, 3);
++n;
-
+
_details = new wxButton (_panel, wxID_ANY, _("Details..."));
_details->Bind (wxEVT_COMMAND_BUTTON_CLICKED, &JobRecord::details_clicked, this);
_details->Enable (false);
table->Insert (n, _details, 1, wxALIGN_CENTER_VERTICAL | wxALL, 3);
++n;
-
+
_progress_connection = job->Progress.connect (boost::bind (&JobRecord::progress, this));
_finished_connection = job->Finished.connect (boost::bind (&JobRecord::finished, this));
-
+
table->Layout ();
}
void finished ()
{
progress ();
-
+
if (!_job->finished_cancelled ()) {
_gauge->SetValue (100);
}
-
+
_cancel->Enable (false);
_pause->Enable (false);
if (!_job->error_details().empty ()) {
s[0] = toupper (s[0]);
error_dialog (_window, std_to_wx (String::compose ("%1.\n\n%2", s, _job->error_details())));
}
-
+
void cancel_clicked (wxCommandEvent &)
{
_job->cancel ();
_pause->SetLabel (_("Resume"));
}
}
-
+
boost::shared_ptr<Job> _job;
wxScrolledWindow* _window;
wxPanel* _panel;
wxPanel* _panel;
wxFlexGridSizer* _table;
boost::shared_ptr<wxTimer> _timer;
-
+
std::list<boost::shared_ptr<JobRecord> > _job_records;
};
wxStaticText* h = new wxStaticText (this, wxID_ANY, _("Screens"));
h->SetFont (subheading_font);
vertical->Add (h, 0, wxALIGN_CENTER_VERTICAL);
-
+
wxBoxSizer* targets = new wxBoxSizer (wxHORIZONTAL);
_targets = new wxTreeCtrl (this, wxID_ANY, wxDefaultPosition, wxDefaultSize, wxTR_HIDE_ROOT | wxTR_MULTIPLE | wxTR_HAS_BUTTONS);
targets->Add (_targets, 1, wxEXPAND | wxTOP | wxRIGHT, DCPOMATIC_SIZER_GAP);
target_buttons->Add (_edit_cinema, 1, wxEXPAND | wxALL, DCPOMATIC_BUTTON_STACK_GAP);
_remove_cinema = new wxButton (this, wxID_ANY, _("Remove Cinema"));
target_buttons->Add (_remove_cinema, 1, wxEXPAND | wxALL, DCPOMATIC_BUTTON_STACK_GAP);
-
+
_add_screen = new wxButton (this, wxID_ANY, _("Add Screen..."));
target_buttons->Add (_add_screen, 1, wxEXPAND | wxALL, DCPOMATIC_BUTTON_STACK_GAP);
_edit_screen = new wxButton (this, wxID_ANY, _("Edit Screen..."));
h = new wxStaticText (this, wxID_ANY, S_("KDM|Timing"));
h->SetFont (subheading_font);
vertical->Add (h, 0, wxALIGN_CENTER_VERTICAL | wxTOP, DCPOMATIC_SIZER_Y_GAP * 2);
-
+
wxFlexGridSizer* table = new wxFlexGridSizer (3, DCPOMATIC_SIZER_X_GAP, DCPOMATIC_SIZER_Y_GAP);
add_label_to_sizer (table, this, _("From"), true);
wxDateTime from;
h = new wxStaticText (this, wxID_ANY, _("CPL"));
h->SetFont (subheading_font);
vertical->Add (h, 0, wxALIGN_CENTER_VERTICAL | wxTOP, DCPOMATIC_SIZER_Y_GAP * 2);
-
+
/* CPL choice */
wxBoxSizer* s = new wxBoxSizer (wxHORIZONTAL);
add_label_to_sizer (s, this, _("CPL"), true);
_cpl_annotation_text = new wxStaticText (this, wxID_ANY, "");
table->Add (_cpl_annotation_text);
vertical->Add (table, 0, wxEXPAND | wxTOP, DCPOMATIC_SIZER_GAP + 2);
-
+
_cpls = film->cpls ();
update_cpl_choice ();
-
+
/* Sub-heading: Output */
h = new wxStaticText (this, wxID_ANY, _("Output"));
h->SetFont (subheading_font);
vertical->Add (h, 0, wxALIGN_CENTER_VERTICAL | wxTOP, DCPOMATIC_SIZER_Y_GAP * 2);
-
+
table = new wxFlexGridSizer (2, DCPOMATIC_SIZER_X_GAP, 0);
add_label_to_sizer (table, this, _("KDM type"), true);
table->Add (_write_to, 1, wxEXPAND);
#ifdef DCPOMATIC_USE_OWN_DIR_PICKER
- _folder = new DirPickerCtrl (this);
-#else
+ _folder = new DirPickerCtrl (this);
+#else
_folder = new wxDirPickerCtrl (this, wxID_ANY, wxEmptyString, wxDirSelectorPromptStr, wxDefaultPosition, wxSize (300, -1));
#endif
_folder->SetPath (wxStandardPaths::Get().GetDocumentsDir());
-
+
table->Add (_folder, 1, wxEXPAND);
_email = new wxRadioButton (this, wxID_ANY, _("Send by email"));
table->Add (_email, 1, wxEXPAND);
table->AddSpacer (0);
-
+
vertical->Add (table, 0, wxEXPAND | wxTOP, DCPOMATIC_SIZER_GAP);
/* Make an overall sizer to get a nice border, and put some buttons in */
bool const sc = selected_cinemas().size() == 1;
bool const ss = selected_screens().size() == 1;
bool const sd = _cpl->GetSelection() != -1;
-
+
_edit_cinema->Enable (sc);
_remove_cinema->Enable (sc);
-
+
_add_screen->Enable (sc);
_edit_screen->Enable (ss);
_remove_screen->Enable (ss);
}
pair<wxTreeItemId, shared_ptr<Cinema> > c = selected_cinemas().front();
-
+
CinemaDialog* d = new CinemaDialog (this, "Edit cinema", c.second->name, c.second->email);
if (d->ShowModal () == wxID_OK) {
c.second->name = d->name ();
Config::instance()->changed ();
}
- d->Destroy ();
+ d->Destroy ();
}
void
}
shared_ptr<Cinema> c = selected_cinemas().front().second;
-
+
ScreenDialog* d = new ScreenDialog (this, "Add Screen");
if (d->ShowModal () != wxID_OK) {
return;
}
pair<wxTreeItemId, shared_ptr<Screen> > s = selected_screens().front();
-
+
ScreenDialog* d = new ScreenDialog (this, "Edit screen", s.second->name, s.second->certificate);
if (d->ShowModal () == wxID_OK) {
s.second->name = d->name ();
KDMDialog::update_cpl_choice ()
{
_cpl->Clear ();
-
+
for (vector<CPLSummary>::const_iterator i = _cpls.begin(); i != _cpls.end(); ++i) {
_cpl->Append (std_to_wx (i->cpl_id));
error_dialog (this, _("This is not a valid CPL file"));
return;
}
-
+
update_cpl_choice ();
_cpl->SetSelection (_cpls.size() - 1);
update_cpl_summary ();
boost::posix_time::ptime from () const;
/** @return KDM until time in local time */
boost::posix_time::ptime until () const;
-
+
boost::filesystem::path cpl () const;
boost::filesystem::path directory () const;
bool write_to () const;
void cpl_browse_clicked ();
static boost::posix_time::ptime posix_time (wxDatePickerCtrl *, wxTimePickerCtrl *);
-
+
wxTreeCtrl* _targets;
wxButton* _add_cinema;
wxButton* _edit_cinema;
}
validator.SetIncludes (list);
-
+
_key = add (new wxTextCtrl (this, wxID_ANY, _(""), wxDefaultPosition, size, 0, validator));
_key->SetValue (std_to_wx (key.hex ()));
_key->SetMaxLength (32);
private:
void key_changed ();
void random ();
-
+
wxTextCtrl* _key;
wxButton* _random;
};
std::string leaf_common_name () const {
return wx_to_std (_leaf_common_name->GetValue ());
}
-
+
private:
wxTextCtrl* _organisation;
wxTextCtrl* _intermediate_common_name;
wxTextCtrl* _leaf_common_name;
};
-
+
add (_("Create in folder"), true);
#ifdef DCPOMATIC_USE_OWN_DIR_PICKER
- _folder = new DirPickerCtrl (this);
-#else
+ _folder = new DirPickerCtrl (this);
+#else
_folder = new wxDirPickerCtrl (this, wxID_ANY, wxEmptyString, wxDirSelectorPromptStr, wxDefaultPosition, wxSize (300, -1));
#endif
if (!_directory) {
_directory = Config::instance()->default_directory_or (wx_to_std (wxStandardPaths::Get().GetDocumentsDir()));
}
-
+
_folder->SetPath (std_to_wx (_directory.get().string()));
add (_folder);
wxTextCtrl* _name;
#ifdef DCPOMATIC_USE_OWN_DIR_PICKER
DirPickerCtrl* _folder;
-#else
+#else
wxDirPickerCtrl* _folder;
-#endif
+#endif
static boost::optional<boost::filesystem::path> _directory;
};
#ifdef __WXOSX__
flags |= wxALIGN_RIGHT;
t += wxT (":");
-#endif
+#endif
wxStaticText* m = new wxStaticText (this, wxID_ANY, t);
_table->Add (m, 1, flags, 6);
std::string name () const;
boost::optional<dcp::Certificate> certificate () const;
-
+
private:
void select_certificate ();
void load_certificate (boost::filesystem::path);
void download_certificate ();
void setup_sensitivity ();
-
+
wxTextCtrl* _name;
wxChoice* _manufacturer;
wxButton* _load_certificate;
{
wxBoxSizer* s = new wxBoxSizer (wxVERTICAL);
SetSizer (s);
-
+
_list = new wxListCtrl (this, wxID_ANY, wxDefaultPosition, wxSize (400, 200), wxLC_REPORT | wxLC_SINGLE_SEL);
{
SetSizer (s);
s->Layout ();
s->SetSizeHints (this);
-
+
_server_finder_connection = ServerFinder::instance()->connect (boost::bind (&ServersListDialog::server_found, this, _1));
}
add_label_to_sizer (s, this, _("%"), false);
grid->Add (s);
}
-
+
{
add_label_to_sizer (grid, this, _("X Scale"), true);
wxBoxSizer* s = new wxBoxSizer (wxHORIZONTAL);
add_label_to_sizer (grid, this, _("Language"), true);
_language = new wxTextCtrl (this, wxID_ANY);
grid->Add (_language, 1, wxEXPAND);
-
+
add_label_to_sizer (grid, this, _("Stream"), true);
_stream = new wxChoice (this, wxID_ANY);
grid->Add (_stream, 1, wxEXPAND);
_fonts_dialog_button = new wxButton (this, wxID_ANY, _("Fonts..."));
grid->Add (_fonts_dialog_button);
grid->AddSpacer (0);
-
+
_x_offset->SetRange (-100, 100);
_y_offset->SetRange (-100, 100);
_x_scale->SetRange (10, 1000);
for (vector<shared_ptr<FFmpegSubtitleStream> >::iterator i = s.begin(); i != s.end(); ++i) {
_stream->Append (std_to_wx ((*i)->name), new wxStringClientData (std_to_wx ((*i)->identifier ())));
}
-
+
if (fcs->subtitle_stream()) {
checked_set (_stream, fcs->subtitle_stream()->identifier ());
} else {
++any_subs;
}
}
-
+
_use->Enable (any_subs > 0);
bool const use = _use->GetValue ();
-
+
_x_offset->Enable (any_subs > 0 && use);
_y_offset->Enable (any_subs > 0 && use);
_x_scale->Enable (any_subs > 0 && use);
}
shared_ptr<FFmpegContent> fcs = fc.front ();
-
+
vector<shared_ptr<FFmpegSubtitleStream> > a = fcs->subtitle_streams ();
vector<shared_ptr<FFmpegSubtitleStream> >::iterator i = a.begin ();
string const s = string_client_data (_stream->GetClientObject (_stream->GetSelection ()));
DCPOMATIC_ASSERT (c.size() == 1);
shared_ptr<SubtitleDecoder> decoder;
-
+
shared_ptr<SubRipContent> sr = dynamic_pointer_cast<SubRipContent> (c.front ());
if (sr) {
decoder.reset (new SubRipDecoder (sr));
}
-
+
shared_ptr<DCPSubtitleContent> dc = dynamic_pointer_cast<DCPSubtitleContent> (c.front ());
if (dc) {
decoder.reset (new DCPSubtitleDecoder (dc));
}
-
+
if (decoder) {
_subtitle_view = new SubtitleView (this, _parent->film(), decoder, c.front()->position ());
_subtitle_view->Show ();
void film_changed (Film::Property);
void film_content_changed (int);
void content_selection_changed ();
-
+
private:
void use_toggled ();
void x_offset_changed ();
void fonts_dialog_clicked ();
void setup_sensitivity ();
-
+
wxCheckBox* _use;
wxSpinCtrl* _x_offset;
wxSpinCtrl* _y_offset;
ip.SetText (_("End"));
ip.SetWidth (100);
_list->InsertColumn (1, ip);
- }
+ }
{
wxListItem ip;
public:
SubtitleView (wxWindow *, boost::shared_ptr<Film>, boost::shared_ptr<SubtitleDecoder>, DCPTime position);
-private:
+private:
wxListCtrl* _list;
};
if (cancel) {
flags |= wxCANCEL;
}
-
+
wxSizer* buttons = CreateSeparatedButtonSizer (flags);
if (buttons) {
_overall_sizer->Add (buttons, wxSizerFlags().Expand().DoubleBorder());
void add (wxString text, bool label);
void add_spacer ();
-
+
void layout ();
-private:
+private:
wxSizer* _overall_sizer;
wxFlexGridSizer* _table;
};
: wxPanel (parent)
{
wxSize const s = TimecodeBase::size (parent);
-
+
wxTextValidator validator (wxFILTER_INCLUDE_CHAR_LIST);
wxArrayString list;
validator.SetIncludes (list);
_sizer = new wxBoxSizer (wxHORIZONTAL);
-
+
_editable = new wxPanel (this);
wxSizer* editable_sizer = new wxBoxSizer (wxHORIZONTAL);
_hours = new wxTextCtrl (_editable, wxID_ANY, wxT(""), wxDefaultPosition, s, 0, validator);
_sizer->Add (_editable);
_fixed = add_label_to_sizer (_sizer, this, wxT ("42"), false);
-
+
_hours->Bind (wxEVT_COMMAND_TEXT_UPDATED, boost::bind (&TimecodeBase::changed, this));
_minutes->Bind (wxEVT_COMMAND_TEXT_UPDATED, boost::bind (&TimecodeBase::changed, this));
_seconds->Bind (wxEVT_COMMAND_TEXT_UPDATED, boost::bind (&TimecodeBase::changed, this));
protected:
void changed ();
void set_clicked ();
-
+
wxSizer* _sizer;
wxPanel* _editable;
wxTextCtrl* _hours;
int s;
int f;
t.split (fps, h, m, s, f);
-
+
checked_set (_hours, boost::lexical_cast<std::string> (h));
checked_set (_minutes, boost::lexical_cast<std::string> (m));
checked_set (_seconds, boost::lexical_cast<std::string> (s));
checked_set (_frames, boost::lexical_cast<std::string> (f));
-
+
checked_set (_fixed, t.timecode (fps));
}
t += T::from_seconds (boost::lexical_cast<int> (s.empty() ? "0" : s));
std::string const f = wx_to_std (_frames->GetValue());
t += T::from_seconds (boost::lexical_cast<double> (f.empty() ? "0" : f) / fps);
-
+
return t;
}
};
{
#ifndef __WXOSX__
SetDoubleBuffered (true);
-#endif
+#endif
Bind (wxEVT_PAINT, boost::bind (&Timeline::paint, this));
Bind (wxEVT_LEFT_DOWN, boost::bind (&Timeline::left_down, this, _1));
++j;
continue;
}
-
+
shared_ptr<Content> test_content = test->content();
-
+
if (test && test->track() && test->track().get() == t) {
bool const no_overlap =
(content->position() < test_content->position() && content->end() < test_content->position()) ||
(content->position() > test_content->end() && content->end() > test_content->end());
-
+
if (!no_overlap) {
/* we have an overlap on track `t' */
++t;
break;
}
}
-
+
++j;
}
if (!cv) {
continue;
}
-
+
if (!ev.ShiftDown ()) {
cv->set_selected (view == *i);
}
-
+
if (view == *i) {
_content_panel->set_selection (cv->content ());
}
if (!_down_view) {
return;
}
-
+
DCPTime new_position = _down_view_position + DCPTime::from_seconds ((p.x - _down_point.x) / pps);
-
+
if (_snap) {
DCPTime const new_end = new_position + _down_view->content()->length_after_trim () - DCPTime (1);
positive is right).
*/
optional<DCPTime> nearest_distance;
-
+
/* Find the nearest content edge; this is inefficient */
for (TimelineViewList::iterator i = _views.begin(); i != _views.end(); ++i) {
shared_ptr<TimelineContentView> cv = dynamic_pointer_cast<TimelineContentView> (*i);
maybe_snap (cv->content()->end() + DCPTime (1), new_position, nearest_distance);
maybe_snap (cv->content()->end() + DCPTime (1), new_end, nearest_distance);
}
-
+
if (nearest_distance) {
/* Snap if it's close; `close' means within a proportion of the time on the timeline */
if (nearest_distance.get().abs() < DCPTime::from_seconds ((width() / pps) / 64)) {
}
}
}
-
+
if (new_position < DCPTime ()) {
new_position = DCPTime ();
}
_down_view->content()->set_position (new_position);
-
+
shared_ptr<Film> film = _film.lock ();
DCPOMATIC_ASSERT (film);
film->set_sequence_video (false);
Timeline::selected_views () const
{
TimelineContentViewList sel;
-
+
for (TimelineViewList::const_iterator i = _views.begin(); i != _views.end(); ++i) {
shared_ptr<TimelineContentView> cv = dynamic_pointer_cast<TimelineContentView> (*i);
if (cv && cv->selected()) {
{
ContentList sel;
TimelineContentViewList views = selected_views ();
-
+
for (TimelineContentViewList::const_iterator i = views.begin(); i != views.end(); ++i) {
sel.push_back ((*i)->content ());
}
{
public:
TimelineAudioContentView (Timeline& tl, boost::shared_ptr<Content> c);
-
+
private:
wxString type () const;
wxColour background_colour () const;
TimelineContentView::bbox () const
{
DCPOMATIC_ASSERT (_track);
-
+
shared_ptr<const Film> film = _timeline.film ();
shared_ptr<const Content> content = _content.lock ();
if (!film || !content) {
return dcpomatic::Rect<int> ();
}
-
+
return dcpomatic::Rect<int> (
time_x (content->position ()) - 8,
y_pos (_track.get()) - 8,
TimelineContentView::do_paint (wxGraphicsContext* gc)
{
DCPOMATIC_ASSERT (_track);
-
+
shared_ptr<const Film> film = _timeline.film ();
shared_ptr<const Content> cont = content ();
if (!film || !cont) {
return;
}
-
+
DCPTime const position = cont->position ();
DCPTime const len = cont->length_after_trim ();
-
+
wxColour selected (background_colour().Red() / 2, background_colour().Green() / 2, background_colour().Blue() / 2);
-
+
gc->SetPen (*wxThePenList->FindOrCreatePen (foreground_colour(), 4, wxPENSTYLE_SOLID));
if (_selected) {
gc->SetBrush (*wxTheBrushList->FindOrCreateBrush (selected, wxBRUSHSTYLE_SOLID));
path.AddLineToPoint (time_x (position), y_pos (_track.get()) + 4);
gc->StrokePath (path);
gc->FillPath (path);
-
+
wxString name = wxString::Format (wxT ("%s [%s]"), std_to_wx (cont->summary()).data(), type().data());
wxDouble name_width;
wxDouble name_height;
wxDouble name_descent;
wxDouble name_leading;
gc->GetTextExtent (name, &name_width, &name_height, &name_descent, &name_leading);
-
+
gc->Clip (wxRegion (time_x (position), y_pos (_track.get()), len.seconds() * _timeline.pixels_per_second().get_value_or(0), _timeline.track_height()));
gc->SetFont (gc->CreateFont (*wxNORMAL_FONT, foreground_colour ()));
gc->DrawText (name, time_x (position) + 12, y_pos (_track.get() + 1) - name_height - 4);
TimelineContentView::content_changed (int p, bool frequent)
{
ensure_ui_thread ();
-
+
if (p == ContentProperty::POSITION || p == ContentProperty::LENGTH) {
force_redraw ();
}
-
+
if (!frequent) {
_timeline.setup_pixels_per_second ();
_timeline.Refresh ();
virtual wxString type () const = 0;
virtual wxColour background_colour () const = 0;
virtual wxColour foreground_colour () const = 0;
-
+
private:
void do_paint (wxGraphicsContext* gc);
if (!film) {
return;
}
-
+
film->set_sequence_video (_sequence_video->GetValue ());
}
void snap_toggled ();
void sequence_video_toggled ();
void film_changed (Film::Property);
-
+
boost::weak_ptr<Film> _film;
Timeline _timeline;
wxCheckBox* _snap;
if (!sc || !sc->use_subtitles ()) {
return wxColour (210, 210, 210, 128);
}
-
+
return wxColour (163, 255, 154, 255);
}
if (!sc || !sc->use_subtitles ()) {
return wxColour (180, 180, 180, 128);
}
-
+
return wxColour (0, 0, 0, 255);
}
{
}
-
+
dcpomatic::Rect<int>
TimelineTimeAxisView::bbox () const
{
if (!_timeline.pixels_per_second()) {
return;
}
-
+
double const pps = _timeline.pixels_per_second().get ();
-
+
gc->SetPen (*wxThePenList->FindOrCreatePen (wxColour (0, 0, 0), 1, wxPENSTYLE_SOLID));
-
+
double mark_interval = rint (128 / pps);
if (mark_interval > 5) {
mark_interval -= int (rint (mark_interval)) % 5;
if (mark_interval > 3600) {
mark_interval -= int (rint (mark_interval)) % 3600;
}
-
+
if (mark_interval < 1) {
mark_interval = 1;
}
-
+
wxGraphicsPath path = gc->CreatePath ();
path.MoveToPoint (_timeline.x_offset(), _y);
path.AddLineToPoint (_timeline.width(), _y);
gc->StrokePath (path);
-
+
gc->SetFont (gc->CreateFont (*wxNORMAL_FONT));
-
+
/* Time in seconds */
DCPTime t;
while ((t.seconds() * pps) < _timeline.width()) {
path.MoveToPoint (time_x (t), _y - 4);
path.AddLineToPoint (time_x (t), _y + 4);
gc->StrokePath (path);
-
+
double tc = t.seconds ();
int const h = tc / 3600;
tc -= h * 3600;
int const m = tc / 60;
tc -= m * 60;
int const s = tc;
-
+
wxString str = wxString::Format (wxT ("%02d:%02d:%02d"), h, m, s);
wxDouble str_width;
wxDouble str_height;
wxDouble str_descent;
wxDouble str_leading;
gc->GetTextExtent (str, &str_width, &str_height, &str_descent, &str_leading);
-
+
int const tx = _timeline.x_offset() + t.seconds() * pps;
if ((tx + str_width) < _timeline.width()) {
gc->DrawText (str, time_x (t), _y + 16);
}
-
+
t += DCPTime::from_seconds (mark_interval);
}
}
dcpomatic::Rect<int> bbox () const;
void set_y (int y);
-private:
+private:
void do_paint (wxGraphicsContext* gc);
private:
public:
TimelineVideoContentView (Timeline& tl, boost::shared_ptr<Content> c);
-private:
+private:
wxString type () const;
wxColour background_colour () const;
wxColour foreground_colour () const;
TimelineView::TimelineView (Timeline& t)
: _timeline (t)
{
-
+
}
void
public:
TimelineView (Timeline& t);
virtual ~TimelineView () {}
-
+
void paint (wxGraphicsContext* g);
void force_redraw ();
protected:
virtual void do_paint (wxGraphicsContext *) = 0;
-
+
int time_x (DCPTime t) const;
-
+
Timeline& _timeline;
private:
_sizer->Add (grid, 0, wxALL, 8);
wxSize size = TimecodeBase::size (this);
-
+
wxSizer* labels = new wxBoxSizer (wxHORIZONTAL);
//// TRANSLATORS: this is an abbreviation for "hours"
wxStaticText* t = new wxStaticText (this, wxID_ANY, _("h"), wxDefaultPosition, size, wxALIGN_CENTRE_HORIZONTAL);
#ifdef DCPOMATIC_LINUX
/* Hack to work around failure to centre text on GTK */
gtk_label_set_line_wrap (GTK_LABEL (t->GetHandle()), FALSE);
-#endif
+#endif
labels->Add (t, 1, wxEXPAND);
add_label_to_sizer (labels, this, wxT (":"), false);
//// TRANSLATORS: this is an abbreviation for "minutes"
t = new wxStaticText (this, wxID_ANY, _("m"), wxDefaultPosition, size, wxALIGN_CENTRE_HORIZONTAL);
#ifdef DCPOMATIC_LINUX
gtk_label_set_line_wrap (GTK_LABEL (t->GetHandle()), FALSE);
-#endif
+#endif
labels->Add (t, 1, wxEXPAND);
add_label_to_sizer (labels, this, wxT (":"), false);
//// TRANSLATORS: this is an abbreviation for "seconds"
t = new wxStaticText (this, wxID_ANY, _("s"), wxDefaultPosition, size, wxALIGN_CENTRE_HORIZONTAL);
#ifdef DCPOMATIC_LINUX
gtk_label_set_line_wrap (GTK_LABEL (t->GetHandle()), FALSE);
-#endif
+#endif
labels->Add (t, 1, wxEXPAND);
add_label_to_sizer (labels, this, wxT (":"), false);
//// TRANSLATORS: this is an abbreviation for "frames"
t = new wxStaticText (this, wxID_ANY, _("f"), wxDefaultPosition, size, wxALIGN_CENTRE_HORIZONTAL);
#ifdef DCPOMATIC_LINUX
gtk_label_set_line_wrap (GTK_LABEL (t->GetHandle()), FALSE);
-#endif
+#endif
labels->Add (t, 1, wxEXPAND);
grid->Add (new wxStaticText (this, wxID_ANY, wxT ("")));
grid->Add (labels);
++current;
}
}
-
+
t = new wxStaticText (this, wxID_ANY, wxT (""));
t->SetLabelMarkup (out);
grid->Add (t, 0, wxALIGN_CENTER_VERTICAL | wxLEFT | wxRIGHT, 6);
for (ContentList::const_iterator i = cl.begin (); i != cl.end(); ++i) {
check.insert ((*i)->full_length ());
}
-
+
if (check.size() == 1) {
_full_length->set (cl.front()->full_length (), _parent->film()->video_frame_rate ());
} else {
for (ContentList::const_iterator i = cl.begin (); i != cl.end(); ++i) {
check.insert ((*i)->length_after_trim ());
}
-
+
if (check.size() == 1) {
_play_length->set (cl.front()->length_after_trim (), _parent->film()->video_frame_rate ());
} else {
/* Here we check to see if we have exactly one different value of various
properties, and fill the controls with that value if so.
*/
-
+
if (property == ContentProperty::POSITION) {
set<DCPTime> check;
} else {
_position->clear ();
}
-
+
} else if (
property == ContentProperty::LENGTH ||
property == VideoContentProperty::VIDEO_FRAME_RATE ||
for (ContentList::const_iterator i = cl.begin (); i != cl.end(); ++i) {
check.insert ((*i)->trim_start ());
}
-
+
if (check.size() == 1) {
_trim_start->set (cl.front()->trim_start (), film_video_frame_rate);
} else {
_trim_start->clear ();
}
-
+
} else if (property == ContentProperty::TRIM_END) {
set<DCPTime> check;
for (ContentList::const_iterator i = cl.begin (); i != cl.end(); ++i) {
check.insert ((*i)->trim_end ());
}
-
+
if (check.size() == 1) {
_trim_end->set (cl.front()->trim_end (), film_video_frame_rate);
} else {
_trim_end->Enable (e);
_play_length->Enable (e);
_video_frame_rate->Enable (e);
-
+
film_content_changed (ContentProperty::POSITION);
film_content_changed (ContentProperty::LENGTH);
film_content_changed (ContentProperty::TRIM_START);
if (i->position() < ph && ph < i->end ()) {
i->set_trim_end (i->position() + i->full_length() - i->trim_start() - ph);
}
-
+
}
}
void film_changed (Film::Property);
void film_content_changed (int);
void content_selection_changed ();
-
+
private:
void position_changed ();
void full_length_changed ();
void update_play_length ();
FilmViewer* _viewer;
-
+
Timecode<DCPTime>* _position;
Timecode<DCPTime>* _full_length;
Timecode<DCPTime>* _trim_start;
wxHyperlinkCtrl* h = new wxHyperlinkCtrl (this, wxID_ANY, "dcpomatic.com/download", "http://dcpomatic.com/download");
table->Add (h);
}
-
+
if (test) {
add_label_to_sizer (table, this, _("Test version ") + std_to_wx (test.get ()), true);
wxHyperlinkCtrl* h = new wxHyperlinkCtrl (this, wxID_ANY, "dcpomatic.com/test-download", "http://dcpomatic.com/test-download");
table->Add (h);
}
-
+
overall_sizer->Add (table, 1, wxEXPAND | wxLEFT | wxRIGHT | wxTOP, DCPOMATIC_DIALOG_BORDER);
wxSizer* buttons = CreateButtonSizer (wxOK);
if (buttons) {
overall_sizer->Add (buttons, 1, wxEXPAND | wxALL);
}
-
+
SetSizerAndFit (overall_sizer);
}
_right_crop->add (crop, wxGBPosition (cr, 3));
++cr;
-
+
add_label_to_grid_bag_sizer (crop, this, _("Top"), true, wxGBPosition (cr, 0));
_top_crop = new ContentSpinCtrl<VideoContent> (
this,
_fade_out = new Timecode<ContentTime> (this);
grid->Add (_fade_out, wxGBPosition (r, 1), wxGBSpan (1, 3));
++r;
-
+
add_label_to_grid_bag_sizer (grid, this, _("Scale to"), true, wxGBPosition (r, 0));
_scale = new ContentChoice<VideoContent, VideoContentScale> (
this,
}
_colour_conversion->Append (_("Custom"));
s->Add (_colour_conversion, 1, wxEXPAND | wxALIGN_CENTER_VERTICAL | wxTOP | wxBOTTOM | wxRIGHT, 6);
-
+
_edit_colour_conversion_button = new wxButton (this, wxID_ANY, _("Edit..."));
s->Add (_edit_colour_conversion_button, 0, wxALIGN_CENTER_VERTICAL);
-
+
grid->Add (s, wxGBPosition (r, 1), wxDefaultSpan, wxALIGN_CENTER_VERTICAL);
}
++r;
_fade_in->Changed.connect (boost::bind (&VideoPanel::fade_in_changed, this));
_fade_out->Changed.connect (boost::bind (&VideoPanel::fade_out_changed, this));
-
+
_filters_button->Bind (wxEVT_COMMAND_BUTTON_CLICKED, boost::bind (&VideoPanel::edit_filters_clicked, this));
_colour_conversion->Bind (wxEVT_COMMAND_CHOICE_SELECTED, boost::bind (&VideoPanel::colour_conversion_changed, this));
_edit_colour_conversion_button->Bind (wxEVT_COMMAND_BUTTON_CLICKED, boost::bind (&VideoPanel::edit_colour_conversion_clicked, this));
vcs = vc.front ();
fcs = dynamic_pointer_cast<FFmpegContent> (vcs);
}
-
+
if (property == VideoContentProperty::VIDEO_FRAME_TYPE) {
setup_description ();
} else if (property == VideoContentProperty::VIDEO_CROP) {
for (VideoContentList::const_iterator i = vc.begin (); i != vc.end(); ++i) {
check.insert ((*i)->fade_in ());
}
-
+
if (check.size() == 1) {
_fade_in->set (ContentTime::from_frames (vc.front()->fade_in (), vc.front()->video_frame_rate ()), vc.front()->video_frame_rate ());
} else {
for (VideoContentList::const_iterator i = vc.begin (); i != vc.end(); ++i) {
check.insert ((*i)->fade_out ());
}
-
+
if (check.size() == 1) {
_fade_out->set (ContentTime::from_frames (vc.front()->fade_out (), vc.front()->video_frame_rate ()), vc.front()->video_frame_rate ());
} else {
{
VideoContentList video_sel = _parent->selected_video ();
FFmpegContentList ffmpeg_sel = _parent->selected_ffmpeg ();
-
+
bool const single = video_sel.size() == 1;
_frame_type->set_content (video_sel);
flags |= wxALIGN_RIGHT;
t += wxT (":");
}
-#endif
+#endif
wxStaticText* m = new wxStaticText (p, wxID_ANY, t);
s->Add (m, prop, flags, 6);
return m;
flags |= wxALIGN_RIGHT;
t += wxT (":");
}
-#endif
+#endif
wxStaticText* m = new wxStaticText (p, wxID_ANY, t);
s->Add (m, pos, span, flags);
return m;
d->Destroy ();
return r == wxID_YES;
}
-
+
/** @param s wxWidgets string.
* @return Corresponding STL string.
if (widget->GetSelection() != -1) {
o = widget->GetClientObject (widget->GetSelection ());
}
-
+
if (!o || string_client_data(o) != value) {
for (unsigned int i = 0; i < widget->GetCount(); ++i) {
if (string_client_data (widget->GetClientObject (i)) == value) {
#ifdef DCPOMATIC_WINDOWS
locale->AddCatalogLookupPathPrefix (std_to_wx (mo_path().string()));
-#endif
+#endif
#ifdef DCPOMATIC_LINUX
locale->AddCatalogLookupPathPrefix (LINUX_LOCALE_PREFIX);
of wxWidgets.
*/
locale->AddCatalog (wxT ("dcpomatic2-wxstd"));
-#endif
-
+#endif
+
locale->AddCatalog (wxT ("libdcpomatic2-wx"));
locale->AddCatalog (wxT ("dcpomatic2"));
-
+
if (!locale->IsOk()) {
delete locale;
locale = new wxLocale (wxLANGUAGE_ENGLISH);
{
int const channels = 3;
int const points = 4096;
-
+
srand (1);
-
+
AudioAnalysis a (3);
for (int i = 0; i < channels; ++i) {
for (int j = 0; j < points; ++j) {
BOOST_CHECK_CLOSE (p[AudioPoint::RMS], random_float (), 1);
}
}
-
+
BOOST_CHECK (b.peak ());
BOOST_CHECK_CLOSE (b.peak().get(), peak, 1);
BOOST_CHECK (b.peak_time ());
c->set_audio_delay (-250);
film->examine_and_add_content (c);
wait_for_jobs ();
-
+
shared_ptr<AnalyseAudioJob> job (new AnalyseAudioJob (film));
job->Finished.connect (boost::bind (&finished));
JobManager::instance()->add (job);
shared_ptr<AudioContent> c (new FFmpegContent (film, private_data / "3d_thx_broadway_2010_lossless.m2ts"));
film->examine_and_add_content (c);
wait_for_jobs ();
-
+
shared_ptr<AnalyseAudioJob> job (new AnalyseAudioJob (film));
job->Finished.connect (boost::bind (&finished));
JobManager::instance()->add (job);
random_fill (buffers);
buffers.make_silent ();
-
+
for (int i = 0; i < 9933; ++i) {
for (int c = 0; c < 9; ++c) {
BOOST_CHECK_EQUAL (buffers.data(c)[i], 0);
for (int i = 0; i < from * 7; ++i) {
random_float ();
}
-
+
random_check (buffers, to, frames);
}
AudioBuffers a (3, 256);
srand (38);
random_fill (a);
-
+
AudioBuffers b (3, 256);
random_fill (b);
AudioBuffers a (3, 256);
srand (38);
random_fill (a);
-
+
AudioBuffers b (3, 256);
random_fill (b);
DCPTime full_length () const {
return DCPTime::from_seconds (float (audio_length()) / audio_stream()->frame_rate ());
}
-
+
Frame audio_length () const {
return rint (61.2942 * audio_stream()->frame_rate ());
}
content.reset (new TestAudioContent (film));
decoder.reset (new TestAudioDecoder (content));
-
+
/* Simple reads */
check (0, 48000);
check (44, 9123);
Frame const from = content->resampled_audio_frame_rate() * 61;
Frame const length = content->resampled_audio_frame_rate() * 4;
ContentAudio ca = get (from, length);
-
+
for (int i = 0; i < content->audio_stream()->channels(); ++i) {
for (int j = 0; j < ca.audio->frames(); ++j) {
BOOST_REQUIRE_EQUAL (ca.audio->data(i)[j], j + from);
void test_audio_delay (int delay_in_ms)
{
BOOST_TEST_MESSAGE ("Testing delay of " << delay_in_ms);
-
+
string const film_name = "audio_delay_test_" + lexical_cast<string> (delay_in_ms);
shared_ptr<Film> film = new_test_film (film_name);
film->set_dcp_content_type (DCPContentType::from_isdcf_name ("FTR"));
while (n < sound_asset->asset()->intrinsic_duration()) {
shared_ptr<const dcp::SoundFrame> sound_frame = sound_asset->asset()->get_frame (frame++);
uint8_t const * d = sound_frame->data ();
-
+
for (int i = 0; i < sound_frame->size(); i += (3 * sound_asset->asset()->channels())) {
/* Mono input so it will appear on centre */
}
shared_ptr<AudioBuffers> out = f.run (in);
-
+
for (int j = 0; j < out->frames(); ++j) {
BOOST_CHECK_EQUAL (out->data()[0][j], c + j);
}
shared_ptr<AudioBuffers> in (new AudioBuffers (1, 1751));
in->make_silent ();
in->data(0)[0] = 1;
-
+
shared_ptr<AudioBuffers> out = lpf.run (in);
for (int j = 0; j < out->frames(); ++j) {
if (j <= lpf._M) {
in.reset (new AudioBuffers (1, 9133));
in->make_silent ();
in->data(0)[0] = 1;
-
+
out = hpf.run (in);
for (int j = 0; j < out->frames(); ++j) {
if (j <= hpf._M) {
{
Data remotely_encoded;
BOOST_CHECK_NO_THROW (remotely_encoded = frame->encode_remotely (description));
-
+
BOOST_CHECK_EQUAL (locally_encoded.size(), remotely_encoded.size());
BOOST_CHECK_EQUAL (memcmp (locally_encoded.data().get(), remotely_encoded.data().get(), locally_encoded.size()), 0);
}
{
shared_ptr<Image> image (new Image (PIX_FMT_RGB24, dcp::Size (1998, 1080), true));
uint8_t* p = image->data()[0];
-
+
for (int y = 0; y < 1080; ++y) {
uint8_t* q = p;
for (int x = 0; x < 1998; ++x) {
);
Data locally_encoded = frame->encode_locally (boost::bind (&Log::dcp_log, log.get(), _1, _2));
-
+
Server* server = new Server (log, true);
new thread (boost::bind (&Server::run, server, 2));
);
Data locally_encoded = frame->encode_locally (boost::bind (&Log::dcp_log, log.get(), _1, _2));
-
+
Server* server = new Server (log, true);
new thread (boost::bind (&Server::run, server, 2));
film->examine_and_add_content (c);
wait_for_jobs ();
-
+
c->set_scale (VideoContentScale (Ratio::from_id ("185")));
film->set_container (Ratio::from_id ("185"));
while (n < sound_asset->asset()->intrinsic_duration()) {
shared_ptr<const dcp::SoundFrame> sound_frame = sound_asset->asset()->get_frame (frame++);
uint8_t const * d = sound_frame->data ();
-
+
for (int i = 0; i < sound_frame->size(); i += (3 * sound_asset->asset()->channels())) {
if (sound_asset->asset()->channels() > 0) {
int const sample = d[i + 2] | (d[i + 3] << 8);
BOOST_CHECK_EQUAL (sample, 0);
}
-
+
if (sound_asset->asset()->channels() > 2) {
/* Mono input so it will appear on centre */
int const sample = d[i + 7] | (d[i + 8] << 8);
film->examine_and_add_content (c);
wait_for_jobs ();
-
+
c->set_scale (VideoContentScale (Ratio::from_id ("185")));
-
+
film->set_container (Ratio::from_id ("185"));
film->set_dcp_content_type (DCPContentType::from_pretty_name ("Test"));
film->make_dcp ();
if (i != boost::filesystem::directory_iterator ()) {
boost::filesystem::remove (i->path ());
}
-
+
BOOST_CHECK (film->cpls().empty());
}
}
shared_ptr<Film> film = new_test_film ("ffmpeg_decoder_seek_test_" + file.string());
- shared_ptr<FFmpegContent> content (new FFmpegContent (film, path));
+ shared_ptr<FFmpegContent> content (new FFmpegContent (film, path));
film->examine_and_add_content (content);
wait_for_jobs ();
shared_ptr<Log> log (new NullLog);
BOOST_AUTO_TEST_CASE (ffmpeg_decoder_seek_test)
{
vector<int> frames;
-
+
frames.clear ();
frames.push_back (0);
frames.push_back (42);
test ("boon_telly.mkv", frames);
test ("Sintel_Trailer1.480p.DivX_Plus_HD.mkv", frames);
-
+
frames.clear ();
frames.push_back (15);
frames.push_back (42);
frames.push_back (999);
frames.push_back (15);
-
+
test ("prophet_clip.mkv", frames);
}
}
shared_ptr<Film> film = new_test_film ("ffmpeg_decoder_seek_test_" + file.string());
- shared_ptr<FFmpegContent> content (new FFmpegContent (film, path));
+ shared_ptr<FFmpegContent> content (new FFmpegContent (film, path));
film->examine_and_add_content (content);
wait_for_jobs ();
shared_ptr<Log> log (new NullLog);
shared_ptr<FFmpegDecoder> decoder (new FFmpegDecoder (content, log));
BOOST_CHECK_CLOSE (decoder->video_content()->video_frame_rate(), fps, 0.01);
-
+
Frame const N = decoder->video_content()->video_length();
-#ifdef DCPOMATIC_DEBUG
+#ifdef DCPOMATIC_DEBUG
decoder->test_gaps = 0;
-#endif
+#endif
for (Frame i = 0; i < N; ++i) {
list<ContentVideo> v;
v = decoder->get_video (i, true);
BOOST_CHECK_EQUAL (v.size(), 1U);
BOOST_CHECK_EQUAL (v.front().frame, i);
}
-#ifdef DCPOMATIC_DEBUG
+#ifdef DCPOMATIC_DEBUG
BOOST_CHECK_EQUAL (decoder->test_gaps, gaps);
#endif
}
for (int i = 0; i < 65536; ++i) {
data[i] = rand() & 0xff;
}
-
+
int const num_files = 4;
int length[] = {
BOOST_CHECK_EQUAL (g->name(), "fred");
BOOST_CHECK_EQUAL (g->dcp_content_type(), DCPContentType::from_pretty_name ("Short"));
BOOST_CHECK_EQUAL (g->container(), Ratio::from_id ("185"));
-
+
g->write_metadata ();
check_xml ("test/data/metadata.xml.ref", dir.string() + "/metadata.xml", ignore);
}
shared_ptr<FFmpegContent> content (new FFmpegContent (film, "test/data/test.mp4"));
film->add_content (content);
wait_for_jobs ();
-
+
/* Run some tests with a limited range of allowed rates */
-
+
std::list<int> afr;
afr.push_back (24);
afr.push_back (25);
BOOST_CHECK_EQUAL (frc.repeat, 1);
BOOST_CHECK_EQUAL (frc.change_speed, false);
BOOST_CHECK_CLOSE (frc.speed_up, 1, 0.1);
-
+
content->_video_frame_rate = 50;
best = film->best_video_frame_rate ();
frc = FrameRateChange (50, best);
BOOST_CHECK_EQUAL (frc.repeat, 1);
BOOST_CHECK_EQUAL (frc.change_speed, true);
BOOST_CHECK_CLOSE (frc.speed_up, 30 / 29.97, 0.1);
-
+
content->_video_frame_rate = 25;
best = film->best_video_frame_rate ();
frc = FrameRateChange (25, best);
BOOST_CHECK_EQUAL (frc.repeat, 1);
BOOST_CHECK_EQUAL (frc.change_speed, false);
BOOST_CHECK_CLOSE (frc.speed_up, 1, 0.1);
-
+
content->_video_frame_rate = 50;
best = film->best_video_frame_rate ();
frc = FrameRateChange (50, best);
BOOST_CHECK_CLOSE (frc.speed_up, 1, 0.1);
/* Check some out-there conversions (not the best) */
-
+
frc = FrameRateChange (14.99, 24);
BOOST_CHECK_EQUAL (frc.skip, false);
BOOST_CHECK_EQUAL (frc.repeat, 2);
wait_for_jobs ();
/* Run some tests with a limited range of allowed rates */
-
+
std::list<int> afr;
afr.push_back (24);
afr.push_back (25);
shared_ptr<FFmpegContent> content (new FFmpegContent (film, "test/data/test.mp4"));
film->examine_and_add_content (content);
wait_for_jobs ();
-
+
std::list<int> afr;
afr.push_back (24);
afr.push_back (25);
BOOST_CHECK_EQUAL (content->resampled_audio_frame_rate(), 50000);
/* Check some out-there conversions (not the best) */
-
+
content->_video_frame_rate = 14.99;
film->set_video_frame_rate (25);
stream->_frame_rate = 16000;
}
}
}
-
+
/* Convert using separate methods */
boost::shared_ptr<Image> sep = test->crop (crop, true);
sep = sep->scale (inter_size, dcp::YUV_TO_RGB_REC601, PIX_FMT_RGB24, true);
BOOST_AUTO_TEST_CASE (alpha_blend_test)
{
int const stride = 48 * 4;
-
+
shared_ptr<Image> A (new Image (AV_PIX_FMT_RGBA, dcp::Size (48, 48), false));
A->make_black ();
uint8_t* a = A->data()[0];
BOOST_AUTO_TEST_CASE (merge_test1)
{
int const stride = 48 * 4;
-
+
shared_ptr<Image> A (new Image (AV_PIX_FMT_RGBA, dcp::Size (48, 48), false));
A->make_transparent ();
uint8_t* a = A->data()[0];
film->set_dcp_content_type (DCPContentType::from_isdcf_name ("XSN"));
BOOST_CHECK_EQUAL (film->isdcf_name(false), "MyNiceFilmWith_XSN-2_F-133_DE-FR_US-R_4K_DI_20140704_PP_SMPTE_VF");
-
+
/* Test 3D */
film->set_three_d (true);
BOOST_CHECK_EQUAL (film->isdcf_name(false), "LikeShouting_XSN-2_F-133_DE-FR_US-R_4K_DI_20140704_PP_SMPTE_VF");
/* Test audio channel markup */
-
+
film->set_audio_channels (6);
shared_ptr<SndfileContent> sound (new SndfileContent (film, "test/data/sine_440.wav"));
film->examine_and_add_content (sound);
pix_fmts.push_back (AV_PIX_FMT_YUVA422P16LE);
pix_fmts.push_back (AV_PIX_FMT_YUVA444P16LE);
pix_fmts.push_back (AV_PIX_FMT_RGB555LE); // 46
-
+
int N = 0;
for (list<AVPixelFormat>::const_iterator i = pix_fmts.begin(); i != pix_fmts.end(); ++i) {
boost::shared_ptr<Image> foo (new Image (*i, in_size, true));
foo->make_black ();
boost::shared_ptr<Image> bar = foo->scale (out_size, dcp::YUV_TO_RGB_REC601, PIX_FMT_RGB24, true);
-
+
uint8_t* p = bar->data()[0];
for (int y = 0; y < bar->size().height; ++y) {
uint8_t* q = p;
bpp[1] = b1;
bpp[2] = b2;
}
-
+
AVPixelFormat format;
int components;
int lines[3];
if (_queue.empty ()) {
return optional<Video> ();
}
-
+
Video v = _queue.back ();
_queue.pop_back ();
return v;
wait_for_jobs ();
BOOST_CHECK_EQUAL (B->video_length_after_3d_combine(), 16);
-
+
/* Film should have been set to 25fps */
BOOST_CHECK_EQUAL (film->video_frame_rate(), 25);
shared_ptr<FFmpegContent> c (new FFmpegContent (film, "test/data/test.mp4"));
film->set_container (Ratio::from_id ("185"));
film->set_audio_channels (6);
-
+
film->examine_and_add_content (c);
wait_for_jobs ();
video,
"build/test/recover_test/original.mxf"
);
-
+
boost::filesystem::resize_file (video, 2 * 1024 * 1024);
film->make_dcp ();
wait_for_jobs ();
c->set_scale (VideoContentScale (Ratio::from_id ("185")));
-
+
film->set_video_frame_rate (48);
film->make_dcp ();
wait_for_jobs ();
/* 3 hours */
int64_t const N = int64_t (from) * 60 * 60 * 3;
-
+
/* XXX: no longer checks anything */
for (int64_t i = 0; i < N; i += 1000) {
shared_ptr<AudioBuffers> a (new AudioBuffers (1, 1000));
a->make_silent ();
shared_ptr<const AudioBuffers> r = resamp.run (a);
}
-}
-
+}
+
BOOST_AUTO_TEST_CASE (resampler_test)
{
resampler_test_one (44100, 48000);
film->examine_and_add_content (imc);
wait_for_jobs ();
-
+
imc->set_video_length (1);
/* F-133: 133 image in a flat container */
film->examine_and_add_content (content);
wait_for_jobs ();
content->set_scale (VideoContentScale (Ratio::from_id ("185")));
-
+
/* Work out the first video frame index that we will be given, taking into account
* the difference between first video and first audio.
*/
while (n < sound_asset->asset()->intrinsic_duration()) {
shared_ptr<const dcp::SoundFrame> sound_frame = sound_asset->asset()->get_frame (frame++);
uint8_t const * d = sound_frame->data ();
-
+
for (int i = 0; i < sound_frame->size(); i += (3 * sound_asset->asset()->channels())) {
if (sound_asset->asset()->channels() > 0) {
int const sample = d[i + 2] | (d[i + 3] << 8);
BOOST_CHECK_EQUAL (sample, 0);
}
-
+
if (sound_asset->asset()->channels() > 2) {
/* Mono input so it will appear on centre */
int const sample = d[i + 7] | (d[i + 8] << 8);
++n;
}
}
-
+
}
BOOST_AUTO_TEST_CASE (silence_padding_test)
content->set_use_subtitles (true);
/* Use test/data/subrip2.srt as if it were a font file */
content->fonts().front()->set_file ("test/data/subrip2.srt");
-
+
film->make_dcp ();
wait_for_jobs ();
root->add_child("Channels")->add_child_text ("2");
/* This is the state file version 5 description of the mapping */
-
+
xmlpp::Element* mapping = root->add_child("Mapping");
mapping->add_child("ContentChannels")->add_child_text ("2");
{
map->add_child("ContentIndex")->add_child_text ("1");
map->add_child("DCP")->add_child_text ("2");
}
-
+
FFmpegAudioStream a (cxml::NodePtr (new cxml::Node (root)), 5);
BOOST_CHECK_EQUAL (a.identifier(), "4");
if (boost::filesystem::exists (p)) {
boost::filesystem::remove_all (p);
}
-
+
shared_ptr<Film> film = shared_ptr<Film> (new Film (p.string()));
film->write_metadata ();
return film;
ref_info.format = 0;
SNDFILE* ref_file = sf_open (ref.string().c_str(), SFM_READ, &ref_info);
BOOST_CHECK (ref_file);
-
+
SF_INFO check_info;
check_info.format = 0;
SNDFILE* check_file = sf_open (check.string().c_str(), SFM_READ, &check_info);
sf_count_t const buffer_size = 65536 * ref_info.channels;
scoped_array<int32_t> ref_buffer (new int32_t[buffer_size]);
scoped_array<int32_t> check_buffer (new int32_t[buffer_size]);
-
+
sf_count_t N = ref_info.frames;
while (N) {
sf_count_t this_time = min (buffer_size, N);
BOOST_CHECK (ref_file);
FILE* check_file = fopen_boost (check, "rb");
BOOST_CHECK (check_file);
-
+
int const buffer_size = 65536;
uint8_t* ref_buffer = new uint8_t[buffer_size];
uint8_t* check_buffer = new uint8_t[buffer_size];
SafeStringStream error;
error << "File " << check.string() << " differs from reference " << ref.string();
-
+
while (N) {
uintmax_t this_time = min (uintmax_t (buffer_size), N);
size_t r = fread (ref_buffer, 1, this_time, ref_file);
if (memcmp (ref_buffer, check_buffer, this_time)) {
break;
}
-
+
N -= this_time;
}
options.reel_annotation_texts_can_differ = true;
options.reel_hashes_can_differ = true;
options.issue_dates_can_differ = true;
-
+
BOOST_CHECK (ref_dcp.equals (check_dcp, options, boost::bind (note, _1, _2)));
}
}
cout << "Waiting for jobs: all finished; errors=" << jm->errors() << ".\n";
-
+
if (jm->errors ()) {
int N = 0;
for (list<shared_ptr<Job> >::iterator i = jm->_jobs.begin(); i != jm->_jobs.end(); ++i) {
using namespace MagickCore;
#else
using namespace MagickLib;
-#endif
+#endif
Magick::Image m (image->size().width, image->size().height, "ARGB", CharPixel, (void *) image->data()[0]);
m.write (file.string ());
wait_for_jobs ();
c->set_scale (VideoContentScale (Ratio::from_id ("185")));
-
+
film->set_container (Ratio::from_id ("185"));
film->set_dcp_content_type (DCPContentType::from_pretty_name ("Test"));
film->set_three_d (true);
p.push_back ("test/data/md5.test2");
p.push_back ("test/data/md5.test4");
BOOST_CHECK_EQUAL (md5_digest_head_tail (p, 1024), "52ccf111e4e72b58bb7b2aaa6bd45ea5");
-
+
p.clear ();
p.push_back ("foobar");
BOOST_CHECK_THROW (md5_digest_head_tail (p, 1024), OpenFileError);
BOOST_CHECK_EQUAL (DCPTime (1).round_up (DCPTime::HZ / 2), DCPTime (2));
BOOST_CHECK_EQUAL (DCPTime (2).round_up (DCPTime::HZ / 2), DCPTime (2));
BOOST_CHECK_EQUAL (DCPTime (3).round_up (DCPTime::HZ / 2), DCPTime (4));
-
+
BOOST_CHECK_EQUAL (DCPTime (0).round_up (DCPTime::HZ / 42), DCPTime (0));
BOOST_CHECK_EQUAL (DCPTime (1).round_up (DCPTime::HZ / 42), DCPTime (42));
BOOST_CHECK_EQUAL (DCPTime (42).round_up (DCPTime::HZ / 42), DCPTime (42));
}
BOOST_CHECK (answer == correct);
}
-
+
/* Test scale and stretch to specified ratio */
BOOST_AUTO_TEST_CASE (video_content_scale_test_to_ratio)
{
true,
dcp::Size (1998, 837)
);
-
+
// Flat in scope container
test (
dcp::Size (400, 200),
dcp::Size (1587, 858)
);
-
+
/* To player */
// Flat in flat container
true,
dcp::Size (185, 78)
);
-
+
// Flat in scope container
test (
dcp::Size (400, 200),
decoder.fill_2d (0, 4);
BOOST_CHECK_EQUAL (decoder._decoded_video.size(), 4U);
- list<ContentVideo>::iterator i = decoder._decoded_video.begin();
+ list<ContentVideo>::iterator i = decoder._decoded_video.begin();
for (int j = 0; j < 4; ++j) {
BOOST_CHECK_EQUAL (i->frame, j);
++i;
decoder.fill_2d (0, 7);
BOOST_CHECK_EQUAL (decoder._decoded_video.size(), 7);
- i = decoder._decoded_video.begin();
+ i = decoder._decoded_video.begin();
for (int j = 0; j < 7; ++j) {
BOOST_CHECK_EQUAL (i->frame, j);
++i;
decoder.fill_3d (0, 4, EYES_LEFT);
BOOST_CHECK_EQUAL (decoder._decoded_video.size(), 8);
- list<ContentVideo>::iterator i = decoder._decoded_video.begin();
+ list<ContentVideo>::iterator i = decoder._decoded_video.begin();
for (int j = 0; j < 8; ++j) {
BOOST_CHECK_EQUAL (i->frame, j / 2);
BOOST_CHECK_EQUAL (i->eyes, (j % 2) == 0 ? EYES_LEFT : EYES_RIGHT);
decoder.fill_3d (0, 7, EYES_RIGHT);
BOOST_CHECK_EQUAL (decoder._decoded_video.size(), 15);
- i = decoder._decoded_video.begin();
+ i = decoder._decoded_video.begin();
for (int j = 0; j < 15; ++j) {
BOOST_CHECK_EQUAL (i->frame, j / 2);
BOOST_CHECK_EQUAL (i->eyes, (j % 2) == 0 ? EYES_LEFT : EYES_RIGHT);