+ maybe_set (_delay, d, AudioContentProperty::DELAY);
+}
+
+/** @return a short technical description of our audio (channel count and
+ * sample rate of each stream), suitable for logging.
+ */
+string
+AudioContent::technical_summary () const
+{
+ string summary = "audio: ";
+ BOOST_FOREACH (AudioStreamPtr stream, streams ()) {
+ summary += String::compose ("stream channels %1 rate %2 ", stream->channels(), stream->frame_rate());
+ }
+
+ return summary;
+}
+
+/** Set the mappings of all our streams from a single merged AudioMapping
+ * which covers all of their input channels (the form returned by mapping()).
+ * The merged mapping is sliced up so that each stream receives a mapping of
+ * just its own channels.
+ * @param mapping Merged mapping; its input channels are numbered consecutively
+ * across streams, in the order that streams() returns them.
+ */
+void
+AudioContent::set_mapping (AudioMapping mapping)
+{
+ /* Emit the STREAMS change signal when cc goes out of scope */
+ ChangeSignaller<Content> cc (_parent, AudioContentProperty::STREAMS);
+
+ /* c indexes input channels of the merged mapping across all streams */
+ int c = 0;
+ BOOST_FOREACH (AudioStreamPtr i, streams ()) {
+ /* Build this stream's own mapping from its slice of the merged one */
+ AudioMapping stream_mapping (i->channels (), MAX_DCP_AUDIO_CHANNELS);
+ for (int j = 0; j < i->channels(); ++j) {
+ for (int k = 0; k < MAX_DCP_AUDIO_CHANNELS; ++k) {
+ stream_mapping.set (j, k, mapping.get (c, k));
+ }
+ ++c;
+ }
+ i->set_mapping (stream_mapping);
+ }
+}
+
+/** @return a single merged AudioMapping made by stacking the mappings of all
+ * our streams; input channels are numbered consecutively across streams, in
+ * the order that streams() returns them.  This is the inverse of set_mapping().
+ */
+AudioMapping
+AudioContent::mapping () const
+{
+ /* Total input channels across all streams */
+ int channels = 0;
+ BOOST_FOREACH (AudioStreamPtr i, streams ()) {
+ channels += i->channels ();
+ }
+
+ AudioMapping merged (channels, MAX_DCP_AUDIO_CHANNELS);
+ merged.make_zero ();
+
+ /* c indexes input channels of the merged mapping across all streams */
+ int c = 0;
+ BOOST_FOREACH (AudioStreamPtr i, streams ()) {
+ AudioMapping mapping = i->mapping ();
+ for (int j = 0; j < mapping.input_channels(); ++j) {
+ for (int k = 0; k < MAX_DCP_AUDIO_CHANNELS; ++k) {
+ /* Guard in case this stream's mapping has fewer outputs than ours */
+ if (k < mapping.output_channels()) {
+ merged.set (c, k, mapping.get (j, k));
+ }
+ }
+ ++c;
+ }
+ }
+
+ return merged;
+}
+
+/** @return the frame rate that this content should be resampled to in order
+ * that it is in sync with the active video content at its start time.
+ * @param film Film that this content is part of.
+ */
+int
+AudioContent::resampled_frame_rate (shared_ptr<const Film> film) const
+{
+ /* Start from the DCP-wide audio sample rate */
+ double t = film->audio_frame_rate ();
+
+ FrameRateChange frc (film, _parent);
+
+ /* Compensate if the DCP is being run at a different frame rate
+ to the source; that is, if the video is run such that it will
+ look different in the DCP compared to the source (slower or faster).
+ */
+
+ if (frc.change_speed) {
+ t /= frc.speed_up;
+ }
+
+ /* Round to the nearest integer sample rate */
+ return lrint (t);
+}
+
+/** @return a user-visible summary of what resampling (if any) will be applied
+ * to our streams to reach the DCP's audio sample rate, or an empty string if
+ * we have no streams.
+ * @param film Film that this content is part of.
+ */
+string
+AudioContent::processing_description (shared_ptr<const Film> film) const
+{
+ if (streams().empty ()) {
+ return "";
+ }
+
+ /* Possible answers are:
+ 1. all audio will be resampled from x to y.
+ 2. all audio will be resampled to y (from a variety of rates)
+ 3. some audio will be resampled to y (from a variety of rates)
+ 4. nothing will be resampled.
+ */
+
+ /* Whether at least one stream is already at the target rate */
+ bool not_resampled = false;
+ /* Whether at least one stream needs resampling */
+ bool resampled = false;
+ /* Whether all streams share the same source rate */
+ bool same = true;
+
+ optional<int> common_frame_rate;
+ BOOST_FOREACH (AudioStreamPtr i, streams()) {
+ if (i->frame_rate() != resampled_frame_rate(film)) {
+ resampled = true;
+ } else {
+ not_resampled = true;
+ }
+
+ if (common_frame_rate && common_frame_rate != i->frame_rate ()) {
+ same = false;
+ }
+ common_frame_rate = i->frame_rate ();
+ }
+
+ /* Case 4: nothing will be resampled */
+ if (not_resampled && !resampled) {
+ return _("Audio will not be resampled");
+ }
+
+ /* Case 3: some (but not all) streams will be resampled */
+ if (not_resampled && resampled) {
+ return String::compose (_("Some audio will be resampled to %1Hz"), resampled_frame_rate(film));
+ }
+
+ /* Cases 1 and 2: everything will be resampled */
+ if (!not_resampled && resampled) {
+ if (same) {
+ return String::compose (_("Audio will be resampled from %1Hz to %2Hz"), common_frame_rate.get(), resampled_frame_rate(film));
+ } else {
+ return String::compose (_("Audio will be resampled to %1Hz"), resampled_frame_rate(film));
+ }
+ }
+
+ return "";
+}
+
+/** @return User-visible names of each of our audio channels, in the form
+ * <stream-number>:<channel-number> with both numbers 1-based.
+ */
+vector<string>
+AudioContent::channel_names () const
+{
+ vector<string> names;
+
+ int stream_index = 1;
+ BOOST_FOREACH (AudioStreamPtr stream, streams ()) {
+ for (int channel = 0; channel < stream->channels(); ++channel) {
+ names.push_back (String::compose ("%1:%2", stream_index, channel + 1));
+ }
+ ++stream_index;
+ }
+
+ return names;
+}
+
+/** Add user-visible properties (channel count, sample rates, lengths) to a list.
+ * Stream-specific properties are only added when we have exactly one stream.
+ * @param film Film that this content is part of.
+ * @param p List to add UserProperty objects to.
+ */
+void
+AudioContent::add_properties (shared_ptr<const Film> film, list<UserProperty>& p) const
+{
+ /* Only describe stream-level detail if there is exactly one stream */
+ shared_ptr<const AudioStream> stream;
+ if (streams().size() == 1) {
+ stream = streams().front ();
+ }
+
+ if (stream) {
+ p.push_back (UserProperty (UserProperty::AUDIO, _("Channels"), stream->channels ()));
+ p.push_back (UserProperty (UserProperty::AUDIO, _("Content audio sample rate"), stream->frame_rate(), _("Hz")));
+ }
+
+ /* Full length of the content, expressed as a ContentTime via the
+ content-to-DCP frame rate change.
+ */
+ FrameRateChange const frc (_parent->active_video_frame_rate(film), film->video_frame_rate());
+ ContentTime const c (_parent->full_length(film), frc);
+
+ p.push_back (
+ UserProperty (UserProperty::LENGTH, _("Full length in video frames at content rate"), c.frames_round(frc.source))
+ );
+
+ if (stream) {
+ p.push_back (
+ UserProperty (
+ UserProperty::LENGTH,
+ _("Full length in audio samples at content rate"),
+ c.frames_round (stream->frame_rate ())
+ )
+ );
+ }
+
+ p.push_back (UserProperty (UserProperty::AUDIO, _("DCP sample rate"), resampled_frame_rate(film), _("Hz")));
+ p.push_back (UserProperty (UserProperty::LENGTH, _("Full length in video frames at DCP rate"), c.frames_round (frc.dcp)));
+
+ if (stream) {
+ p.push_back (
+ UserProperty (
+ UserProperty::LENGTH,
+ _("Full length in audio samples at DCP rate"),
+ c.frames_round(resampled_frame_rate(film))
+ )
+ );
+ }
+}
+
+void
+AudioContent::set_streams (vector<AudioStreamPtr> streams)
+{
+ ChangeSignaller<Content> cc (_parent, AudioContentProperty::STREAMS);
+