+ OSErr err;
+
+ if (requires_fixed_size_buffers() && (nframes != _last_nframes)) {
+ unit->GlobalReset();
+ _last_nframes = nframes;
+ }
+
+ DEBUG_TRACE (DEBUG::AudioUnits, string_compose ("%1 in %2 out %3 MIDI %4 bufs %5 (available %6)\n",
+ name(), input_channels, output_channels, _has_midi_input,
+ bufs.count(), bufs.available()));
+
+ /* the apparent number of buffers matches our input configuration, but we know that the bufferset
+ has the capacity to handle our outputs.
+ */
+
+ assert (bufs.available() >= ChanCount (DataType::AUDIO, output_channels));
+
+ input_buffers = &bufs;
+ input_maxbuf = bufs.count().n_audio(); // number of input audio buffers
+ input_offset = offset;
+ cb_offset = 0;
+
+ buffers->mNumberBuffers = output_channels;
+
+ for (int32_t i = 0; i < output_channels; ++i) {
+ buffers->mBuffers[i].mNumberChannels = 1;
+ buffers->mBuffers[i].mDataByteSize = nframes * sizeof (Sample);
+ /* setting this to 0 indicates to the AU that it can provide buffers here
+ if necessary. if it can process in-place, it will use the buffers provided
+ as input by ::render_callback() above.
+
+ a non-null values tells the plugin to render into the buffer pointed
+ at by the value.
+ */
+ buffers->mBuffers[i].mData = 0;
+ }
+
+ if (_has_midi_input) {
+
+ uint32_t nmidi = bufs.count().n_midi();
+
+ for (uint32_t i = 0; i < nmidi; ++i) {
+
+ /* one MIDI port/buffer only */
+
+ MidiBuffer& m = bufs.get_midi (i);
+
+ for (MidiBuffer::iterator i = m.begin(); i != m.end(); ++i) {
+ Evoral::MIDIEvent<framepos_t> ev (*i);
+
+ if (ev.is_channel_event()) {
+ const uint8_t* b = ev.buffer();
+ DEBUG_TRACE (DEBUG::AudioUnits, string_compose ("%1: MIDI event %2\n", name(), ev));
+ unit->MIDIEvent (b[0], b[1], b[2], ev.time());
+ }
+
+ /* XXX need to handle sysex and other message types */
+ }
+ }
+ }
+
+ /* does this really mean anything ?
+ */
+
+ ts.mSampleTime = frames_processed;
+ ts.mFlags = kAudioTimeStampSampleTimeValid;
+
+ DEBUG_TRACE (DEBUG::AudioUnits, string_compose ("%1 render flags=%2 time=%3 nframes=%4 buffers=%5\n",
+ name(), flags, frames_processed, nframes, buffers->mNumberBuffers));
+
+ if ((err = unit->Render (&flags, &ts, 0, nframes, buffers)) == noErr) {
+
+ input_maxbuf = 0;
+ frames_processed += nframes;
+
+ DEBUG_TRACE (DEBUG::AudioUnits, string_compose ("%1 rendered %2 buffers of %3\n",
+ name(), buffers->mNumberBuffers, output_channels));
+
+ int32_t limit = min ((int32_t) buffers->mNumberBuffers, output_channels);
+ int32_t i;
+
+ for (i = 0; i < limit; ++i) {
+ Sample* expected_buffer_address= bufs.get_audio (i).data (offset);
+ if (expected_buffer_address != buffers->mBuffers[i].mData) {
+ /* plugin provided its own buffer for output so copy it back to where we want it
+ */
+ memcpy (expected_buffer_address, buffers->mBuffers[i].mData, nframes * sizeof (Sample));
+ }
+ }
+
+ /* now silence any buffers that were passed in but the that the plugin
+ did not fill/touch/use.
+ */
+
+ for (;i < output_channels; ++i) {
+ memset (bufs.get_audio (i).data (offset), 0, nframes * sizeof (Sample));
+ }
+
+ return 0;
+ }
+
+ error << string_compose (_("AU: render error for %1, status = %2"), name(), err) << endmsg;
+ return -1;
+}
+
+OSStatus
+AUPlugin::get_beat_and_tempo_callback (Float64* outCurrentBeat,
+				       Float64* outCurrentTempo)
+{
+	TempoMap& tmap (_session.tempo_map());
+
+	DEBUG_TRACE (DEBUG::AudioUnits, "AU calls ardour beat&tempo callback\n");
+
+	/* more than 1 meter or more than 1 tempo means that a simplistic computation
+	   (and interpretation) of a beat position will be incorrect. So refuse to
+	   offer the value.
+	*/
+
+	if (tmap.n_tempos() > 1 || tmap.n_meters() > 1) {
+		return kAudioUnitErr_CannotDoInCurrentContext;
+	}
+
+	Timecode::BBT_Time bbt;
+	TempoMetric metric = tmap.metric_at (_session.transport_frame() + input_offset);
+	tmap.bbt_time (_session.transport_frame() + input_offset, bbt);
+
+	if (outCurrentBeat) {
+		/* bbt.bars is 1-based, so bar N begins at beat
+		   divisions_per_bar * (N - 1). Using bbt.bars directly was off
+		   by one whole bar and disagreed with the downbeat value
+		   computed in get_musical_time_location_callback(). Use a
+		   double throughout since the destination is a Float64.
+		*/
+		double beat;
+		beat = metric.meter().divisions_per_bar() * (bbt.bars - 1);
+		beat += bbt.beats;
+		beat += bbt.ticks / Timecode::BBT_Time::ticks_per_beat;
+		*outCurrentBeat = beat;
+	}
+
+	if (outCurrentTempo) {
+		/* report the tempo exactly: flooring it would misreport any
+		   fractional BPM (e.g. 119.5) to the plugin.
+		*/
+		*outCurrentTempo = metric.tempo().beats_per_minute();
+	}
+
+	return noErr;
+
+}
+
+OSStatus
+AUPlugin::get_musical_time_location_callback (UInt32* outDeltaSampleOffsetToNextBeat,
+					      Float32* outTimeSig_Numerator,
+					      UInt32* outTimeSig_Denominator,
+					      Float64* outCurrentMeasureDownBeat)
+{
+	TempoMap& tmap (_session.tempo_map());
+
+	DEBUG_TRACE (DEBUG::AudioUnits, "AU calls ardour music time location callback\n");
+
+	/* the simple-minded beat arithmetic below is only meaningful when the
+	   session has exactly one tempo and one meter. if there are more of
+	   either, decline to provide any of these values.
+	*/
+
+	if (tmap.n_tempos() > 1 || tmap.n_meters() > 1) {
+		return kAudioUnitErr_CannotDoInCurrentContext;
+	}
+
+	/* evaluate tempo/meter and BBT at the position currently being rendered */
+
+	const framepos_t pos = _session.transport_frame() + input_offset;
+	TempoMetric metric = tmap.metric_at (pos);
+	Timecode::BBT_Time where;
+	tmap.bbt_time (pos, where);
+
+	if (outDeltaSampleOffsetToNextBeat) {
+		if (where.ticks != 0) {
+			/* fraction of a beat remaining, scaled by frames per beat */
+			*outDeltaSampleOffsetToNextBeat = (UInt32)
+				floor (((Timecode::BBT_Time::ticks_per_beat - where.ticks)/Timecode::BBT_Time::ticks_per_beat) *
+				       metric.tempo().frames_per_beat (_session.frame_rate()));
+		} else {
+			/* exactly on a beat */
+			*outDeltaSampleOffsetToNextBeat = 0;
+		}
+	}
+
+	if (outTimeSig_Numerator) {
+		*outTimeSig_Numerator = (UInt32) lrintf (metric.meter().divisions_per_bar());
+	}
+
+	if (outTimeSig_Denominator) {
+		*outTimeSig_Denominator = (UInt32) lrintf (metric.meter().note_divisor());
+	}
+
+	if (outCurrentMeasureDownBeat) {
+
+		/* beat at which the current bar starts. bars are 1-based, so:
+		   1|1|0 -> 1
+		   2|1|0 -> 1 + divisions_per_bar
+		   3|1|0 -> 1 + (2 * divisions_per_bar)
+		   and so on.
+		*/
+
+		*outCurrentMeasureDownBeat = 1 + metric.meter().divisions_per_bar() * (where.bars - 1);
+	}
+
+	return noErr;
+}
+
+OSStatus
+AUPlugin::get_transport_state_callback (Boolean* outIsPlaying,
+ Boolean* outTransportStateChanged,
+ Float64* outCurrentSampleInTimeLine,
+ Boolean* outIsCycling,
+ Float64* outCycleStartBeat,
+ Float64* outCycleEndBeat)
+{
+ bool rolling;
+ float speed;
+
+ DEBUG_TRACE (DEBUG::AudioUnits, "AU calls ardour transport state callback\n");
+
+ rolling = _session.transport_rolling();
+ speed = _session.transport_speed ();
+
+ if (outIsPlaying) {
+ *outIsPlaying = _session.transport_rolling();
+ }
+
+ if (outTransportStateChanged) {
+ if (rolling != last_transport_rolling) {
+ *outTransportStateChanged = true;
+ } else if (speed != last_transport_speed) {
+ *outTransportStateChanged = true;
+ } else {
+ *outTransportStateChanged = false;
+ }
+ }
+
+ if (outCurrentSampleInTimeLine) {
+ /* this assumes that the AU can only call this host callback from render context,
+ where input_offset is valid.
+ */
+ *outCurrentSampleInTimeLine = _session.transport_frame() + input_offset;
+ }
+
+ if (outIsCycling) {
+ Location* loc = _session.locations()->auto_loop_location();
+
+ *outIsCycling = (loc && _session.transport_rolling() && _session.get_play_loop());
+
+ if (*outIsCycling) {
+
+ if (outCycleStartBeat || outCycleEndBeat) {
+
+ TempoMap& tmap (_session.tempo_map());