#include "pbd/pathscanner.h"
#include "pbd/locale_guard.h"
-#include <glibmm/thread.h>
+#include <glibmm/threads.h>
#include <glibmm/fileutils.h>
#include <glibmm/miscutils.h>
+#include <glib/gstdio.h>
#include "ardour/ardour.h"
#include "ardour/audioengine.h"
#include "ardour/filesystem_paths.h"
#include "ardour/io.h"
#include "ardour/audio_unit.h"
+#include "ardour/route.h"
#include "ardour/session.h"
#include "ardour/tempo.h"
#include "ardour/utils.h"
#include <CoreServices/CoreServices.h>
#include <AudioUnit/AudioUnit.h>
#include <AudioToolbox/AudioUnitUtilities.h>
+#ifdef WITH_CARBON
+#include <Carbon/Carbon.h>
+#endif
#include "i18n.h"
using namespace PBD;
using namespace ARDOUR;
-#ifndef AU_STATE_SUPPORT
-static bool seen_get_state_message = false;
-static bool seen_set_state_message = false;
-static bool seen_loading_message = false;
-static bool seen_saving_message = false;
-#endif
-
AUPluginInfo::CachedInfoMap AUPluginInfo::cached_info;
static string preset_search_path = "/Library/Audio/Presets:/Network/Library/Audio/Presets";
, _current_block_size (0)
, _requires_fixed_size_buffers (false)
, buffers (0)
- , current_maxbuf (0)
- , current_offset (0)
- , current_buffers (0)
+ , input_maxbuf (0)
+ , input_offset (0)
+ , input_buffers (0)
, frames_processed (0)
+ , _parameter_listener (0)
+ , _parameter_listener_arg (0)
, last_transport_rolling (false)
, last_transport_speed (0.0)
{
, _last_nframes (0)
, _requires_fixed_size_buffers (false)
, buffers (0)
- , current_maxbuf (0)
- , current_offset (0)
- , current_buffers (0)
+ , input_maxbuf (0)
+ , input_offset (0)
+ , input_buffers (0)
, frames_processed (0)
+ , _parameter_listener (0)
+ , _parameter_listener_arg (0)
{
init ();
AUPlugin::~AUPlugin ()
{
+ if (_parameter_listener) {
+ AUListenerDispose (_parameter_listener);
+ _parameter_listener = 0;
+ }
+
if (unit) {
DEBUG_TRACE (DEBUG::AudioUnits, "about to call uninitialize in plugin destructor\n");
unit->Uninitialize ();
DEBUG_TRACE (DEBUG::AudioUnits, "set render callback in input scope\n");
if ((err = unit->SetProperty (kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input,
0, (void*) &renderCallbackInfo, sizeof(renderCallbackInfo))) != 0) {
- cerr << "cannot install render callback (err = " << err << ')' << endl;
+ error << string_compose (_("cannot install render callback (err = %1)"), err) << endmsg;
throw failed_constructor();
}
}
throw failed_constructor();
}
+ create_parameter_listener (AUPlugin::_parameter_change_listener, this, 0.05);
discover_parameters ();
discover_factory_presets ();
d.max_unbound = 0; // upper is bound
descriptors.push_back (d);
+
+ uint32_t last_param = descriptors.size() - 1;
+ parameter_map.insert (pair<uint32_t,uint32_t> (d.id, last_param));
+ listen_to_parameter (last_param);
}
}
}
/* too close to the end for \xNN parsing: treat as literal characters */
- cerr << "Parse " << cstr << " as a literal \\" << endl;
nascent[in] = *cstr;
++cstr;
++in;
} else {
/* treat as literal characters */
- cerr << "Parse " << cstr << " as a literal \\" << endl;
nascent[in] = *cstr;
++cstr;
++in;
return;
}
+ if (get_parameter(which) == val) {
+ return;
+ }
+
const AUParameterDescriptor& d (descriptors[which]);
DEBUG_TRACE (DEBUG::AudioUnits, string_compose ("set parameter %1 in scope %2 element %3 to %4\n", d.id, d.scope, d.element, val));
unit->SetParameter (d.id, d.scope, d.element, val);
float val = 0.0;
if (which < descriptors.size()) {
const AUParameterDescriptor& d (descriptors[which]);
- DEBUG_TRACE (DEBUG::AudioUnits, string_compose ("get value of parameter %1 in scope %2 element %3\n", d.id, d.scope, d.element));
+ // DEBUG_TRACE (DEBUG::AudioUnits, string_compose ("get value of parameter %1 in scope %2 element %3\n", d.id, d.scope, d.element));
unit->GetParameter(d.id, d.scope, d.element, val);
}
return val;
DEBUG_TRACE (DEBUG::AudioUnits, string_compose ("set MaximumFramesPerSlice in global scope to %1\n", numFrames));
if ((err = unit->SetProperty (kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global,
0, &numFrames, sizeof (numFrames))) != noErr) {
- cerr << "cannot set max frames (err = " << err << ')' << endl;
+ error << string_compose (_("AU: cannot set max frames (err = %1)"), err) << endmsg;
return -1;
}
}
bool
-AUPlugin::can_support_io_configuration (const ChanCount& in, ChanCount& out) const
+AUPlugin::can_support_io_configuration (const ChanCount& in, ChanCount& out)
{
// Note: We never attempt to multiply-instantiate plugins to meet io configurations.
vector<pair<int,int> >& io_configs = pinfo->cache.io_configs;
- DEBUG_TRACE (DEBUG::AudioUnits, string_compose ("%1 has %2 IO configurations\n", name(), io_configs.size()));
+ DEBUG_TRACE (DEBUG::AudioUnits, string_compose ("%1 has %2 IO configurations, looking for %3 in, %4 out\n",
+ name(), io_configs.size(), in, out));
//Ardour expects the plugin to tell it the output
//configuration but AU plugins can have multiple I/O
int32_t possible_out = i->second;
if ((possible_in == audio_in) && (possible_out == audio_out)) {
- DEBUG_TRACE (DEBUG::AudioUnits, string_compose ("\tCHOSEN: in %1 out %2\n", in, out));
+ DEBUG_TRACE (DEBUG::AudioUnits, string_compose ("\tCHOSEN: %1 in %2 out to match in %3 out %4\n",
+ possible_in, possible_out,
+ in, out));
+
+ out.set (DataType::MIDI, 0);
+ out.set (DataType::AUDIO, audio_out);
+
return 1;
}
}
audio_out = 2;
found = true;
} else if (possible_out < -2) {
- /* explicitly variable number of outputs, pick maximum */
- audio_out = -possible_out;
+ /* explicitly variable number of outputs.
+
+ Since Ardour can handle any configuration,
+ we have to somehow pick a number.
+
+ We'll use the number of inputs
+ to the master bus, or 2 if there
+ is no master bus.
+ */
+ boost::shared_ptr<Route> master = _session.master_out();
+ if (master) {
+ audio_out = master->input()->n_ports().n_audio();
+ } else {
+ audio_out = 2;
+ }
found = true;
} else {
/* exact number of outputs */
/* request is too large */
}
+
if (possible_out == -1) {
/* any output configuration possible, provide stereo out */
audio_out = 2;
audio_out = 2;
found = true;
} else if (possible_out < -2) {
- /* explicitly variable number of outputs, pick maximum */
- audio_out = -possible_out;
+ /* explicitly variable number of outputs.
+
+ Since Ardour can handle any configuration,
+ we have to somehow pick a number.
+
+ We'll use the number of inputs
+ to the master bus, or 2 if there
+ is no master bus.
+ */
+ boost::shared_ptr<Route> master = _session.master_out();
+ if (master) {
+ audio_out = master->input()->n_ports().n_audio();
+ } else {
+ audio_out = 2;
+ }
found = true;
} else {
/* exact number of outputs */
{
/* not much to do with audio - the data is already in the buffers given to us in connect_and_run() */
- DEBUG_TRACE (DEBUG::AudioUnits, string_compose ("%1: render callback, frames %2 bufs %3\n",
- name(), inNumberFrames, ioData->mNumberBuffers));
+ // DEBUG_TRACE (DEBUG::AudioUnits, string_compose ("%1: render callback, frames %2 bufs %3\n",
+ // name(), inNumberFrames, ioData->mNumberBuffers));
- if (current_maxbuf == 0) {
+ if (input_maxbuf == 0) {
error << _("AUPlugin: render callback called illegally!") << endmsg;
return kAudioUnitErr_CannotDoInCurrentContext;
}
- uint32_t limit = min ((uint32_t) ioData->mNumberBuffers, current_maxbuf);
+ uint32_t limit = min ((uint32_t) ioData->mNumberBuffers, input_maxbuf);
for (uint32_t i = 0; i < limit; ++i) {
ioData->mBuffers[i].mNumberChannels = 1;
passed to PluginInsert::connect_and_run()
*/
- ioData->mBuffers[i].mData = current_buffers->get_audio (i).data (cb_offset + current_offset);
+ ioData->mBuffers[i].mData = input_buffers->get_audio (i).data (cb_offset + input_offset);
}
cb_offset += inNumberFrames;
_last_nframes = nframes;
}
+ DEBUG_TRACE (DEBUG::AudioUnits, string_compose ("%1 in %2 out %3 MIDI %4 bufs %5 (available %6)\n",
+ name(), input_channels, output_channels, _has_midi_input,
+ bufs.count(), bufs.available()));
+
/* the apparent number of buffers matches our input configuration, but we know that the bufferset
has the capacity to handle our outputs.
*/
+
assert (bufs.available() >= ChanCount (DataType::AUDIO, output_channels));
- current_buffers = &bufs;
- current_maxbuf = bufs.count().n_audio(); // number of input audio buffers
- current_offset = offset;
+ input_buffers = &bufs;
+ input_maxbuf = bufs.count().n_audio(); // number of input audio buffers
+ input_offset = offset;
cb_offset = 0;
- DEBUG_TRACE (DEBUG::AudioUnits, string_compose ("%1 in %2 out %3 MIDI %4 bufs %5 (available %6)\n",
- name(), input_channels, output_channels, _has_midi_input,
- bufs.count(), bufs.available()));
-
buffers->mNumberBuffers = output_channels;
- for (uint32_t i = 0; i < output_channels; ++i) {
+ for (int32_t i = 0; i < output_channels; ++i) {
buffers->mBuffers[i].mNumberChannels = 1;
buffers->mBuffers[i].mDataByteSize = nframes * sizeof (Sample);
/* setting this to 0 indicates to the AU that it can provide buffers here
if necessary. if it can process in-place, it will use the buffers provided
- as input by ::render_callback() above. no documentation on what setting it
- to a non-null value means.
+ as input by ::render_callback() above.
+
+ a non-null value tells the plugin to render into the buffer pointed
+ at by the value.
*/
buffers->mBuffers[i].mData = 0;
}
ts.mSampleTime = frames_processed;
ts.mFlags = kAudioTimeStampSampleTimeValid;
- // DEBUG_TRACE (DEBUG::AudioUnits, string_compose ("%1 render flags=%2 time=%3 nframes=%4 buffers=%5\n",
- // name(), flags, frames_processed, nframes, buffers->mNumberBuffers));
+ DEBUG_TRACE (DEBUG::AudioUnits, string_compose ("%1 render flags=%2 time=%3 nframes=%4 buffers=%5\n",
+ name(), flags, frames_processed, nframes, buffers->mNumberBuffers));
if ((err = unit->Render (&flags, &ts, 0, nframes, buffers)) == noErr) {
- current_maxbuf = 0;
+ input_maxbuf = 0;
frames_processed += nframes;
DEBUG_TRACE (DEBUG::AudioUnits, string_compose ("%1 rendered %2 buffers of %3\n",
return 0;
}
- cerr << name() << " render status " << err << endl;
+ error << string_compose (_("AU: render error for %1, status = %2"), name(), err) << endmsg;
return -1;
}
}
Timecode::BBT_Time bbt;
- TempoMetric metric = tmap.metric_at (_session.transport_frame() + current_offset);
- tmap.bbt_time_with_metric (_session.transport_frame() + current_offset, bbt, metric);
+ TempoMetric metric = tmap.metric_at (_session.transport_frame() + input_offset);
+ tmap.bbt_time (_session.transport_frame() + input_offset, bbt);
if (outCurrentBeat) {
float beat;
- beat = metric.meter().beats_per_bar() * bbt.bars;
+ beat = metric.meter().divisions_per_bar() * bbt.bars;
beat += bbt.beats;
beat += bbt.ticks / Timecode::BBT_Time::ticks_per_beat;
*outCurrentBeat = beat;
}
Timecode::BBT_Time bbt;
- TempoMetric metric = tmap.metric_at (_session.transport_frame() + current_offset);
- tmap.bbt_time_with_metric (_session.transport_frame() + current_offset, bbt, metric);
+ TempoMetric metric = tmap.metric_at (_session.transport_frame() + input_offset);
+ tmap.bbt_time (_session.transport_frame() + input_offset, bbt);
if (outDeltaSampleOffsetToNextBeat) {
if (bbt.ticks == 0) {
} else {
*outDeltaSampleOffsetToNextBeat = (UInt32)
floor (((Timecode::BBT_Time::ticks_per_beat - bbt.ticks)/Timecode::BBT_Time::ticks_per_beat) * // fraction of a beat to next beat
- metric.tempo().frames_per_beat(_session.frame_rate(), metric.meter())); // frames per beat
+ metric.tempo().frames_per_beat (_session.frame_rate())); // frames per beat
}
}
if (outTimeSig_Numerator) {
- *outTimeSig_Numerator = (UInt32) lrintf (metric.meter().beats_per_bar());
+ *outTimeSig_Numerator = (UInt32) lrintf (metric.meter().divisions_per_bar());
}
if (outTimeSig_Denominator) {
*outTimeSig_Denominator = (UInt32) lrintf (metric.meter().note_divisor());
/* beat for the start of the bar.
1|1|0 -> 1
- 2|1|0 -> 1 + beats_per_bar
- 3|1|0 -> 1 + (2 * beats_per_bar)
+ 2|1|0 -> 1 + divisions_per_bar
+ 3|1|0 -> 1 + (2 * divisions_per_bar)
etc.
*/
- *outCurrentMeasureDownBeat = 1 + metric.meter().beats_per_bar() * (bbt.bars - 1);
+ *outCurrentMeasureDownBeat = 1 + metric.meter().divisions_per_bar() * (bbt.bars - 1);
}
return noErr;
if (outCurrentSampleInTimeLine) {
/* this assumes that the AU can only call this host callback from render context,
- where current_offset is valid.
+ where input_offset is valid.
*/
- *outCurrentSampleInTimeLine = _session.transport_frame() + current_offset;
+ *outCurrentSampleInTimeLine = _session.transport_frame() + input_offset;
}
if (outIsCycling) {
Timecode::BBT_Time bbt;
if (outCycleStartBeat) {
- TempoMetric metric = tmap.metric_at (loc->start() + current_offset);
- _session.tempo_map().bbt_time_with_metric (loc->start(), bbt, metric);
+ TempoMetric metric = tmap.metric_at (loc->start() + input_offset);
+ _session.tempo_map().bbt_time (loc->start(), bbt);
float beat;
- beat = metric.meter().beats_per_bar() * bbt.bars;
+ beat = metric.meter().divisions_per_bar() * bbt.bars;
beat += bbt.beats;
beat += bbt.ticks / Timecode::BBT_Time::ticks_per_beat;
}
if (outCycleEndBeat) {
- TempoMetric metric = tmap.metric_at (loc->end() + current_offset);
- _session.tempo_map().bbt_time_with_metric (loc->end(), bbt, metric);
+ TempoMetric metric = tmap.metric_at (loc->end() + input_offset);
+ _session.tempo_map().bbt_time (loc->end(), bbt);
float beat;
- beat = metric.meter().beats_per_bar() * bbt.bars;
+ beat = metric.meter().divisions_per_bar() * bbt.bars;
beat += bbt.beats;
beat += bbt.ticks / Timecode::BBT_Time::ticks_per_beat;
AUPlugin::add_state (XMLNode* root) const
{
LocaleGuard lg (X_("POSIX"));
-
-#ifdef AU_STATE_SUPPORT
CFDataRef xmlData;
CFPropertyListRef propertyList;
CFRelease (xmlData);
CFRelease (propertyList);
-#else
- if (!seen_get_state_message) {
- info << string_compose (_("Saving AudioUnit settings is not supported in this build of %1. Consider paying for a newer version"),
- PROGRAM_NAME)
- << endmsg;
- seen_get_state_message = true;
- }
-#endif
}
int
AUPlugin::set_state(const XMLNode& node, int version)
{
-#ifdef AU_STATE_SUPPORT
int ret = -1;
CFPropertyListRef propertyList;
LocaleGuard lg (X_("POSIX"));
return -1;
}
+#ifndef NO_PLUGIN_STATE
if (node.children().empty()) {
return -1;
}
}
CFRelease (propertyList);
}
+#endif
Plugin::set_state (node, version);
return ret;
-#else
- if (!seen_set_state_message) {
- info << string_compose (_("Restoring AudioUnit settings is not supported in this build of %1. Consider paying for a newer version"),
- PROGRAM_NAME)
- << endmsg;
- }
- return Plugin::set_state (node, version);
-#endif
}
bool
{
Plugin::load_preset (r);
-#ifdef AU_STATE_SUPPORT
bool ret = false;
CFPropertyListRef propertyList;
Glib::ustring path;
}
return ret;
-#else
- if (!seen_loading_message) {
- info << string_compose (_("Loading AudioUnit presets is not supported in this build of %1. Consider paying for a newer version"),
- PROGRAM_NAME)
- << endmsg;
- seen_loading_message = true;
- }
- return true;
-#endif
}
void
string
AUPlugin::do_save_preset (string preset_name)
{
-#ifdef AU_STATE_SUPPORT
CFPropertyListRef propertyList;
vector<Glib::ustring> v;
Glib::ustring user_preset_path;
- bool ret = true;
std::string m = maker();
std::string n = name();
if (g_mkdir_with_parents (user_preset_path.c_str(), 0775) < 0) {
error << string_compose (_("Cannot create user plugin presets folder (%1)"), user_preset_path) << endmsg;
- return false;
+ return string();
}
DEBUG_TRACE (DEBUG::AudioUnits, "get current preset\n");
if (unit->GetAUPreset (propertyList) != noErr) {
- return false;
+ return string();
}
// add the actual preset name */
if (save_property_list (propertyList, user_preset_path)) {
error << string_compose (_("Saving plugin state to %1 failed"), user_preset_path) << endmsg;
- ret = false;
+ return string();
}
CFRelease(propertyList);
return string ("file:///") + user_preset_path;
-#else
- if (!seen_saving_message) {
- info << string_compose (_("Saving AudioUnit presets is not supported in this build of %1. Consider paying for a newer version"),
- PROGRAM_NAME)
- << endmsg;
- seen_saving_message = true;
- }
- return string();
-#endif
}
//-----------------------------------------------------------------------------
{
string preset_name;
-#ifdef AU_STATE_SUPPORT
CFPropertyListRef propertyList;
DEBUG_TRACE (DEBUG::AudioUnits, "get current preset for current_preset()\n");
preset_name = get_preset_name_in_plist (propertyList);
CFRelease(propertyList);
}
-#endif
+
return preset_name;
}
void
AUPlugin::find_presets ()
{
-#ifdef AU_STATE_SUPPORT
vector<string*>* preset_files;
PathScanner scanner;
for (FactoryPresetMap::iterator i = factory_preset_map.begin(); i != factory_preset_map.end(); ++i) {
/* XXX: dubious */
string const uri = string_compose ("%1", _presets.size ());
- _presets.insert (make_pair (uri, Plugin::PresetRecord (uri, i->first)));
+ _presets.insert (make_pair (uri, Plugin::PresetRecord (uri, i->first, i->second)));
}
-
-#endif
}
bool
Glib::ustring
AUPluginInfo::au_cache_path ()
{
- return Glib::build_filename (ARDOUR::user_config_directory().to_string(), "au_cache");
+ return Glib::build_filename (ARDOUR::user_config_directory(), "au_cache");
}
PluginInfoList*
}
AUPluginInfo::get_names (temp, info->name, info->creator);
+ ARDOUR::PluginScanMessage(_("AU"), info->name, false);
info->type = ARDOUR::AudioUnit;
info->unique_id = stringify_descriptor (*info->descriptor);
if (!tree.write (path)) {
error << string_compose (_("could not save AU cache to %1"), path) << endmsg;
- unlink (path.c_str());
+ g_unlink (path.c_str());
}
}
AUPlugin::set_info (PluginInfoPtr info)
{
Plugin::set_info (info);
-
+
AUPluginInfoPtr pinfo = boost::dynamic_pointer_cast<AUPluginInfo>(get_info());
-
_has_midi_input = pinfo->needs_midi_input ();
_has_midi_output = false;
}
+
+/* Create the AUEventListener used to observe parameter changes made inside
+ * the plugin (e.g. from its own GUI).  @a cb is invoked with @a arg on the
+ * run loop of the calling thread; @a interval_secs is used for both the
+ * notification interval and the value-change granularity passed to
+ * AUEventListenerCreate.  Returns 0 on success, -1 on failure.
+ */
+int
+AUPlugin::create_parameter_listener (AUEventListenerProc cb, void* arg, float interval_secs)
+{
+#ifdef WITH_CARBON
+ /* Carbon builds: use the run loop associated with the Carbon event loop */
+ CFRunLoopRef run_loop = (CFRunLoopRef) GetCFRunLoopFromEventLoop(GetCurrentEventLoop());
+#else
+ CFRunLoopRef run_loop = CFRunLoopGetCurrent();
+#endif
+ CFStringRef loop_mode = kCFRunLoopDefaultMode;
+
+ if (AUEventListenerCreate (cb, arg, run_loop, loop_mode, interval_secs, interval_secs, &_parameter_listener) != noErr) {
+ return -1;
+ }
+
+ /* remembered so later Add/RemoveEventType calls pass the same user data */
+ _parameter_listener_arg = arg;
+
+ return 0;
+}
+
+/* Register the parameter listener for three event types on the parameter at
+ * index @a param_id (an index into `descriptors`): value changes plus
+ * begin/end change gestures.  Returns 0 on success, -1 if any
+ * AUEventListenerAddEventType call fails, -2 if there is no listener or the
+ * index is out of range.
+ */
+int
+AUPlugin::listen_to_parameter (uint32_t param_id)
+{
+ AudioUnitEvent event;
+
+ if (!_parameter_listener || param_id >= descriptors.size()) {
+ return -2;
+ }
+
+ /* value change */
+ event.mEventType = kAudioUnitEvent_ParameterValueChange;
+ event.mArgument.mParameter.mAudioUnit = unit->AU();
+ event.mArgument.mParameter.mParameterID = descriptors[param_id].id;
+ event.mArgument.mParameter.mScope = descriptors[param_id].scope;
+ event.mArgument.mParameter.mElement = descriptors[param_id].element;
+
+ if (AUEventListenerAddEventType (_parameter_listener, _parameter_listener_arg, &event) != noErr) {
+ return -1;
+ }
+
+ /* gesture start (e.g. user grabs a control in the plugin GUI) */
+ event.mEventType = kAudioUnitEvent_BeginParameterChangeGesture;
+ event.mArgument.mParameter.mAudioUnit = unit->AU();
+ event.mArgument.mParameter.mParameterID = descriptors[param_id].id;
+ event.mArgument.mParameter.mScope = descriptors[param_id].scope;
+ event.mArgument.mParameter.mElement = descriptors[param_id].element;
+
+ if (AUEventListenerAddEventType (_parameter_listener, _parameter_listener_arg, &event) != noErr) {
+ return -1;
+ }
+
+ /* gesture end */
+ event.mEventType = kAudioUnitEvent_EndParameterChangeGesture;
+ event.mArgument.mParameter.mAudioUnit = unit->AU();
+ event.mArgument.mParameter.mParameterID = descriptors[param_id].id;
+ event.mArgument.mParameter.mScope = descriptors[param_id].scope;
+ event.mArgument.mParameter.mElement = descriptors[param_id].element;
+
+ if (AUEventListenerAddEventType (_parameter_listener, _parameter_listener_arg, &event) != noErr) {
+ return -1;
+ }
+
+ return 0;
+}
+
+/* Inverse of listen_to_parameter(): unregister the listener for the three
+ * event types (value change, begin gesture, end gesture) on the parameter at
+ * index @a param_id.  Returns 0 on success, -1 if any
+ * AUEventListenerRemoveEventType call fails, -2 if there is no listener or
+ * the index is out of range.
+ */
+int
+AUPlugin::end_listen_to_parameter (uint32_t param_id)
+{
+ AudioUnitEvent event;
+
+ if (!_parameter_listener || param_id >= descriptors.size()) {
+ return -2;
+ }
+
+ /* value change */
+ event.mEventType = kAudioUnitEvent_ParameterValueChange;
+ event.mArgument.mParameter.mAudioUnit = unit->AU();
+ event.mArgument.mParameter.mParameterID = descriptors[param_id].id;
+ event.mArgument.mParameter.mScope = descriptors[param_id].scope;
+ event.mArgument.mParameter.mElement = descriptors[param_id].element;
+
+ if (AUEventListenerRemoveEventType (_parameter_listener, _parameter_listener_arg, &event) != noErr) {
+ return -1;
+ }
+
+ /* gesture start */
+ event.mEventType = kAudioUnitEvent_BeginParameterChangeGesture;
+ event.mArgument.mParameter.mAudioUnit = unit->AU();
+ event.mArgument.mParameter.mParameterID = descriptors[param_id].id;
+ event.mArgument.mParameter.mScope = descriptors[param_id].scope;
+ event.mArgument.mParameter.mElement = descriptors[param_id].element;
+
+ if (AUEventListenerRemoveEventType (_parameter_listener, _parameter_listener_arg, &event) != noErr) {
+ return -1;
+ }
+
+ /* gesture end */
+ event.mEventType = kAudioUnitEvent_EndParameterChangeGesture;
+ event.mArgument.mParameter.mAudioUnit = unit->AU();
+ event.mArgument.mParameter.mParameterID = descriptors[param_id].id;
+ event.mArgument.mParameter.mScope = descriptors[param_id].scope;
+ event.mArgument.mParameter.mElement = descriptors[param_id].element;
+
+ if (AUEventListenerRemoveEventType (_parameter_listener, _parameter_listener_arg, &event) != noErr) {
+ return -1;
+ }
+
+ return 0;
+}
+
+/* Static trampoline handed to AUEventListenerCreate(); @a arg is the
+ * AUPlugin instance, so just forward to the member function.
+ */
+void
+AUPlugin::_parameter_change_listener (void* arg, void* src, const AudioUnitEvent* event, UInt64 host_time, Float32 new_value)
+{
+ ((AUPlugin*) arg)->parameter_change_listener (arg, src, event, host_time, new_value);
+}
+
+/* Handle an AudioUnit parameter event: translate the AU parameter ID into
+ * our parameter index via parameter_map, then emit the matching Plugin
+ * signal (StartTouch/EndTouch for gestures, ParameterChanged for value
+ * changes).  Events for unmapped parameter IDs are silently ignored.
+ */
+void
+AUPlugin::parameter_change_listener (void* /*arg*/, void* /*src*/, const AudioUnitEvent* event, UInt64 /*host_time*/, Float32 new_value)
+{
+ ParameterMap::iterator i;
+
+ if ((i = parameter_map.find (event->mArgument.mParameter.mParameterID)) == parameter_map.end()) {
+ return;
+ }
+
+ switch (event->mEventType) {
+ case kAudioUnitEvent_BeginParameterChangeGesture:
+ StartTouch (i->second);
+ break;
+ case kAudioUnitEvent_EndParameterChangeGesture:
+ EndTouch (i->second);
+ break;
+ case kAudioUnitEvent_ParameterValueChange:
+ ParameterChanged (i->second, new_value);
+ break;
+ default:
+ break;
+ }
+}