+
+ /* Fill _buffer from the backend MIDI port for this process cycle.
+    (NOTE(review): this is the tail of a larger method — signature not
+    visible in this hunk; presumably MidiPort::get_midi_buffer — confirm.) */
+
+ if (receives_input ()) {
+
+ if (_input_active) {
+
+ /* Raw backend event buffer for this cycle. */
+ void* buffer = port_engine.get_buffer (_port_handle, nframes);
+ const pframes_t event_count = port_engine.get_midi_event_count (buffer);
+
+ /* suck all relevant MIDI events from the MIDI port buffer
+ into our MidiBuffer
+ */
+
+ for (pframes_t i = 0; i < event_count; ++i) {
+
+ pframes_t timestamp;
+ size_t size;
+ uint8_t* buf;
+
+ /* timestamp/size are filled in by reference; buf points into the
+    backend buffer (not owned by us). */
+ port_engine.midi_event_get (timestamp, size, &buf, buffer, i);
+
+ /* NOTE(review): buf[0] and (below) buf[2] are read without checking
+    size — a zero-length or truncated (1–2 byte) event would read past
+    the event data. Assumes the backend only delivers complete
+    messages — TODO confirm. */
+ if (buf[0] == 0xfe) {
+ /* throw away active sensing */
+ continue;
+ } else if ((buf[0] & 0xF0) == 0x90 && buf[2] == 0) {
+ /* normalize note on with velocity 0 to proper note off */
+ buf[0] = 0x80 | (buf[0] & 0x0F); /* note off */
+ buf[2] = 0x40; /* default velocity */
+ }
+
+ /* check that the event is in the acceptable time range */
+
+ if ((timestamp >= (_global_port_buffer_offset + _port_buffer_offset)) &&
+ (timestamp < (_global_port_buffer_offset + _port_buffer_offset + nframes))) {
+ _buffer->push_back (timestamp, size, buf);
+ } else {
+ /* NOTE(review): the "offset=" printed here omits _port_buffer_offset,
+    while the range check above includes it — the logged lower bound
+    understates the actual acceptance window. */
+ cerr << "Dropping incoming MIDI at time " << timestamp << "; offset="
+ << _global_port_buffer_offset << " limit="
+ << (_global_port_buffer_offset + _port_buffer_offset + nframes) << "\n";
+ }
+ }
+
+ } else {
+ /* Input disabled: present silence instead of stale data. */
+ _buffer->silence (nframes);
+ }
+
+ } else {
+ /* Port does not receive input at all: buffer is always silent. */
+ _buffer->silence (nframes);
+ }
+
+ if (nframes) {
+ /* Mark the buffer as resolved for this cycle so later callers
+    reuse it rather than re-reading the backend buffer (presumably —
+    the flag's consumer is outside this hunk). */
+ _has_been_mixed_down = true;
+ }
+
+ return *_buffer;