opts = Options('scache.conf')
opts.AddOptions(
('ARCH', 'Set architecture-specific compilation flags by hand (all flags as 1 argument)',''),
+ BoolOption('AUDIOUNITS', 'Compile with Apple\'s AudioUnit library. (experimental)', 0),
BoolOption('COREAUDIO', 'Compile with Apple\'s CoreAudio library', 0),
BoolOption('DEBUG', 'Set to build with debugging information and no optimizations', 0),
PathOption('DESTDIR', 'Set the intermediate install "prefix"', '/'),
libraries['boost'] = conf.Finish ()
+conf = env.Configure ()
+
+# jack_port_ensure_monitor available
+
+if conf.CheckFunc('jack_port_ensure_monitor'):
+ env.Append(CCFLAGS='-DWITH_JACK_PORT_ENSURE_MONITOR')
+else:
+ print '\nWARNING: You need at least svn revision 985 of jack for hardware monitoring to work correctly.\n'
+
+env = conf.Finish()
+
#
# Check for liblo
libraries['dmalloc'] = conf.Finish ()
-#
-
#
# Audio/MIDI library (needed for MIDI, since audio is all handled via JACK)
#
connection_editor.cc
""")
-coreaudio_files=Split("""
+audiounit_files=Split("""
au_pluginui.cc
""")
extra_sources += vst_files
gtkardour.Append (CCFLAGS="-DVST_SUPPORT", CPPPATH="#libs/fst")
-if gtkardour['COREAUDIO']:
- extra_sources += coreaudio_files
- gtkardour.Append(CCFLAGS='-DHAVE_COREAUDIO')
+if gtkardour['AUDIOUNITS']:
+ extra_sources += audiounit_files
+ gtkardour.Append(CCFLAGS='-DHAVE_AUDIOUNITS')
gtkardour.Append(LINKFLAGS='-framework Carbon')
gtkardour.Merge([libraries['appleutility']])
<menuitem action='ToggleAutoReturn'/>
<menuitem action='ToggleClick'/>
<menuitem action='toggle-follow-playhead'/>
+ <menuitem action='ToggleVideoSync'/>
</menu>
</menu>
<menu name='Edit' action='Edit'>
void sync_option_changed ();
void toggle_time_master ();
+ void toggle_video_sync ();
enum ShuttleBehaviour {
Sprung,
ActionManager::session_sensitive_actions.push_back (act);
ActionManager::transport_sensitive_actions.push_back (act);
- act = ActionManager::register_toggle_action (transport_actions, X_("ToggleTimeMaster"), _("Master"), mem_fun(*this, &ARDOUR_UI::toggle_time_master));
+ ActionManager::register_toggle_action (transport_actions, X_("ToggleVideoSync"), _("Sync startup to video"), mem_fun(*this, &ARDOUR_UI::toggle_video_sync));
+ act = ActionManager::register_toggle_action (transport_actions, X_("ToggleTimeMaster"), _("Time master"), mem_fun(*this, &ARDOUR_UI::toggle_time_master));
ActionManager::session_sensitive_actions.push_back (act);
act = ActionManager::register_action (common_actions, X_("ToggleRecordEnableTrack1"), _("Toggle Record Enable Track1"), bind (mem_fun(*this, &ARDOUR_UI::toggle_record_enable), 0U));
std::vector<Glib::ustring> groups;
groups.push_back("options");
groups.push_back("Editor");
-
+ groups.push_back("Transport");
+
struct {
char* name;
bool (Configuration::*method)(void) const;
{ "MeterHoldShort", &Configuration::get_meter_hold_short, 'r' },
{ "MeterHoldMedium", &Configuration::get_meter_hold_medium, 'r' },
{ "MeterHoldLong", &Configuration::get_meter_hold_long, 'r' },
+ { "ToggleVideoSync", &Configuration::get_use_video_sync, 't' },
{ 0, 0, 0 }
};
Glib::RefPtr<Action> act = ActionManager::get_action (i->c_str(), options[n].name);
if (act) {
Glib::RefPtr<ToggleAction> tact = Glib::RefPtr<ToggleAction>::cast_dynamic(act);
- if (options[n].act_type == 't' || (options[n].act_type == 'r' && (Config->*(options[n].method))()))
- tact->set_active ((Config->*(options[n].method))());
+ if (options[n].act_type == 't' || options[n].act_type == 'r') {
+ if ((Config->*(options[n].method))()) {
+ tact->set_active (true);
+ } else {
+ tact->set_active (false);
+ }
+ }
continue;
}
}
toggle_session_state ("Transport", "TogglePunchOut", &Session::set_punch_out, &Session::get_punch_out);
}
+ void
+ARDOUR_UI::toggle_video_sync()
+{
+	/* Push the state of the Transport/ToggleVideoSync UI action into the
+	   global configuration ("use-video-sync"); the Session reacts to the
+	   resulting ParameterChanged signal by re-arming its wait for the
+	   JACK video frame offset. */
+	Glib::RefPtr<Action> act = ActionManager::get_action ("Transport", "ToggleVideoSync");
+	if (act) {
+		Glib::RefPtr<ToggleAction> tact = Glib::RefPtr<ToggleAction>::cast_dynamic(act);
+		Config->set_use_video_sync (tact->get_active());
+	}
+}
+
void
ARDOUR_UI::toggle_editing_space()
{
switch (sfdb.run()) {
case SoundFileOmega::ResponseImport:
- do_import (sfdb.get_paths(), sfdb.get_split(), mode, track, pos, prompt);
+ do_import (sfdb.get_paths(), sfdb.get_split(), sfdb.get_mode(), track, pos, prompt);
break;
case SoundFileOmega::ResponseEmbed:
- do_embed (sfdb.get_paths(), sfdb.get_split(), mode, track, pos, prompt);
+ do_embed (sfdb.get_paths(), sfdb.get_split(), sfdb.get_mode(), track, pos, prompt);
break;
default:
sources.push_back(source);
}
- catch (failed_constructor& err) {
- error << string_compose(_("could not open %1"), path) << endmsg;
- goto out;
- }
+ catch (failed_constructor& err) {
+ error << string_compose(_("could not open %1"), path) << endmsg;
+ goto out;
+ }
- ARDOUR_UI::instance()->flush_pending ();
+ ARDOUR_UI::instance()->flush_pending ();
}
if (sources.empty()) {
}
#endif
-#ifdef HAVE_COREAUDIO
+#ifdef HAVE_AUDIOUNIT
aumodel = ListStore::create(aucols);
au_display.set_model (aumodel);
au_display.append_column (_("Available plugins"), aucols.name);
}
#endif
-#ifdef HAVE_COREAUDIO
+#ifdef HAVE_AUDIOUNIT
notebook.pages().push_back (TabElem (auscroller, _("AudioUnit")));
#endif
}
#endif
-#ifdef HAVE_COREAUDIO
+#ifdef HAVE_AUDIOUNIT
au_display.signal_button_press_event().connect_notify (mem_fun(*this, &PluginSelector::row_clicked));
au_display.get_selection()->signal_changed().connect (mem_fun(*this, &PluginSelector::au_display_selection_changed));
#endif
vst_refiller ();
#endif
-#ifdef HAVE_COREAUDIO
+#ifdef HAVE_AUDIOUNIT
au_refiller ();
#endif
}
#endif
-#ifdef HAVE_COREAUDIO
+#ifdef HAVE_AUDIOUNIT
cp--;
if (cp == 0) {
#endif //VST_SUPPORT
-#ifdef HAVE_COREAUDIO
+#ifdef HAVE_AUDIOUNIT
void
PluginSelector::_au_refiller (void *arg)
current_selection = ARDOUR::AudioUnit;
}
-#endif //HAVE_COREAUDIO
+#endif //HAVE_AUDIOUNIT
void
PluginSelector::use_plugin (PluginInfoPtr pi)
#endif
break;
case ARDOUR::AudioUnit:
-#ifdef HAVE_COREAUDIO
+#ifdef HAVE_AUDIOUNIT
row = *(au_display.get_selection()->get_selected());
name = row[aucols.name];
pi = row[aucols.plugin];
#ifdef VST_SUPPORT
vst_refiller ();
#endif
-#ifdef HAVE_COREAUDIO
+#ifdef HAVE_AUDIOUNIT
au_refiller ();
#endif
}
void vst_display_selection_changed();
#endif // VST_SUPPORT
-#ifdef HAVE_COREAUDIO
+#ifdef HAVE_AUDIOUNIT
// page 3
struct AUColumns : public Gtk::TreeModel::ColumnRecord {
AUColumns () {
static void _au_refiller (void *);
void au_refiller ();
void au_display_selection_changed();
-#endif //HAVE_COREAUDIO
+#endif //HAVE_AUDIOUNIT
ARDOUR::PluginManager *manager;
#include "i18n.h"
-#ifdef HAVE_COREAUDIO
+#ifdef HAVE_AUDIOUNIT
#include "au_pluginui.h"
#endif
} else {
plugin_ui->show_all ();
}
-#ifdef HAVE_COREAUDIO
+#ifdef HAVE_AUDIOUNIT
} else if (type == ARDOUR::AudioUnit) {
AUPluginUI* plugin_ui;
if (plugin_insert->get_gui() == 0) {
void
SoundFileBox::field_edited (const Glib::ustring& str1, const Glib::ustring& str2)
{
- cout << "field_edited" << endl;
+ Gtk::TreeModel::Children rows(fields->children());
+ Gtk::TreeModel::Row row(rows[atoi(str1.c_str())]);
+
+ Library->set_field (path, row[label_columns.field], str2);
+
Library->save_changes ();
}
osc_files = [ 'osc.cc' ]
vst_files = [ 'vst_plugin.cc', 'session_vst.cc' ]
-coreaudio_files = [ 'audio_unit.cc', 'coreaudiosource.cc' ]
+audiounit_files = [ 'audio_unit.cc' ]
+coreaudio_files = [ 'coreaudiosource.cc' ]
extra_sources = [ ]
if ardour['VST']:
if conf.CheckCHeader('/System/Library/Frameworks/CoreMIDI.framework/Headers/CoreMIDI.h'):
ardour.Append(LINKFLAGS="-framework CoreMIDI")
-if conf.CheckCHeader('/System/Library/Frameworks/AudioToolbox.framework/Headers/ExtendedAudioFile.h') and ardour['COREAUDIO'] == 1:
+if conf.CheckCHeader('/System/Library/Frameworks/AudioUnit.framework/Headers/AudioUnit.h') and ardour['AUDIOUNITS']:
+ ardour.Append(CXXFLAGS="-DHAVE_AUDIOUNITS")
+ ardour.Append(LINKFLAGS="-framework AudioUnit")
+ extra_sources += audiounit_files
+
+if conf.CheckCHeader('/System/Library/Frameworks/AudioToolbox.framework/Headers/ExtendedAudioFile.h') and ardour['COREAUDIO']:
ardour.Append(CXXFLAGS="-DHAVE_COREAUDIO")
ardour.Append(LINKFLAGS="-framework AudioToolbox")
extra_sources += coreaudio_files
-
if env['CONFIG_ARCH'] == 'apple':
# this next line avoids issues with circular dependencies between libardour and libardour_cp.
if ardour['LIBLO']:
ardour.Merge ([ libraries['lo'] ])
-if ardour['COREAUDIO']:
+if ardour['COREAUDIO'] or ardour['AUDIOUNITS']:
ardour.Merge ([ libraries['appleutility'] ])
ardour.VersionBuild(['version.cc', 'ardour/version.h'], 'SConscript')
Default(libardour)
if env['NLS']:
- i18n (ardour, ardour_files + vst_files + coreaudio_files, env)
+ i18n (ardour, ardour_files + vst_files + coreaudio_files + audiounit_files, env)
env.Alias('install', env.Install(os.path.join(install_prefix, 'lib/ardour2'), libardour))
env.Alias('tarball', env.Distribute (env['DISTTREE'],
[ 'SConscript', 'i18n.h', 'gettext.h', 'sse_functions.s', 'sse_functions_64bit.s' ] +
- ardour_files + vst_files + coreaudio_files +
+ ardour_files + vst_files + coreaudio_files + audiounit_files +
glob.glob('po/*.po') + glob.glob('ardour/*.h')))
int usecs_per_cycle () const { return _usecs_per_cycle; }
+ bool get_sync_offset (jack_nframes_t& offset) const;
+
jack_nframes_t frames_since_cycle_start () {
if (!_running || !_jack) return 0;
return jack_frames_since_cycle_start (_jack);
XMLNode* control_protocol_state () { return _control_protocol_state; }
+ sigc::signal<void,const char*> ParameterChanged;
+
/* define accessor methods */
#undef CONFIG_VARIABLE
#undef CONFIG_VARIABLE_SPECIAL
#define CONFIG_VARIABLE(Type,var,name,value) \
Type get_##var () const { return var.get(); } \
- void set_##var (Type val) { var.set (val); var.set_is_user (user_configuration); }
+ void set_##var (Type val) { var.set (val); var.set_is_user (user_configuration); ParameterChanged (name); }
#define CONFIG_VARIABLE_SPECIAL(Type,var,name,value,mutator) \
Type get_##var () const { return var.get(); } \
- void set_##var (Type val) { var.set (val); var.set_is_user (user_configuration); }
+ void set_##var (Type val) { var.set (val); var.set_is_user (user_configuration); ParameterChanged (name); }
#include "ardour/configuration_vars.h"
#undef CONFIG_VARIABLE
#undef CONFIG_VARIABLE_SPECIAL
CONFIG_VARIABLE(bool, use_sw_monitoring, "use-sw-monitoring", false)
CONFIG_VARIABLE(bool, use_external_monitoring, "use-external-monitoring", true)
CONFIG_VARIABLE(bool, jack_time_master, "jack-time-master", true)
+CONFIG_VARIABLE(bool, use_video_sync, "use-video-sync", false)
CONFIG_VARIABLE(bool, trace_midi_input, "trace-midi-input", false)
CONFIG_VARIABLE(bool, trace_midi_output, "trace-midi-output", false)
CONFIG_VARIABLE(bool, plugins_stop_with_transport, "plugins-stop-with-transport", false)
}
void ensure_monitor_input (bool yn) {
+
+#ifdef WITH_JACK_PORT_ENSURE_MONITOR
jack_port_ensure_monitor (_port, yn);
+#else
+ jack_port_request_monitor(_port, yn);
+#endif
+
}
/*XXX completely bloody useless imho*/
#include <vector>
#include <boost/shared_ptr.hpp>
+#include <boost/enable_shared_from_this.hpp>
#include <pbd/undo.h>
#include <pbd/statefuldestructible.h>
mutable RegionEditState _first_edit;
};
-class Region : public PBD::StatefulDestructible, public StateManager
+class Region : public PBD::StatefulDestructible, public StateManager, public boost::enable_shared_from_this<Region>
{
public:
typedef std::vector<boost::shared_ptr<Source> > SourceList;
jack_nframes_t last_transport_start() const { return _last_roll_location; }
void goto_end () { request_locate (end_location->start(), false);}
void goto_start () { request_locate (start_location->start(), false); }
+ void set_session_start (jack_nframes_t start) { start_location->set_start(start); }
+ void set_session_end (jack_nframes_t end) { end_location->set_start(end); _end_location_is_free = false; }
void use_rf_shuttle_speed ();
void request_transport_speed (float speed);
void request_overwrite_buffer (Diskstream*);
jack_nframes_t current_end_frame() const { return end_location->start(); }
jack_nframes_t current_start_frame() const { return start_location->start(); }
jack_nframes_t frame_rate() const { return _current_frame_rate; }
- double frames_per_smpte_frame() const { return _frames_per_smpte_frame; }
jack_nframes_t frames_per_hour() const { return _frames_per_hour; }
+
+ double frames_per_smpte_frame() const { return _frames_per_smpte_frame; }
jack_nframes_t smpte_frames_per_hour() const { return _smpte_frames_per_hour; }
/* Locations */
MidiFeedback,
MidiControl,
TranzportControl,
- Feedback
+ Feedback,
+ SmpteMode,
};
sigc::signal<void,ControlType> ControlChanged;
float shuttle_speed_threshold;
float rf_speed;
float smpte_frames_per_second;
+ float video_pullup;
bool smpte_drop_frames;
AnyTime preroll;
AnyTime postroll;
jack_nframes_t transport_frame () const {return _transport_frame; }
jack_nframes_t audible_frame () const;
+ enum SmpteFormat {
+ smpte_23976,
+ smpte_24,
+ smpte_24976,
+ smpte_25,
+ smpte_2997,
+ smpte_2997drop,
+ smpte_30,
+ smpte_30drop,
+ smpte_5994,
+ smpte_60,
+ };
+
+ enum PullupFormat {
+ pullup_Plus4Plus1,
+ pullup_Plus4,
+ pullup_Plus4Minus1,
+ pullup_Plus1,
+ pullup_None,
+ pullup_Minus1,
+ pullup_Minus4Plus1,
+ pullup_Minus4,
+ pullup_Minus4Minus1,
+ };
+
int set_smpte_type (float fps, bool drop_frames);
+ int set_video_pullup (float pullup);
+
+ void sync_time_vars();
void bbt_time (jack_nframes_t when, BBT_Time&);
void smpte_to_sample( SMPTE::Time& smpte, jack_nframes_t& sample, bool use_offset, bool use_subframes ) const;
jack_nframes_t convert_to_frames_at (jack_nframes_t position, AnyTime&);
+ static sigc::signal<void> StartTimeChanged;
+ static sigc::signal<void> EndTimeChanged;
static sigc::signal<void> SMPTEOffsetChanged;
- sigc::signal<void> SMPTETypeChanged;
+ static sigc::signal<void> SMPTETypeChanged;
+ static sigc::signal<void> PullupChanged;
void request_slave_source (SlaveSource, jack_nframes_t pos = 0);
SlaveSource slave_source() const { return _slave_type; }
mutable gint processing_prohibited;
process_function_type process_function;
process_function_type last_process_function;
- jack_nframes_t _current_frame_rate;
+ bool waiting_for_sync_offset;
+ jack_nframes_t _base_frame_rate;
+ jack_nframes_t _current_frame_rate; //this includes video pullup offset
int transport_sub_state;
mutable gint _record_status;
jack_nframes_t _transport_frame;
return false;
}
+ bool maybe_sync_start (jack_nframes_t&, jack_nframes_t&);
+
void check_declick_out ();
MIDI::MachineControl* mmc;
void add_controllable (PBD::Controllable*);
void remove_controllable (PBD::Controllable*);
+
+ void handle_configuration_change (const char*);
};
} // namespace ARDOUR
#include <string>
#include <sigc++/signal.h>
-#include <boost/enable_shared_from_this.hpp>
#include <pbd/statefuldestructible.h>
namespace ARDOUR {
-class Source : public PBD::StatefulDestructible, public sigc::trackable, public boost::enable_shared_from_this<Source>
+class Source : public PBD::StatefulDestructible, public sigc::trackable
{
public:
Source (std::string name, DataType type);
#include <string>
#include <cmath>
-#ifdef HAVE_COREAUDIO
+#if defined(HAVE_COREAUDIO) || defined(HAVE_AUDIOUNITS)
#include <CoreFoundation/CoreFoundation.h>
#endif
std::string region_name_from_path (std::string path);
std::string path_expand (std::string);
-#ifdef HAVE_COREAUDIO
+void compute_equal_power_fades (jack_nframes_t nframes, float* in, float* out);
+
+#if defined(HAVE_COREAUDIO) || defined(HAVE_AUDIOUNITS)
std::string CFStringRefToStdString(CFStringRef stringRef);
#endif // HAVE_COREAUDIO
#endif /* __ardour_utils_h__ */
+
if (speed() != 1.0f || speed() != -1.0f) {
seek ((jack_nframes_t) (_session.transport_frame() * (double) speed()));
- }
- else {
+ } else {
seek (_session.transport_frame());
}
}
void
AudioDiskstream::use_destructive_playlist ()
{
- /* use the sources associated with the single full-extent region */
-
+ /* this is called from the XML-based constructor. when its done,
+ we already have a playlist and a region, but we need to
+ set up our sources for write. we use the sources associated
+ with the (presumed single, full-extent) region.
+ */
+
Playlist::RegionList* rl = _playlist->regions_at (0);
if (rl->empty()) {
file.rfind(".vwe") == string::npos &&
file.rfind(".paf") == string::npos &&
#ifdef HAVE_COREAUDIO
- file.rfind(".mp3") == string::npos &&
- file.rfind(".aac") == string::npos &&
- file.rfind(".mp4") == string::npos &&
+ file.rfind(".mp3") == string::npos &&
+ file.rfind(".aac") == string::npos &&
+ file.rfind(".mp4") == string::npos &&
#endif // HAVE_COREAUDIO
file.rfind(".voc") == string::npos);
}
0, &cinfo, &info_size);
}
}
+
}
+
+bool
+AudioEngine::get_sync_offset (jack_nframes_t& offset) const
+{
+	/* Query JACK transport for a video frame offset. Returns true and
+	   stores the offset when some JACK client has published one
+	   (JackVideoFrameOffset set in pos.valid); false otherwise. */
+
+	jack_position_t pos;
+
+	/* return value deliberately ignored: we only inspect the optional
+	   fields of pos, not the transport state */
+	(void) jack_transport_query (_jack, &pos);
+
+	if (pos.valid & JackVideoFrameOffset) {
+		offset = pos.video_offset;
+		return true;
+	}
+
+	return false;
+}
+
void
AudioEngine::_jack_timebase_callback (jack_transport_state_t state, jack_nframes_t nframes,
-
- jack_position_t* pos, int new_position, void *arg)
+ jack_position_t* pos, int new_position, void *arg)
{
static_cast<AudioEngine*> (arg)->jack_timebase_callback (state, nframes, pos, new_position);
}
void
AudioEngine::jack_timebase_callback (jack_transport_state_t state, jack_nframes_t nframes,
-
- jack_position_t* pos, int new_position)
+ jack_position_t* pos, int new_position)
{
if (session && session->synced_to_jack()) {
session->jack_timebase_callback (state, nframes, pos, new_position);
AudioSource::AudioSource (const XMLNode& node)
: Source (node)
{
- cerr << "audiosource from XML\n";
if (pending_peak_sources_lock == 0) {
pending_peak_sources_lock = new Glib::Mutex;
}
: AudioFileSource (node)
{
init (_name);
-
- SourceCreated (this); /* EMIT SIGNAL */
}
CoreAudioSource::CoreAudioSource (const string& idstr, Flag flags)
: AudioFileSource(idstr, flags)
{
init (idstr);
-
- SourceCreated (this); /* EMIT SIGNAL */
}
void
#include <pbd/error.h>
#include <ardour/destructive_filesource.h>
+#include <ardour/utils.h>
#include "i18n.h"
out_coefficient = new gain_t[xfade_frames];
in_coefficient = new gain_t[xfade_frames];
- for (jack_nframes_t n = 0; n < xfade_frames; ++n) {
-
- /* XXXX THIS IS NOT THE RIGHT XFADE CURVE: USE A PROPER VOLUMETRIC EQUAL POWER CURVE */
-
- in_coefficient[n] = n/(gain_t) (xfade_frames-1); /* 0 .. 1 */
- out_coefficient[n] = 1.0 - in_coefficient[n]; /* 1 .. 0 */
- }
+ compute_equal_power_fades (xfade_frames, in_coefficient, out_coefficient);
}
void
}
if (file_cnt) {
- if ((retval = write_float (xfade_buf, fade_position, file_cnt)) != (ssize_t) file_cnt) {
+ if ((retval = read_unlocked (xfade_buf, fade_position, file_cnt)) != (ssize_t) file_cnt) {
if (retval >= 0 && errno == EAGAIN) {
/* XXX - can we really trust that errno is meaningful here? yes POSIX, i'm talking to you.
* short or no data there */
}
if (nofade && !fade_in) {
- if (write_float (data, file_pos, nofade) != nofade) {
+ if (write_float (data, file_pos - timeline_position, nofade) != nofade) {
error << string_compose(_("DestructiveFileSource: \"%1\" bad write (%2)"), _path, strerror (errno)) << endmsg;
return 0;
}
} else if (xfade) {
+ gain_t in[xfade];
+ gain_t out[xfade];
+
/* short xfade, compute custom curve */
- /* XXX COMPUTE THE CURVE, DAMMIT! */
+ compute_equal_power_fades (xfade, in, out);
for (jack_nframes_t n = 0; n < xfade; ++n) {
- xfade_buf[n] = (xfade_buf[n] * out_coefficient[n]) + (fade_data[n] * in_coefficient[n]);
+ xfade_buf[n] = (xfade_buf[n] * out[n]) + (fade_data[n] * in[n]);
}
}
if (xfade) {
- if (write_float (xfade_buf, fade_position, xfade) != xfade) {
+ if (write_float (xfade_buf, fade_position - timeline_position, xfade) != xfade) {
error << string_compose(_("DestructiveFileSource: \"%1\" bad write (%2)"), _path, strerror (errno)) << endmsg;
return 0;
}
}
if (fade_in && nofade) {
- if (write_float (data + xfade, file_pos + xfade, nofade) != nofade) {
+ if (write_float (data + xfade, file_pos + xfade - timeline_position, nofade) != nofade) {
error << string_compose(_("DestructiveFileSource: \"%1\" bad write (%2)"), _path, strerror (errno)) << endmsg;
return 0;
}
/* in the middle of recording */
- if (write_float (data, file_pos, cnt) != cnt) {
+ if (write_float (data, file_pos - timeline_position, cnt) != cnt) {
return 0;
}
}
sf_count_t input_count = 0;
SNDFILE* in = sf_open(infile.c_str(), SFM_READ, &sf_info);
+ if (!in) {
+ error << string_compose(_("Import/SRC: could not open input file: %1"), outfile) << endmsg;
+ return false;
+ }
sf_count_t total_input_frames = sf_info.frames;
outfile = build_tmp_convert_name(infile);
SNDFILE* out = sf_open(outfile.c_str(), SFM_RDWR, &sf_info);
- if(!out) {
- error << string_compose(_("Import: could not open temp file: %1"), outfile) << endmsg;
- return false;
- }
+ if (!out) {
+ error << string_compose(_("Import/SRC: could not open output file: %1"), outfile) << endmsg;
+ return false;
+ }
sf_seek (in, 0, SEEK_SET) ;
sf_seek (out, 0, SEEK_SET) ;
sf_close(in);
sf_close(out);
- status.done = true;
-
if (status.cancel) {
return false;
} else {
#include <ardour/vst_plugin.h>
#endif
-#ifdef HAVE_COREAUDIO
+#ifdef HAVE_AUDIOUNITS
#include <ardour/audio_unit.h>
#endif
#ifdef VST_SUPPORT
boost::shared_ptr<VSTPlugin> vp;
#endif
-#ifdef HAVE_COREAUDIO
+#ifdef HAVE_AUDIOUNITS
boost::shared_ptr<AUPlugin> ap;
#endif
} else if ((vp = boost::dynamic_pointer_cast<VSTPlugin> (other)) != 0) {
return boost::shared_ptr<Plugin> (new VSTPlugin (*vp));
#endif
-#ifdef HAVE_COREAUDIO
+#ifdef HAVE_AUDIOUNITS
} else if ((ap = boost::dynamic_pointer_cast<AUPlugin> (other)) != 0) {
return boost::shared_ptr<Plugin> (new AUPlugin (*ap));
#endif
#ifdef VST_SUPPORT
boost::shared_ptr<VSTPlugin> vp;
#endif
-#ifdef HAVE_COREAUDIO
+#ifdef HAVE_AUDIOUNITS
boost::shared_ptr<AUPlugin> ap;
#endif
} else if ((vp = boost::dynamic_pointer_cast<VSTPlugin> (other)) != 0) {
return ARDOUR::VST;
#endif
-#ifdef HAVE_COREAUDIO
+#ifdef HAVE_AUDIOUNITS
} else if ((ap = boost::dynamic_pointer_cast<AUPlugin> (other)) != 0) {
return ARDOUR::AudioUnit;
#endif
#include <pbd/xml++.h>
#include <ardour/location.h>
+#include <ardour/session.h>
#include <ardour/audiofilesource.h>
#include "i18n.h"
_end = s;
start_changed(this); /* EMIT SIGNAL */
if ( is_start() ) {
+ Session::StartTimeChanged (); /* EMIT SIGNAL */
AudioFileSource::set_header_position_offset ( s );
}
+ if ( is_end() ) {
+ Session::EndTimeChanged (); /* EMIT SIGNAL */
+ }
}
return 0;
}
break;
#endif
-#ifdef HAVE_COREAUDIO
+#ifdef HAVE_AUDIOUNITS
case ARDOUR::AudioUnit:
plugs = AUPluginInfo::discover ();
unique_id = 0; // Neither do AU.
return PluginPtr ((Plugin*) 0);
}
+
#include <ardour/playlist.h>
#include <ardour/session.h>
#include <ardour/source.h>
+#include <ardour/region_factory.h>
#include "i18n.h"
_first_edit = EditChangesNothing;
send_change (NameChanged);
- /// XXX CheckNewRegion (boost::shared_ptr<Region>(this));
+ RegionFactory::CheckNewRegion (shared_from_this());
}
}
Session::mix_buffers_no_gain_t Session::mix_buffers_no_gain = 0;
sigc::signal<int> Session::AskAboutPendingState;
-sigc::signal<void> Session::SMPTEOffsetChanged;
sigc::signal<void> Session::SendFeedback;
+sigc::signal<void> Session::SMPTEOffsetChanged;
+sigc::signal<void> Session::SMPTETypeChanged;
+sigc::signal<void> Session::PullupChanged;
+sigc::signal<void> Session::StartTimeChanged;
+sigc::signal<void> Session::EndTimeChanged;
int
Session::find_session (string str, string& path, string& snapshot, bool& isnew)
_state_of_the_state = StateOfTheState (_state_of_the_state & ~Dirty);
+ Config->ParameterChanged.connect (mem_fun (*this, &Session::handle_configuration_change));
+
if (was_dirty) {
DirtyChanged (); /* EMIT SIGNAL */
}
here.
*/
- _current_frame_rate = frames_per_second;
- _frames_per_smpte_frame = (double) _current_frame_rate / (double) smpte_frames_per_second;
+ _base_frame_rate = frames_per_second;
+
+ sync_time_vars();
Route::set_automation_interval ((jack_nframes_t) ceil ((double) frames_per_second * 0.25));
void
Session::add_source (boost::shared_ptr<Source> source)
{
- cerr << "add new source " << source->name() << endl;
-
pair<SourceMap::key_type, SourceMap::mapped_type> entry;
pair<SourceMap::iterator,bool> result;
}
}
+void
+Session::handle_configuration_change (const char* parameter)
+{
+	/* Connected to Config->ParameterChanged; `parameter` is the config
+	   variable's name string. When video sync is toggled while the
+	   transport is stopped, re-arm the wait for the JACK video frame
+	   offset so the next roll aligns to it. */
+	if (!strcmp (parameter, "use-video-sync")) {
+		if (_transport_speed == 0.0f) {
+			waiting_for_sync_offset = true;
+		}
+	}
+}
+
void
Session::add_curve(Curve *curve)
{
bool session_needs_butler = false;
jack_nframes_t stop_limit;
long frames_moved;
-
+ jack_nframes_t offset = 0;
+
{
if (post_transport_work & (PostTransportLocate|PostTransportStop)) {
no_roll (nframes, 0);
return;
}
- click (_transport_frame, nframes, 0);
+ if (maybe_sync_start (nframes, offset)) {
+ return;
+ }
+
+ click (_transport_frame, nframes, offset);
prepare_diskstreams ();
frames_moved = (long) floor (_transport_speed * nframes);
- if (process_routes (nframes, 0)) {
- no_roll (nframes, 0);
- return;
- }
+ if (process_routes (nframes, offset)) {
+ no_roll (nframes, offset);
+ return;
+ }
commit_diskstreams (nframes, session_needs_butler);
}
}
+bool
+Session::maybe_sync_start (jack_nframes_t& nframes, jack_nframes_t& offset)
+{
+	/* If we are waiting for a video sync point, consume (silence via
+	   no_roll) the part of this process cycle that precedes the sync
+	   offset. Returns true when the whole cycle was handled here and
+	   the caller must return immediately; returns false when the caller
+	   should process the (possibly reduced) nframes starting at the
+	   (possibly advanced) offset. Both arguments are updated in place. */
+
+	jack_nframes_t sync_offset;
+
+	if (!waiting_for_sync_offset) {
+		return false;
+	}
+
+	if (_engine.get_sync_offset (sync_offset) && sync_offset < nframes) {
+
+		/* sync point falls inside this cycle: silence up to it, then
+		   hand the remainder of the cycle back to the caller */
+		no_roll (sync_offset, 0);
+		nframes -= sync_offset;
+		offset += sync_offset;
+		waiting_for_sync_offset = false;
+
+		if (nframes == 0) {
+			return true; // done
+		}
+
+	} else {
+		/* no offset available, or sync point beyond this cycle:
+		   keep waiting and emit silence for the whole cycle */
+		no_roll (nframes, 0);
+		return true; // done
+	}
+
+	return false;
+}
+
process_function = &Session::process_with_events;
+ if (Config->get_use_video_sync()) {
+ waiting_for_sync_offset = true;
+ } else {
+ waiting_for_sync_offset = false;
+ }
+
+ _current_frame_rate = 48000;
+ _base_frame_rate = 48000;
+
+ smpte_frames_per_second = 30;
+ video_pullup = 0.0;
+ smpte_drop_frames = false;
last_smpte_when = 0;
_smpte_offset = 0;
_smpte_offset_negative = true;
/* default SMPTE type is 30 FPS, non-drop */
set_smpte_type (30.0, false);
+ set_video_pullup (0.0);
_engine.GraphReordered.connect (mem_fun (*this, &Session::graph_reordered));
rf_speed = atof (prop->value().c_str());
}
}
+ if ((child = find_named_node (node, "video-pullup")) != 0) {
+ if ((prop = child->property ("val")) != 0) {
+ set_video_pullup( atof (prop->value().c_str()) );
+ }
+ }
if ((child = find_named_node (node, "smpte-frames-per-second")) != 0) {
if ((prop = child->property ("val")) != 0) {
set_smpte_type( atof (prop->value().c_str()), smpte_drop_frames );
child = opthead->add_child ("rf-speed");
child->add_property ("val", buf);
+ snprintf (buf, sizeof(buf)-1, "%.4f", video_pullup);
+ child = opthead->add_child ("video-pullup");
+ child->add_property ("val", buf);
+
snprintf (buf, sizeof(buf)-1, "%.2f", smpte_frames_per_second);
child = opthead->add_child ("smpte-frames-per-second");
child->add_property ("val", buf);
/* SMPTE TIME */
+void
+Session::sync_time_vars ()
+{
+	/* Recompute the derived timing variables from the base (JACK) frame
+	   rate, the video pullup percentage and the SMPTE frames-per-second
+	   value. Must be called whenever any of those inputs changes (see
+	   set_smpte_type() and set_video_pullup()). */
+	_current_frame_rate = _base_frame_rate * (1.0 + (video_pullup/100.0) );
+	_frames_per_hour = _current_frame_rate * 3600;
+	_frames_per_smpte_frame = (double) _current_frame_rate / (double) smpte_frames_per_second;
+	_smpte_frames_per_hour = (unsigned long) (smpte_frames_per_second * 3600.0);
+}
+
int
Session::set_smpte_type (float fps, bool drop_frames)
{
smpte_frames_per_second = fps;
smpte_drop_frames = drop_frames;
- _frames_per_smpte_frame = (double) _current_frame_rate / (double) smpte_frames_per_second;
- _frames_per_hour = _current_frame_rate * 3600;
- _smpte_frames_per_hour = (unsigned long) (smpte_frames_per_second * 3600.0);
-
last_smpte_valid = false;
// smpte type bits are the middle two in the upper nibble
break;
};
+ sync_time_vars();
+
SMPTETypeChanged (); /* EMIT SIGNAL */
set_dirty();
return 0;
}
+int
+Session::set_video_pullup (float pull)
+{
+	/* Set the video pullup: a percentage adjustment applied to the base
+	   frame rate (see sync_time_vars, which divides it by 100), refresh
+	   the derived rates, notify listeners and mark the session dirty.
+	   Always returns 0. */
+	video_pullup = pull;
+
+	sync_time_vars();
+
+	PullupChanged (); /* EMIT SIGNAL */
+
+	set_dirty();
+
+	return 0;
+}
+
void
Session::set_smpte_offset (jack_nframes_t off)
{
pos->valid = jack_position_bits_t (pos->valid | JackPositionBBT);
}
+ //poke audio video ratio so Ardour can track Video Sync
+ pos->audio_frames_per_video_frame = frame_rate() / smpte_frames_per_second;
+ pos->valid = jack_position_bits_t (pos->valid | JackAudioVideoRatio);
+
#if 0
/* SMPTE info */
_transport_speed = 0;
+ if (Config->get_use_video_sync()) {
+ waiting_for_sync_offset = true;
+ }
+
transport_sub_state = (auto_return ? AutoReturning : 0);
}
uint32_t real_cnt;
jack_nframes_t file_cnt;
+ //destructive (tape) tracks need to offset reads and writes by the timeline position
+ if (_flags && ARDOUR::Destructive == ARDOUR::Destructive) {
+ start -= timeline_position;
+ }
+
if (start > _length) {
/* read starts beyond end of data, just memset to zero */
#include <ardour/destructive_filesource.h>
#include <ardour/configuration.h>
+#ifdef HAVE_COREAUDIO
+#include <ardour/coreaudiosource.h>
+#endif
+
#include "i18n.h"
using namespace ARDOUR;
sigc::signal<void,boost::shared_ptr<Source> > SourceFactory::SourceCreated;
#ifdef HAVE_COREAUDIO
-
-
boost::shared_ptr<Source>
SourceFactory::create (const XMLNode& node)
{
#endif
}
-#ifdef HAVE_COREAUDIO
+#if defined(HAVE_COREAUDIO) || defined(HAVE_AUDIOUNITS)
string
CFStringRefToStdString(CFStringRef stringRef)
{
return result;
}
#endif // HAVE_COREAUDIO
+
+void
+compute_equal_power_fades (jack_nframes_t nframes, float* in, float* out)
+{
+	/* Fill in[] with an equal-power fade-in gain curve and out[] with
+	   the complementary fade-out curve: a linear 0..1 ramp shaped by a
+	   -3dB pan law so that the combined power stays roughly constant
+	   across the crossfade. Both arrays must hold >= nframes entries. */
+
+	if (nframes == 0) {
+		/* guard: with unsigned nframes, `nframes - 1` below would wrap
+		   and the ramp loop would overrun in[] */
+		return;
+	}
+
+	/* linear ramp 0 .. 1 */
+
+	double step = 1.0/nframes;
+
+	in[0] = 0.0f;
+
+	/* `i + 1 < nframes` is the underflow-safe form of `i < nframes - 1`;
+	   index type matches nframes to avoid signed/unsigned comparison */
+	for (jack_nframes_t i = 1; i + 1 < nframes; ++i) {
+		in[i] = in[i-1] + step;
+	}
+
+	in[nframes-1] = 1.0;
+
+	/* shape the linear ramp with the pan law */
+
+	const float pan_law_attenuation = -3.0f;
+	const float scale = 2.0f - 4.0f * powf (10.0f,pan_law_attenuation/20.0f);
+
+	for (jack_nframes_t n = 0; n < nframes; ++n) {
+		float inVal = in[n];
+		float outVal = 1 - inVal;
+		out[n] = outVal * (scale * outVal + 1.0f - scale);
+		in[n] = inVal * (scale * inVal + 1.0f - scale);
+	}
+}