--------------------------------------------------------------------------------
+0.16.2 --- 2020 . 02 . 18
+- Switch to the 'JSON for Modern C++' library for reading and writing JSON data
+- Resizable channels, improved version
+- Drop support for raw patches (still readable for backward compatibility)
+- Simplify global configuration parameters
+- Simplify column data storage in patch files
+- Center all micro-subwindows to screen
+- Revamped MIDI learning algorithm and related UI components
+- Always display 'R' button in Sample Channel
+- Don't download external files for unit tests
+- Optimized UI drawings for base buttons
+- Move build info from 'About' window to console log
+- Update RtAudio to 5.1.0
+- Fix crash during audio recording after opening a project (thanks AdTb!)
+
+
0.16.1 --- 2020 . 01 . 08
- FreeBSD support
- Ability to remove empty columns manually
src/core/channels/midiChannelProc.cpp \
src/core/model/model.h \
src/core/model/model.cpp \
+ src/core/model/storage.h \
+ src/core/model/storage.cpp \
src/core/idManager.h \
src/core/idManager.cpp \
src/glue/main.h \
src/gui/dispatcher.cpp \
src/gui/updater.h \
src/gui/updater.cpp \
+ src/gui/model.h \
+ src/gui/model.cpp \
src/gui/dialogs/keyGrabber.h \
src/gui/dialogs/keyGrabber.cpp \
src/gui/dialogs/about.h \
src/gui/dialogs/midiIO/midiInputChannel.cpp \
src/gui/dialogs/midiIO/midiInputMaster.h \
src/gui/dialogs/midiIO/midiInputMaster.cpp \
- src/gui/elems/midiLearner.h \
- src/gui/elems/midiLearner.cpp \
- src/gui/elems/browser.h \
- src/gui/elems/browser.cpp \
- src/gui/elems/soundMeter.h \
- src/gui/elems/soundMeter.cpp \
+ src/gui/elems/browser.h \
+ src/gui/elems/browser.cpp \
+ src/gui/elems/soundMeter.h \
+ src/gui/elems/soundMeter.cpp \
src/gui/elems/plugin/pluginBrowser.h \
src/gui/elems/plugin/pluginBrowser.cpp \
src/gui/elems/plugin/pluginParameter.h \
src/gui/elems/config/tabBehaviors.cpp \
src/gui/elems/config/tabPlugins.h \
src/gui/elems/config/tabPlugins.cpp \
+ src/gui/elems/midiIO/midiLearnerBase.h \
+ src/gui/elems/midiIO/midiLearnerBase.cpp \
+ src/gui/elems/midiIO/midiLearnerMaster.h \
+ src/gui/elems/midiIO/midiLearnerMaster.cpp \
+ src/gui/elems/midiIO/midiLearnerChannel.h \
+ src/gui/elems/midiIO/midiLearnerChannel.cpp \
+ src/gui/elems/midiIO/midiLearnerPlugin.h \
+ src/gui/elems/midiIO/midiLearnerPlugin.cpp \
src/gui/elems/basics/scroll.h \
src/gui/elems/basics/scroll.cpp \
src/gui/elems/basics/boxtypes.h \
src/utils/vector.h \
src/utils/ver.h \
src/utils/ver.cpp \
- src/utils/json.h \
- src/utils/json.cpp \
src/utils/string.h \
src/utils/string.cpp \
- src/deps/rtaudio-mod/RtAudio.h \
- src/deps/rtaudio-mod/RtAudio.cpp
+ src/deps/rtaudio/RtAudio.h \
+ src/deps/rtaudio/RtAudio.cpp
sourcesTests = \
tests/main.cpp \
tests/rcuList.cpp \
- tests/conf.cpp \
tests/wave.cpp \
tests/waveManager.cpp \
- tests/patch.cpp \
- tests/midiMapConf.cpp \
- tests/pluginHost.cpp \
tests/utils.cpp \
tests/recorder.cpp \
tests/waveFx.cpp \
tests/audioBuffer.cpp \
- tests/sampleChannel.cpp \
- tests/sampleChannelProc.cpp \
- tests/sampleChannelRec.cpp
+ tests/sampleChannel.cpp
if WITH_VST
src/deps/juce/modules/juce_gui_extra/juce_gui_extra.cpp
cppFlags += \
- -I$(top_srcdir)/src/deps/juce/modules \
- -I$(top_srcdir)/src/deps/vst \
- -I/usr/include \
- -I/usr/include/freetype2 \
- -DJUCE_GLOBAL_MODULE_SETTINGS_INCLUDED=1 \
- -DJUCE_STANDALONE_APPLICATION=1 \
- -DJUCE_PLUGINHOST_VST=1 \
- -DJUCE_PLUGINHOST_VST3=0 \
- -DJUCE_PLUGINHOST_AU=0 \
- -DJUCE_WEB_BROWSER=0
+ -I$(top_srcdir)/src/deps/juce/modules \
+ -I$(top_srcdir)/src/deps/vst \
+ -I/usr/include \
+ -I/usr/include/freetype2 \
+ -DJUCE_GLOBAL_MODULE_SETTINGS_INCLUDED=1 \
+ -DJUCE_STANDALONE_APPLICATION=1 \
+ -DJUCE_PLUGINHOST_VST=1 \
+ -DJUCE_PLUGINHOST_VST3=0 \
+ -DJUCE_PLUGINHOST_AU=0 \
+ -DJUCE_WEB_BROWSER=0
endif
if WINDOWS
sourcesExtra += \
- src/deps/rtaudio-mod/include/asio.h \
- src/deps/rtaudio-mod/include/asio.cpp \
- src/deps/rtaudio-mod/include/asiolist.h \
- src/deps/rtaudio-mod/include/asiolist.cpp \
- src/deps/rtaudio-mod/include/asiodrivers.h \
- src/deps/rtaudio-mod/include/asiodrivers.cpp \
- src/deps/rtaudio-mod/include/iasiothiscallresolver.h \
- src/deps/rtaudio-mod/include/iasiothiscallresolver.cpp \
+ src/deps/rtaudio/include/asio.h \
+ src/deps/rtaudio/include/asio.cpp \
+ src/deps/rtaudio/include/asiolist.h \
+ src/deps/rtaudio/include/asiolist.cpp \
+ src/deps/rtaudio/include/asiodrivers.h \
+ src/deps/rtaudio/include/asiodrivers.cpp \
+ src/deps/rtaudio/include/iasiothiscallresolver.h \
+ src/deps/rtaudio/include/iasiothiscallresolver.cpp \
resource.rc
cppFlags += \
- -I$(top_srcdir)/src/deps/rtaudio-mod/include \
+ -I$(top_srcdir)/src/deps/rtaudio/include \
-D__WINDOWS_ASIO__ \
-D__WINDOWS_WASAPI__ \
-D__WINDOWS_DS__
ldAdd += -ldsound -lwsock32 -lm -lfltk -lwininet -lgdi32 -lshell32 -lvfw32 \
- -lrpcrt4 -luuid -lcomctl32 -lole32 -lws2_32 -lsndfile -lsamplerate -lrtmidi \
- -lwinmm -lsetupapi -lksuser -ljansson -limm32 -lglu32 -lshell32 -lversion \
- -lopengl32 -loleaut32 -lshlwapi -lcomdlg32 -lflac -lvorbis -logg -lvorbisenc
+ -lrpcrt4 -luuid -lcomctl32 -lws2_32 -lsndfile -lsamplerate -lrtmidi \
+ -lsetupapi -limm32 -lglu32 -lshell32 -lversion \
+ -lopengl32 -loleaut32 -lshlwapi -lcomdlg32 -lflac -lvorbis -logg -lvorbisenc \
+ -lole32 -lwinmm -lksuser -lmfplat -lmfuuid -lwmcodecdspuuid
# Generate a GUI application (-mwindows), make the build static (-static).
ldFlags += -mwindows -static
cppFlags += -D__LINUX_ALSA__ -D__LINUX_PULSE__ -D__UNIX_JACK__
ldAdd += -lsndfile -lfltk -lXext -lX11 -lXft -lXpm -lm -ljack -lasound \
- -lpthread -ldl -lpulse-simple -lpulse -lsamplerate -lrtmidi -ljansson \
- -lfreetype -lfontconfig -lXrender -lXfixes -lXcursor -lXinerama
+ -lpthread -ldl -lpulse-simple -lpulse -lsamplerate -lrtmidi \
+ -lfreetype -lfontconfig -lXrender -lXfixes -lXcursor -lXinerama
endif
cppFlags += -D__LINUX_PULSE__ -D__UNIX_JACK__
ldAdd += -lsndfile -lfltk -lXext -lX11 -lXft -lXpm -lm -ljack -lasound \
- -lpthread -lpulse-simple -lpulse -lsamplerate -lrtmidi -ljansson \
- -lfreetype
+ -lpthread -lpulse-simple -lpulse -lsamplerate -lrtmidi \
+ -lfreetype
endif
# -ObjC++: Juce requires to build some Objective C code
cxxFlags += -ObjC++
-ldAdd += -lsndfile -lfltk -lrtmidi -lsamplerate -ljansson -lm -lpthread \
- -lFLAC -logg -lvorbis -lvorbisenc
+ldAdd += -lsndfile -lfltk -lrtmidi -lsamplerate -lm -lpthread \
+ -lFLAC -logg -lvorbis -lvorbisenc
ldFlags += -framework CoreAudio -framework Cocoa -framework Carbon \
- -framework CoreMIDI -framework CoreFoundation -framework Accelerate \
- -framework WebKit -framework QuartzCore -framework IOKit
+ -framework CoreMIDI -framework CoreFoundation -framework Accelerate \
+ -framework WebKit -framework QuartzCore -framework IOKit
endif
## Copyright
-Giada is Copyright (C) 2010-2019 by Giovanni A. Zuliani | Monocasual
+Giada is Copyright (C) 2010-2020 by Giovanni A. Zuliani | Monocasual Laboratories
Giada - Your Hardcore Loopmachine is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
;;
esac
AM_CONDITIONAL(LINUX, test "x$os" = "xlinux")
-AM_CONDITIONAL(FREEBSD, test "x$os" = "xfreebsd")
AM_CONDITIONAL(WINDOWS, test "x$os" = "xwindows")
AM_CONDITIONAL(OSX, test "x$os" = "xosx")
AM_CONDITIONAL(FREEBSD, test "x$os" = "xfreebsd")
# ------------------------------------------------------------------------------
-# test if files needed for Travis CI are present. If so, define a new macro
-# RUN_TESTS_WITH_LOCAL_FILES used during the test suite
-
-if test -f "giada-midimaps-master.zip" && test -f "dexed.tar.xz" ; then
- AC_DEFINE(RUN_TESTS_WITH_LOCAL_FILES)
-fi
-
-# ------------------------------------------------------------------------------
-
# Check for C++ compiler
AC_PROG_CXX
AC_PROG_OBJCXX
-# Check for C compiler (TODO - is that really needed?)
-
-AC_PROG_CC
-
# Check for make
AC_PROG_MAKE_SET
AC_LANG_POP
fi
-
-AC_LANG_PUSH([C++])
-AC_CHECK_HEADER(
- [jansson.h],
- [],
- [AC_MSG_ERROR([library 'Jansson' not found!])]
-)
-AC_LANG_POP
-
AC_LANG_PUSH([C++])
AC_CHECK_HEADER(
[sndfile.h],
)
AC_LANG_POP
-#~ AC_LANG_PUSH([C++])
-#~ AC_CHECK_HEADER(
- #~ [RtAudio.h],
- #~ [],
- #~ [AC_MSG_ERROR([library 'RtAudio' not found!])]
-#~ )
-#~ AC_LANG_POP
-
AC_LANG_PUSH([C++])
AC_CHECK_HEADER(
[samplerate.h],
AC_LANG_POP
-
# ------------------------------------------------------------------------------
# Check for linux header files.
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
recStatus (ChannelStatus::OFF),
columnId (columnId),
id (id),
+ height (G_GUI_UNIT),
previewMode (PreviewMode::NONE),
pan (0.5f),
volume (G_DEFAULT_VOL),
recStatus (o.recStatus),
columnId (o.columnId),
id (o.id),
+ height (o.height),
previewMode (o.previewMode),
pan (o.pan),
volume (o.volume),
key (o.key),
mute (o.mute),
solo (o.solo),
- volume_i (o.volume_i.load()),
+ volume_i (o.volume_i),
volume_d (o.volume_d),
hasActions (o.hasActions),
readActions (o.readActions),
- midiIn (o.midiIn.load()),
- midiInKeyPress (o.midiInKeyPress.load()),
- midiInKeyRel (o.midiInKeyRel.load()),
- midiInKill (o.midiInKill.load()),
- midiInArm (o.midiInArm.load()),
- midiInVolume (o.midiInVolume.load()),
- midiInMute (o.midiInMute.load()),
- midiInSolo (o.midiInSolo.load()),
- midiInFilter (o.midiInFilter.load()),
- midiOutL (o.midiOutL.load()),
- midiOutLplaying(o.midiOutLplaying.load()),
- midiOutLmute (o.midiOutLmute.load()),
- midiOutLsolo (o.midiOutLsolo.load())
+ midiIn (o.midiIn),
+ midiInKeyPress (o.midiInKeyPress),
+ midiInKeyRel (o.midiInKeyRel),
+ midiInKill (o.midiInKill),
+ midiInArm (o.midiInArm),
+ midiInVolume (o.midiInVolume),
+ midiInMute (o.midiInMute),
+ midiInSolo (o.midiInSolo),
+ midiInFilter (o.midiInFilter),
+ midiOutL (o.midiOutL),
+ midiOutLplaying(o.midiOutLplaying),
+ midiOutLmute (o.midiOutLmute),
+ midiOutLsolo (o.midiOutLsolo)
#ifdef WITH_VST
,pluginIds (o.pluginIds)
#endif
Channel::Channel(const patch::Channel& p, int bufferSize)
: type (p.type),
- playStatus (p.waveId == 0 ? ChannelStatus::EMPTY : ChannelStatus::OFF),
+ playStatus (p.waveId == 0 && type == ChannelType::SAMPLE ? ChannelStatus::EMPTY : ChannelStatus::OFF),
recStatus (ChannelStatus::OFF),
columnId (p.columnId),
id (p.id),
+ height (p.height),
previewMode (PreviewMode::NONE),
pan (p.pan),
volume (p.volume),
,pluginIds (p.pluginIds)
#endif
{
- buffer.alloc(bufferSize, G_MAX_IO_CHANS);
+ buffer.alloc(bufferSize, G_MAX_IO_CHANS);
}
if (!midiOutL || midiOutLmute == 0x0)
return;
if (mute)
- kernelMidi::sendMidiLightning(midiOutLmute, midimap::muteOn);
+ kernelMidi::sendMidiLightning(midiOutLmute, midimap::midimap.muteOn);
else
- kernelMidi::sendMidiLightning(midiOutLmute, midimap::muteOff);
+ kernelMidi::sendMidiLightning(midiOutLmute, midimap::midimap.muteOff);
}
if (!midiOutL || midiOutLsolo == 0x0)
return;
if (solo)
- kernelMidi::sendMidiLightning(midiOutLsolo, midimap::soloOn);
+ kernelMidi::sendMidiLightning(midiOutLsolo, midimap::midimap.soloOn);
else
- kernelMidi::sendMidiLightning(midiOutLsolo, midimap::soloOff);
+ kernelMidi::sendMidiLightning(midiOutLsolo, midimap::midimap.soloOff);
}
return;
switch (playStatus) {
case ChannelStatus::OFF:
- kernelMidi::sendMidiLightning(midiOutLplaying, midimap::stopped);
+ kernelMidi::sendMidiLightning(midiOutLplaying, midimap::midimap.stopped);
break;
case ChannelStatus::WAIT:
- kernelMidi::sendMidiLightning(midiOutLplaying, midimap::waiting);
+ kernelMidi::sendMidiLightning(midiOutLplaying, midimap::midimap.waiting);
break;
case ChannelStatus::ENDING:
- kernelMidi::sendMidiLightning(midiOutLplaying, midimap::stopping);
+ kernelMidi::sendMidiLightning(midiOutLplaying, midimap::midimap.stopping);
break;
case ChannelStatus::PLAY:
if ((mixer::isChannelAudible(this) && !mute) ||
- !midimap::isDefined(midimap::playingInaudible))
- kernelMidi::sendMidiLightning(midiOutLplaying, midimap::playing);
+ !midimap::isDefined(midimap::midimap.playingInaudible))
+ kernelMidi::sendMidiLightning(midiOutLplaying, midimap::midimap.playing);
else
- kernelMidi::sendMidiLightning(midiOutLplaying, midimap::playingInaudible);
+ kernelMidi::sendMidiLightning(midiOutLplaying, midimap::midimap.playingInaudible);
break;
default:
break;
/* -------------------------------------------------------------------------- */
+/* isInternal
+Tells whether this channel is one of the engine-owned channels (master out,
+master in or preview) rather than a user-created one, by matching its id
+against the reserved mixer channel ids. */
+
+bool Channel::isInternal() const
+{
+	return id == mixer::MASTER_OUT_CHANNEL_ID ||
+	       id == mixer::MASTER_IN_CHANNEL_ID ||
+	       id == mixer::PREVIEW_CHANNEL_ID;
+}
+
+
+/* -------------------------------------------------------------------------- */
+
+
bool Channel::isReadingActions() const
{
return hasActions && readActions;
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
bool isPlaying() const;
float getPan() const;
bool isPreview() const;
+ bool isInternal() const;
/* isMidiInAllowed
Given a MIDI channel 'c' tells whether this channel should be allowed to
ID columnId;
ID id;
+ int height;
+
/* previewMode
Whether the channel is in audio preview mode or not. */
the delta during volume changes (or the line slope between two volume
points). */
- std::atomic<double> volume_i;
+ double volume_i;
double volume_d;
bool hasActions; // If has some actions recorded
bool readActions; // If should read recorded actions
- std::atomic<bool> midiIn; // enable midi input
- std::atomic<uint32_t> midiInKeyPress;
- std::atomic<uint32_t> midiInKeyRel;
- std::atomic<uint32_t> midiInKill;
- std::atomic<uint32_t> midiInArm;
- std::atomic<uint32_t> midiInVolume;
- std::atomic<uint32_t> midiInMute;
- std::atomic<uint32_t> midiInSolo;
+ bool midiIn; // enable midi input
+ uint32_t midiInKeyPress;
+ uint32_t midiInKeyRel;
+ uint32_t midiInKill;
+ uint32_t midiInArm;
+ uint32_t midiInVolume;
+ uint32_t midiInMute;
+ uint32_t midiInSolo;
/* midiInFilter
Which MIDI channel should be filtered out when receiving MIDI messages. -1
means 'all'. */
- std::atomic<int> midiInFilter;
+ int midiInFilter;
/* midiOutL*
Enables MIDI lightning output, plus a set of midi lighting event to be sent
to a device. Those events basically contains the MIDI channel, everything
else gets stripped out. */
- std::atomic<bool> midiOutL;
- std::atomic<uint32_t> midiOutLplaying;
- std::atomic<uint32_t> midiOutLmute;
- std::atomic<uint32_t> midiOutLsolo;
+ bool midiOutL;
+ uint32_t midiOutLplaying;
+ uint32_t midiOutLmute;
+ uint32_t midiOutLsolo;
#ifdef WITH_VST
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
/* -------------------------------------------------------------------------- */
-std::unique_ptr<Channel> create(const patch::Channel& pch, int bufferSize)
+std::unique_ptr<Channel> deserializeChannel(const patch::Channel& pch, int bufferSize)
{
std::unique_ptr<Channel> ch = nullptr;
return ch;
}
+
+
+/* -------------------------------------------------------------------------- */
+
+
+/* serializeChannel
+Converts a live Channel 'c' into a patch::Channel ready to be written to a
+project file. Master channels only carry id/type/plugins; Sample and MIDI
+channels additionally store their type-specific state. */
+
+const patch::Channel serializeChannel(const Channel& c)
+{
+	patch::Channel pc;
+
+	pc.id   = c.id;
+	pc.type = c.type;
+
+#ifdef WITH_VST
+	for (ID pid : c.pluginIds)
+		pc.pluginIds.push_back(pid);
+#endif
+
+	if (c.type != ChannelType::MASTER) {
+		pc.height          = c.height;
+		pc.name            = c.name.c_str();
+		pc.columnId        = c.columnId;
+		pc.key             = c.key;
+		pc.mute            = c.mute;
+		pc.solo            = c.solo;
+		pc.volume          = c.volume;
+		pc.pan             = c.pan;
+		pc.hasActions      = c.hasActions;
+		pc.armed           = c.armed;
+		pc.midiIn          = c.midiIn;
+		/* Fix: keyPress/keyRel were cross-assigned, swapping the two MIDI
+		learn bindings in every saved patch. */
+		pc.midiInKeyPress  = c.midiInKeyPress;
+		pc.midiInKeyRel    = c.midiInKeyRel;
+		pc.midiInKill      = c.midiInKill;
+		pc.midiInArm       = c.midiInArm;
+		pc.midiInVolume    = c.midiInVolume;
+		pc.midiInMute      = c.midiInMute;
+		pc.midiInSolo      = c.midiInSolo;
+		pc.midiInFilter    = c.midiInFilter;
+		pc.midiOutL        = c.midiOutL;
+		pc.midiOutLplaying = c.midiOutLplaying;
+		pc.midiOutLmute    = c.midiOutLmute;
+		pc.midiOutLsolo    = c.midiOutLsolo;
+	}
+
+	if (c.type == ChannelType::SAMPLE) {
+		const SampleChannel& sc = static_cast<const SampleChannel&>(c);
+		pc.waveId            = sc.waveId;
+		pc.mode              = sc.mode;
+		pc.begin             = sc.begin;
+		pc.end               = sc.end;
+		pc.readActions       = sc.readActions;
+		pc.pitch             = sc.pitch;
+		pc.inputMonitor      = sc.inputMonitor;
+		pc.midiInVeloAsVol   = sc.midiInVeloAsVol;
+		pc.midiInReadActions = sc.midiInReadActions;
+		pc.midiInPitch       = sc.midiInPitch;
+	}
+	else
+	if (c.type == ChannelType::MIDI) {
+		const MidiChannel& mc = static_cast<const MidiChannel&>(c);
+		pc.midiOut     = mc.midiOut;
+		pc.midiOutChan = mc.midiOutChan;
+	}
+
+	return pc;
+}
}}}; // giada::m::channelManager
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
std::unique_ptr<Channel> create(const Channel& ch);
-/* create (3)
-Creates a new Channel out of a patch::Channel. */
+/* (de)serializeChannel
+Creates a new Channel given the patch raw data and vice versa. */
-std::unique_ptr<Channel> create(const patch::Channel& c, int bufferSize);
+std::unique_ptr<Channel> deserializeChannel(const patch::Channel& c, int bufferSize);
+const patch::Channel serializeChannel(const Channel& c);
}}}; // giada::m::channelManager
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
quantizing (o.quantizing),
inputMonitor (o.inputMonitor),
pitch (o.pitch),
- tracker (o.tracker.load()),
+ tracker (o.tracker),
trackerPreview (0),
begin (o.begin),
end (o.end),
midiInVeloAsVol (o.midiInVeloAsVol),
- midiInReadActions(o.midiInReadActions.load()),
- midiInPitch (o.midiInPitch.load()),
+ midiInReadActions(o.midiInReadActions),
+ midiInPitch (o.midiInPitch),
bufferOffset (o.bufferOffset),
rewinding (o.rewinding),
rsmp_state (src_new(SRC_LINEAR, G_MAX_IO_CHANS, nullptr))
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
bool inputMonitor;
float pitch;
- std::atomic<Frame> tracker; // chan position
- std::atomic<Frame> trackerPreview; // chan position for audio preview
+ Frame tracker; // chan position
+ Frame trackerPreview; // chan position for audio preview
/* begin, end
Begin/end point to read wave data from/to. */
/* midiIn*
MIDI input parameters. */
- bool midiInVeloAsVol;
- std::atomic<uint32_t> midiInReadActions;
- std::atomic<uint32_t> midiInPitch;
+ bool midiInVeloAsVol;
+ uint32_t midiInReadActions;
+ uint32_t midiInPitch;
/* bufferOffset
Offset used while filling the internal buffer with audio data. Value is
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
/* For one-shot modes, velocity drives the internal volume. */
if (velocity != 0) {
if (ch->isAnySingleMode() && ch->midiInVeloAsVol)
- ch->volume_i.store(u::math::map<int, float>(velocity, 0, G_MAX_VELOCITY, 0.0, 1.0));
+ ch->volume_i = u::math::map<int, float>(velocity, 0, G_MAX_VELOCITY, 0.0, 1.0);
}
switch (ch->playStatus) {
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
return;
quantize_(ch, fe.quantoPassed);
if (fe.onFirstBeat)
- onFirstBeat_(ch, conf::recsStopOnChanHalt);
+ onFirstBeat_(ch, conf::conf.recsStopOnChanHalt);
if (ch->readActions && fe.actions != nullptr)
for (const Action& action : *fe.actions)
if (action.channelId == ch->id)
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
/* -------------------------------------------------------------------------- */
-/* updateFrameBars
-Updates bpm, frames, beats and so on. */
+/* recomputeFrames_
+Updates bpm, frames, beats and so on. Private version. */
-void updateFrameBars_(model::Clock& c)
+void recomputeFrames_(model::Clock& c)
{
- c.framesInLoop = (conf::samplerate * (60.0f / c.bpm)) * c.beats;
+ c.framesInLoop = (conf::conf.samplerate * (60.0f / c.bpm)) * c.beats;
c.framesInBar = c.framesInLoop / (float) c.bars;
c.framesInBeat = c.framesInLoop / (float) c.beats;
c.framesInSeq = c.framesInBeat * G_MAX_BEATS;
c.beats = G_DEFAULT_BEATS;
c.bpm = G_DEFAULT_BPM;
c.quantize = G_DEFAULT_QUANTIZE;
- updateFrameBars_(c);
+ recomputeFrames_(c);
});
}
/* -------------------------------------------------------------------------- */
+/* recomputeFrames
+Public entry point: refreshes the derived frame counters (framesInLoop, bar,
+beat, sequencer) inside the Clock model via an onSwap transaction, delegating
+the math to the private recomputeFrames_() helper. */
+
+void recomputeFrames()
+{
+	model::onSwap(model::clock, [&](model::Clock& c) { recomputeFrames_(c); });
+}
+
+
+/* -------------------------------------------------------------------------- */
+
+
bool isRunning()
{
model::ClockLock lock(model::clock);
model::onSwap(model::clock, [&](model::Clock& c)
{
c.bpm = b;
- updateFrameBars_(c);
+ recomputeFrames_(c);
});
}
{
c.beats = newBeats;
c.bars = newBars;
- updateFrameBars_(c);
+ recomputeFrames_(c);
});
}
model::onSwap(model::clock, [&](model::Clock& c)
{
c.quantize = q;
- updateFrameBars_(c);
+ recomputeFrames_(c);
});
}
});
if (s == ClockStatus::RUNNING) {
- if (conf::midiSync == MIDI_SYNC_CLOCK_M) {
+ if (conf::conf.midiSync == MIDI_SYNC_CLOCK_M) {
kernelMidi::send(MIDI_START, -1, -1);
kernelMidi::send(MIDI_POSITION_PTR, 0, 0);
}
}
else
if (s == ClockStatus::STOPPED) {
- if (conf::midiSync == MIDI_SYNC_CLOCK_M)
+ if (conf::conf.midiSync == MIDI_SYNC_CLOCK_M)
kernelMidi::send(MIDI_STOP, -1, -1);
}
}
/* TODO - only Master (_M) is implemented so far. */
- if (conf::midiSync == MIDI_SYNC_CLOCK_M) {
+ if (conf::conf.midiSync == MIDI_SYNC_CLOCK_M) {
if (currentFrame % (c->framesInBeat / 24) == 0)
kernelMidi::send(MIDI_CLOCK, -1, -1);
return;
}
- if (conf::midiSync == MIDI_SYNC_MTC_M) {
+ if (conf::conf.midiSync == MIDI_SYNC_MTC_M) {
/* check if a new timecode frame has passed. If so, send MIDI TC
* quarter frames. 8 quarter frames, divided in two branches:
/* check if total timecode frames are greater than timecode fps:
* if so, a second has passed */
- if (midiTCframes_ > conf::midiTCfps) {
+ if (midiTCframes_ > conf::conf.midiTCfps) {
midiTCframes_ = 0;
midiTCseconds_++;
if (midiTCseconds_ >= 60) {
* be sent. The Full Frame is a SysEx message that encodes the entire
* SMPTE time in one message */
- if (conf::midiSync == MIDI_SYNC_MTC_M) {
+ if (conf::conf.midiSync == MIDI_SYNC_MTC_M) {
kernelMidi::send(MIDI_SYSEX, 0x7F, 0x00); // send msg on channel 0
kernelMidi::send(0x01, 0x01, 0x00); // hours 0
kernelMidi::send(0x00, 0x00, 0x00); // mins, secs, frames 0
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
{
void init(int sampleRate, float midiTCfps);
+/* recomputeFrames
+Updates bpm, frames, beats and so on. */
+
+void recomputeFrames();
+
/* sendMIDIsync
Generates MIDI sync output data. */
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
* -------------------------------------------------------------------------- */
+#include <fstream>
#include <cassert>
#include <string>
#include <FL/Fl.H>
+#include "deps/json/single_include/nlohmann/json.hpp"
#include "utils/fs.h"
#include "utils/log.h"
-#include "utils/json.h"
#include "core/const.h"
#include "core/types.h"
#include "conf.h"
+namespace nl = nlohmann;
+
+
namespace giada {
namespace m {
namespace conf
{
namespace
{
-std::string confFilePath = "";
-std::string confDirPath = "";
-
-
-/* -------------------------------------------------------------------------- */
-
-/* sanitize
-Avoids funky values from config file. */
-
-void sanitize()
-{
- if (!(soundSystem & G_SYS_API_ANY)) soundSystem = G_DEFAULT_SOUNDSYS;
- if (soundDeviceOut < 0) soundDeviceOut = G_DEFAULT_SOUNDDEV_OUT;
- if (soundDeviceIn < -1) soundDeviceIn = G_DEFAULT_SOUNDDEV_IN;
- if (channelsOut < 0) channelsOut = 0;
- if (channelsIn < 0) channelsIn = 0;
- if (buffersize < G_MIN_BUF_SIZE || buffersize > G_MAX_BUF_SIZE) buffersize = G_DEFAULT_BUFSIZE;
- if (midiPortOut < -1) midiPortOut = G_DEFAULT_MIDI_SYSTEM;
- if (midiPortOut < -1) midiPortOut = G_DEFAULT_MIDI_PORT_OUT;
- if (midiPortIn < -1) midiPortIn = G_DEFAULT_MIDI_PORT_IN;
- if (browserX < 0) browserX = 0;
- if (browserY < 0) browserY = 0;
- if (browserW < 396) browserW = 396;
- if (browserH < 302) browserH = 302;
- if (actionEditorX < 0) actionEditorX = 0;
- if (actionEditorY < 0) actionEditorY = 0;
- if (actionEditorW < 640) actionEditorW = 640;
- if (actionEditorH < 176) actionEditorH = 176;
- if (actionEditorZoom < 100) actionEditorZoom = 100;
- if (actionEditorGridVal < 0 || actionEditorGridVal > G_MAX_GRID_VAL) actionEditorGridVal = 0;
- if (actionEditorGridOn < 0) actionEditorGridOn = 0;
- if (pianoRollH <= 0) pianoRollH = 422;
- if (sampleActionEditorH <= 0) sampleActionEditorH = 40;
- if (velocityEditorH <= 0) velocityEditorH = 40;
- if (envelopeEditorH <= 0) envelopeEditorH = 40;
- if (sampleEditorX < 0) sampleEditorX = 0;
- if (sampleEditorY < 0) sampleEditorY = 0;
- if (sampleEditorW < 500) sampleEditorW = 500;
- if (sampleEditorH < 292) sampleEditorH = 292;
- if (sampleEditorGridVal < 0 || sampleEditorGridVal > G_MAX_GRID_VAL) sampleEditorGridVal = 0;
- if (sampleEditorGridOn < 0) sampleEditorGridOn = 0;
- if (midiInputX < 0) midiInputX = 0;
- if (midiInputY < 0) midiInputY = 0;
- if (midiInputW < G_DEFAULT_MIDI_INPUT_UI_W) midiInputW = G_DEFAULT_MIDI_INPUT_UI_W;
- if (midiInputH < G_DEFAULT_MIDI_INPUT_UI_H) midiInputH = G_DEFAULT_MIDI_INPUT_UI_H;
- if (configX < 0) configX = 0;
- if (configY < 0) configY = 0;
- if (pluginListX < 0) pluginListX = 0;
- if (pluginListY < 0) pluginListY = 0;
-#ifdef WITH_VST
- if (pluginChooserW < 640) pluginChooserW = 640;
- if (pluginChooserH < 480) pluginChooserW = 480;
-#endif
- if (bpmX < 0) bpmX = 0;
- if (bpmY < 0) bpmY = 0;
- if (beatsX < 0) beatsX = 0;
- if (beatsY < 0) beatsY = 0;
- if (aboutX < 0) aboutX = 0;
- if (aboutY < 0) aboutY = 0;
- if (samplerate < 8000) samplerate = G_DEFAULT_SAMPLERATE;
- if (rsmpQuality < 0 || rsmpQuality > 4) rsmpQuality = 0;
-}
+std::string confFilePath_ = "";
+std::string confDirPath_ = "";
/* -------------------------------------------------------------------------- */
Creates local folder where to put the configuration file. Path differs from OS
to OS. */
-int createConfigFolder()
+int createConfigFolder_()
{
#if defined(__linux__) || defined(__FreeBSD__) || defined(__APPLE__)
- if (u::fs::dirExists(confDirPath))
+ if (u::fs::dirExists(confDirPath_))
return 1;
u::log::print("[conf::createConfigFolder] .giada folder not present. Updating...\n");
- if (u::fs::mkdir(confDirPath)) {
+ if (u::fs::mkdir(confDirPath_)) {
u::log::print("[conf::createConfigFolder] status: ok\n");
return 1;
}
return 0;
}
-#else // windows
+#else // Windows: nothing to do
return 1;
/* -------------------------------------------------------------------------- */
-std::string header = "GIADACFG";
-
-int logMode = LOG_MODE_MUTE;
-int soundSystem = G_DEFAULT_SOUNDSYS;
-int soundDeviceOut = G_DEFAULT_SOUNDDEV_OUT;
-int soundDeviceIn = G_DEFAULT_SOUNDDEV_IN;
-int channelsOut = 0;
-int channelsIn = 0;
-int samplerate = G_DEFAULT_SAMPLERATE;
-int buffersize = G_DEFAULT_BUFSIZE;
-bool limitOutput = false;
-int rsmpQuality = 0;
-
-int midiSystem = 0;
-int midiPortOut = G_DEFAULT_MIDI_PORT_OUT;
-int midiPortIn = G_DEFAULT_MIDI_PORT_IN;
-std::string midiMapPath = "";
-std::string lastFileMap = "";
-int midiSync = MIDI_SYNC_NONE;
-float midiTCfps = 25.0f;
-
-/* TODO - move these into a RCUList */
-std::atomic<bool> midiIn (false);
-std::atomic<int> midiInFilter (-1);
-std::atomic<uint32_t> midiInRewind (0x0);
-std::atomic<uint32_t> midiInStartStop (0x0);
-std::atomic<uint32_t> midiInActionRec (0x0);
-std::atomic<uint32_t> midiInInputRec (0x0);
-std::atomic<uint32_t> midiInVolumeIn (0x0);
-std::atomic<uint32_t> midiInVolumeOut (0x0);
-std::atomic<uint32_t> midiInBeatDouble(0x0);
-std::atomic<uint32_t> midiInBeatHalf (0x0);
-std::atomic<uint32_t> midiInMetronome (0x0);
-
-bool recsStopOnChanHalt = false;
-bool chansStopOnSeqHalt = false;
-bool treatRecsAsLoops = false;
-bool inputMonitorDefaultOn = false;
-
-std::string pluginPath = "";
-std::string patchPath = "";
-std::string samplePath = "";
-
-int mainWindowX = (Fl::w() / 2) - (G_MIN_GUI_WIDTH / 2);
-int mainWindowY = (Fl::h() / 2) - (G_MIN_GUI_HEIGHT / 2);
-int mainWindowW = G_MIN_GUI_WIDTH;
-int mainWindowH = G_MIN_GUI_HEIGHT;
-
-int browserX = 0;
-int browserY = 0;
-int browserW = 640;
-int browserH = 480;
-int browserPosition = 0;
-int browserLastValue = 0;
-std::string browserLastPath = "";
-
-int actionEditorX = 0;
-int actionEditorY = 0;
-int actionEditorW = 640;
-int actionEditorH = 480;
-int actionEditorZoom = 100;
-int actionEditorGridVal = 1;
-int actionEditorGridOn = false;
-
-int sampleEditorX = 0;
-int sampleEditorY = 0;
-int sampleEditorW = 640;
-int sampleEditorH = 480;
-int sampleEditorGridVal = 0;
-int sampleEditorGridOn = false;
-
-int midiInputX = 0;
-int midiInputY = 0;
-int midiInputW = G_DEFAULT_MIDI_INPUT_UI_W;
-int midiInputH = G_DEFAULT_MIDI_INPUT_UI_H;
-
-int pianoRollY = -1;
-int pianoRollH = 422;
-
-int sampleActionEditorH = 40;
-int velocityEditorH = 40;
-int envelopeEditorH = 40;
-
-int pluginListX = 0;
-int pluginListY = 0;
-
-int configX = 0;
-int configY = 0;
-
-int bpmX = 0;
-int bpmY = 0;
-
-int beatsX = 0;
-int beatsY = 0;
-
-int aboutX = 0;
-int aboutY = 0;
-
-int nameX = 0;
-int nameY = 0;
-
-int recTriggerMode = static_cast<int>(RecTriggerMode::NORMAL);
-float recTriggerLevel = G_DEFAULT_REC_TRIGGER_LEVEL;
-
-#ifdef WITH_VST
-
-int pluginChooserX = 0;
-int pluginChooserY = 0;
-int pluginChooserW = 640;
-int pluginChooserH = 480;
-int pluginSortMethod = 0;
-
-#endif
+Conf conf;
/* -------------------------------------------------------------------------- */
void init()
{
- /* Initialize confFilePath, i.e. the configuration file. In windows it is in
+ conf = Conf();
+
+ /* Initialize confFilePath_, i.e. the configuration file. In windows it is in
* the same dir of the .exe, while in Linux and OS X in ~/.giada */
#if defined(__linux__) || defined(__FreeBSD__) || defined(__APPLE__)
- confFilePath = u::fs::getHomePath() + G_SLASH + CONF_FILENAME;
- confDirPath = u::fs::getHomePath() + G_SLASH;
+ confFilePath_ = u::fs::getHomePath() + G_SLASH + CONF_FILENAME;
+ confDirPath_ = u::fs::getHomePath() + G_SLASH;
#elif defined(_WIN32)
- confFilePath = CONF_FILENAME;
- confDirPath = "";
+ confFilePath_ = CONF_FILENAME;
+ confDirPath_ = "";
#endif
}
/* -------------------------------------------------------------------------- */
-bool isMidiInAllowed(int c)
-{
- return midiInFilter == -1 || midiInFilter == c;
-}
-
-
-/* -------------------------------------------------------------------------- */
-
-
bool read()
{
- namespace uj = u::json;
-
init();
- json_t* j = uj::load(confFilePath);
- if (j == nullptr)
+ std::ifstream ifs(confFilePath_);
+ if (!ifs.good())
return false;
- if (!uj::isObject(j)) {
- json_decref(j);
- return false;
- }
-
- header = uj::readString(j, CONF_KEY_HEADER);
- logMode = uj::readInt(j, CONF_KEY_LOG_MODE);
- soundSystem = uj::readInt(j, CONF_KEY_SOUND_SYSTEM);
- soundDeviceOut = uj::readInt(j, CONF_KEY_SOUND_DEVICE_OUT);
- soundDeviceIn = uj::readInt(j, CONF_KEY_SOUND_DEVICE_IN);
- channelsOut = uj::readInt(j, CONF_KEY_CHANNELS_OUT);
- channelsIn = uj::readInt(j, CONF_KEY_CHANNELS_IN);
- samplerate = uj::readInt(j, CONF_KEY_SAMPLERATE);
- buffersize = uj::readInt(j, CONF_KEY_BUFFER_SIZE);
- limitOutput = uj::readBool(j, CONF_KEY_LIMIT_OUTPUT);
- rsmpQuality = uj::readInt(j, CONF_KEY_RESAMPLE_QUALITY);
- midiSystem = uj::readInt(j, CONF_KEY_MIDI_SYSTEM);
- midiPortOut = uj::readInt(j, CONF_KEY_MIDI_PORT_OUT);
- midiPortIn = uj::readInt(j, CONF_KEY_MIDI_PORT_IN);
- midiMapPath = uj::readString(j, CONF_KEY_MIDIMAP_PATH);
- lastFileMap = uj::readString(j, CONF_KEY_LAST_MIDIMAP);
- midiSync = uj::readInt(j, CONF_KEY_MIDI_SYNC);
- midiTCfps = uj::readFloat(j, CONF_KEY_MIDI_TC_FPS);
- midiIn = uj::readBool(j, CONF_KEY_MIDI_IN);
- midiInFilter = uj::readInt(j, CONF_KEY_MIDI_IN_FILTER);
- midiInRewind = uj::readInt(j, CONF_KEY_MIDI_IN_REWIND);
- midiInStartStop = uj::readInt(j, CONF_KEY_MIDI_IN_START_STOP);
- midiInActionRec = uj::readInt(j, CONF_KEY_MIDI_IN_ACTION_REC);
- midiInInputRec = uj::readInt(j, CONF_KEY_MIDI_IN_INPUT_REC);
- midiInMetronome = uj::readInt(j, CONF_KEY_MIDI_IN_METRONOME);
- midiInVolumeIn = uj::readInt(j, CONF_KEY_MIDI_IN_VOLUME_IN);
- midiInVolumeOut = uj::readInt(j, CONF_KEY_MIDI_IN_VOLUME_OUT);
- midiInBeatDouble = uj::readInt(j, CONF_KEY_MIDI_IN_BEAT_DOUBLE);
- midiInBeatHalf = uj::readInt(j, CONF_KEY_MIDI_IN_BEAT_HALF);
- recsStopOnChanHalt = uj::readBool(j, CONF_KEY_RECS_STOP_ON_CHAN_HALT);
- chansStopOnSeqHalt = uj::readBool(j, CONF_KEY_CHANS_STOP_ON_SEQ_HALT);
- treatRecsAsLoops = uj::readBool(j, CONF_KEY_TREAT_RECS_AS_LOOPS);
- inputMonitorDefaultOn = uj::readBool(j, CONF_KEY_INPUT_MONITOR_DEFAULT_ON);
- pluginPath = uj::readString(j, CONF_KEY_PLUGINS_PATH);
- patchPath = uj::readString(j, CONF_KEY_PATCHES_PATH);
- samplePath = uj::readString(j, CONF_KEY_SAMPLES_PATH);
- mainWindowX = uj::readInt(j, CONF_KEY_MAIN_WINDOW_X);
- mainWindowY = uj::readInt(j, CONF_KEY_MAIN_WINDOW_Y);
- mainWindowW = uj::readInt(j, CONF_KEY_MAIN_WINDOW_W);
- mainWindowH = uj::readInt(j, CONF_KEY_MAIN_WINDOW_H);
- browserX = uj::readInt(j, CONF_KEY_BROWSER_X);
- browserY = uj::readInt(j, CONF_KEY_BROWSER_Y);
- browserW = uj::readInt(j, CONF_KEY_BROWSER_W);
- browserH = uj::readInt(j, CONF_KEY_BROWSER_H);
- browserPosition = uj::readInt(j, CONF_KEY_BROWSER_POSITION);
- browserLastPath = uj::readString(j, CONF_KEY_BROWSER_LAST_PATH);
- browserLastValue = uj::readInt(j, CONF_KEY_BROWSER_LAST_VALUE);
- actionEditorX = uj::readInt(j, CONF_KEY_ACTION_EDITOR_X);
- actionEditorY = uj::readInt(j, CONF_KEY_ACTION_EDITOR_Y);
- actionEditorW = uj::readInt(j, CONF_KEY_ACTION_EDITOR_W);
- actionEditorH = uj::readInt(j, CONF_KEY_ACTION_EDITOR_H);
- actionEditorZoom = uj::readInt(j, CONF_KEY_ACTION_EDITOR_ZOOM);
- actionEditorGridVal = uj::readInt(j, CONF_KEY_ACTION_EDITOR_GRID_VAL);
- actionEditorGridOn = uj::readInt(j, CONF_KEY_ACTION_EDITOR_GRID_ON);
- sampleEditorX = uj::readInt(j, CONF_KEY_SAMPLE_EDITOR_X);
- sampleEditorY = uj::readInt(j, CONF_KEY_SAMPLE_EDITOR_Y);
- sampleEditorW = uj::readInt(j, CONF_KEY_SAMPLE_EDITOR_W);
- sampleEditorH = uj::readInt(j, CONF_KEY_SAMPLE_EDITOR_H);
- sampleEditorGridVal = uj::readInt(j, CONF_KEY_SAMPLE_EDITOR_GRID_VAL);
- sampleEditorGridOn = uj::readInt(j, CONF_KEY_SAMPLE_EDITOR_GRID_ON);
- pianoRollY = uj::readInt(j, CONF_KEY_PIANO_ROLL_Y);
- pianoRollH = uj::readInt(j, CONF_KEY_PIANO_ROLL_H);
- sampleActionEditorH = uj::readInt(j, CONF_KEY_SAMPLE_ACTION_EDITOR_H);
- velocityEditorH = uj::readInt(j, CONF_KEY_VELOCITY_EDITOR_H);
- envelopeEditorH = uj::readInt(j, CONF_KEY_ENVELOPE_EDITOR_H);
- pluginListX = uj::readInt(j, CONF_KEY_PLUGIN_LIST_X);
- pluginListY = uj::readInt(j, CONF_KEY_PLUGIN_LIST_Y);
- configX = uj::readInt(j, CONF_KEY_CONFIG_X);
- configY = uj::readInt(j, CONF_KEY_CONFIG_Y);
- bpmX = uj::readInt(j, CONF_KEY_BPM_X);
- bpmY = uj::readInt(j, CONF_KEY_BPM_Y);
- beatsX = uj::readInt(j, CONF_KEY_BEATS_X);
- beatsY = uj::readInt(j, CONF_KEY_BEATS_Y);
- aboutX = uj::readInt(j, CONF_KEY_ABOUT_X);
- aboutY = uj::readInt(j, CONF_KEY_ABOUT_Y);
- nameX = uj::readInt(j, CONF_KEY_NAME_X);
- nameY = uj::readInt(j, CONF_KEY_NAME_Y);
- midiInputX = uj::readInt(j, CONF_KEY_MIDI_INPUT_X);
- midiInputY = uj::readInt(j, CONF_KEY_MIDI_INPUT_Y);
- midiInputW = uj::readInt(j, CONF_KEY_MIDI_INPUT_W);
- midiInputH = uj::readInt(j, CONF_KEY_MIDI_INPUT_H);
- recTriggerMode = uj::readInt(j, CONF_KEY_REC_TRIGGER_MODE);
- recTriggerLevel = uj::readFloat(j, CONF_KEY_REC_TRIGGER_LEVEL);
-
+ nl::json j = nl::json::parse(ifs);
+
+ conf.logMode = j.value(CONF_KEY_LOG_MODE, conf.logMode);
+ conf.soundSystem = j.value(CONF_KEY_SOUND_SYSTEM, conf.soundSystem);
+ conf.soundDeviceOut = j.value(CONF_KEY_SOUND_DEVICE_OUT, conf.soundDeviceOut);
+ conf.soundDeviceIn = j.value(CONF_KEY_SOUND_DEVICE_IN, conf.soundDeviceIn);
+ conf.channelsOut = j.value(CONF_KEY_CHANNELS_OUT, conf.channelsOut);
+ conf.channelsIn = j.value(CONF_KEY_CHANNELS_IN, conf.channelsIn);
+ conf.samplerate = j.value(CONF_KEY_SAMPLERATE, conf.samplerate);
+ conf.buffersize = j.value(CONF_KEY_BUFFER_SIZE, conf.buffersize);
+ conf.limitOutput = j.value(CONF_KEY_LIMIT_OUTPUT, conf.limitOutput);
+ conf.rsmpQuality = j.value(CONF_KEY_RESAMPLE_QUALITY, conf.rsmpQuality);
+ conf.midiSystem = j.value(CONF_KEY_MIDI_SYSTEM, conf.midiSystem);
+ conf.midiPortOut = j.value(CONF_KEY_MIDI_PORT_OUT, conf.midiPortOut);
+ conf.midiPortIn = j.value(CONF_KEY_MIDI_PORT_IN, conf.midiPortIn);
+ conf.midiMapPath = j.value(CONF_KEY_MIDIMAP_PATH, conf.midiMapPath);
+ conf.lastFileMap = j.value(CONF_KEY_LAST_MIDIMAP, conf.lastFileMap);
+ conf.midiSync = j.value(CONF_KEY_MIDI_SYNC, conf.midiSync);
+ conf.midiTCfps = j.value(CONF_KEY_MIDI_TC_FPS, conf.midiTCfps);
+ conf.recsStopOnChanHalt = j.value(CONF_KEY_RECS_STOP_ON_CHAN_HALT, conf.recsStopOnChanHalt);
+ conf.chansStopOnSeqHalt = j.value(CONF_KEY_CHANS_STOP_ON_SEQ_HALT, conf.chansStopOnSeqHalt);
+ conf.treatRecsAsLoops = j.value(CONF_KEY_TREAT_RECS_AS_LOOPS, conf.treatRecsAsLoops);
+ conf.inputMonitorDefaultOn = j.value(CONF_KEY_INPUT_MONITOR_DEFAULT_ON, conf.inputMonitorDefaultOn);
+ conf.pluginPath = j.value(CONF_KEY_PLUGINS_PATH, conf.pluginPath);
+ conf.patchPath = j.value(CONF_KEY_PATCHES_PATH, conf.patchPath);
+ conf.samplePath = j.value(CONF_KEY_SAMPLES_PATH, conf.samplePath);
+ conf.mainWindowX = j.value(CONF_KEY_MAIN_WINDOW_X, conf.mainWindowX);
+ conf.mainWindowY = j.value(CONF_KEY_MAIN_WINDOW_Y, conf.mainWindowY);
+ conf.mainWindowW = j.value(CONF_KEY_MAIN_WINDOW_W, conf.mainWindowW);
+ conf.mainWindowH = j.value(CONF_KEY_MAIN_WINDOW_H, conf.mainWindowH);
+ conf.browserX = j.value(CONF_KEY_BROWSER_X, conf.browserX);
+ conf.browserY = j.value(CONF_KEY_BROWSER_Y, conf.browserY);
+ conf.browserW = j.value(CONF_KEY_BROWSER_W, conf.browserW);
+ conf.browserH = j.value(CONF_KEY_BROWSER_H, conf.browserH);
+ conf.browserPosition = j.value(CONF_KEY_BROWSER_POSITION, conf.browserPosition);
+ conf.browserLastPath = j.value(CONF_KEY_BROWSER_LAST_PATH, conf.browserLastPath);
+ conf.browserLastValue = j.value(CONF_KEY_BROWSER_LAST_VALUE, conf.browserLastValue);
+ conf.actionEditorX = j.value(CONF_KEY_ACTION_EDITOR_X, conf.actionEditorX);
+ conf.actionEditorY = j.value(CONF_KEY_ACTION_EDITOR_Y, conf.actionEditorY);
+ conf.actionEditorW = j.value(CONF_KEY_ACTION_EDITOR_W, conf.actionEditorW);
+ conf.actionEditorH = j.value(CONF_KEY_ACTION_EDITOR_H, conf.actionEditorH);
+ conf.actionEditorZoom = j.value(CONF_KEY_ACTION_EDITOR_ZOOM, conf.actionEditorZoom);
+ conf.actionEditorGridVal = j.value(CONF_KEY_ACTION_EDITOR_GRID_VAL, conf.actionEditorGridVal);
+ conf.actionEditorGridOn = j.value(CONF_KEY_ACTION_EDITOR_GRID_ON, conf.actionEditorGridOn);
+ conf.sampleEditorX = j.value(CONF_KEY_SAMPLE_EDITOR_X, conf.sampleEditorX);
+ conf.sampleEditorY = j.value(CONF_KEY_SAMPLE_EDITOR_Y, conf.sampleEditorY);
+ conf.sampleEditorW = j.value(CONF_KEY_SAMPLE_EDITOR_W, conf.sampleEditorW);
+ conf.sampleEditorH = j.value(CONF_KEY_SAMPLE_EDITOR_H, conf.sampleEditorH);
+ conf.sampleEditorGridVal = j.value(CONF_KEY_SAMPLE_EDITOR_GRID_VAL, conf.sampleEditorGridVal);
+ conf.sampleEditorGridOn = j.value(CONF_KEY_SAMPLE_EDITOR_GRID_ON, conf.sampleEditorGridOn);
+ conf.pianoRollY = j.value(CONF_KEY_PIANO_ROLL_Y, conf.pianoRollY);
+ conf.pianoRollH = j.value(CONF_KEY_PIANO_ROLL_H, conf.pianoRollH);
+ conf.sampleActionEditorH = j.value(CONF_KEY_SAMPLE_ACTION_EDITOR_H, conf.sampleActionEditorH);
+ conf.velocityEditorH = j.value(CONF_KEY_VELOCITY_EDITOR_H, conf.velocityEditorH);
+ conf.envelopeEditorH = j.value(CONF_KEY_ENVELOPE_EDITOR_H, conf.envelopeEditorH);
+ conf.pluginListX = j.value(CONF_KEY_PLUGIN_LIST_X, conf.pluginListX);
+ conf.pluginListY = j.value(CONF_KEY_PLUGIN_LIST_Y, conf.pluginListY);
+ conf.midiInputX = j.value(CONF_KEY_MIDI_INPUT_X, conf.midiInputX);
+ conf.midiInputY = j.value(CONF_KEY_MIDI_INPUT_Y, conf.midiInputY);
+ conf.midiInputW = j.value(CONF_KEY_MIDI_INPUT_W, conf.midiInputW);
+ conf.midiInputH = j.value(CONF_KEY_MIDI_INPUT_H, conf.midiInputH);
+ conf.recTriggerMode = j.value(CONF_KEY_REC_TRIGGER_MODE, conf.recTriggerMode);
+ conf.recTriggerLevel = j.value(CONF_KEY_REC_TRIGGER_LEVEL, conf.recTriggerLevel);
+ conf.midiInEnabled = j.value(CONF_KEY_MIDI_IN, conf.midiInEnabled);
+ conf.midiInFilter = j.value(CONF_KEY_MIDI_IN_FILTER, conf.midiInFilter);
+ conf.midiInRewind = j.value(CONF_KEY_MIDI_IN_REWIND, conf.midiInRewind);
+ conf.midiInStartStop = j.value(CONF_KEY_MIDI_IN_START_STOP, conf.midiInStartStop);
+ conf.midiInActionRec = j.value(CONF_KEY_MIDI_IN_ACTION_REC, conf.midiInActionRec);
+ conf.midiInInputRec = j.value(CONF_KEY_MIDI_IN_INPUT_REC, conf.midiInInputRec);
+ conf.midiInMetronome = j.value(CONF_KEY_MIDI_IN_METRONOME, conf.midiInMetronome);
+ conf.midiInVolumeIn = j.value(CONF_KEY_MIDI_IN_VOLUME_IN, conf.midiInVolumeIn);
+ conf.midiInVolumeOut = j.value(CONF_KEY_MIDI_IN_VOLUME_OUT, conf.midiInVolumeOut);
+ conf.midiInBeatDouble = j.value(CONF_KEY_MIDI_IN_BEAT_DOUBLE, conf.midiInBeatDouble);
+ conf.midiInBeatHalf = j.value(CONF_KEY_MIDI_IN_BEAT_HALF, conf.midiInBeatHalf);
#ifdef WITH_VST
-
- pluginChooserX = uj::readInt(j, CONF_KEY_PLUGIN_CHOOSER_X);
- pluginChooserY = uj::readInt(j, CONF_KEY_PLUGIN_CHOOSER_Y);
- pluginChooserW = uj::readInt(j, CONF_KEY_PLUGIN_CHOOSER_W);
- pluginChooserH = uj::readInt(j, CONF_KEY_PLUGIN_CHOOSER_H);
- pluginSortMethod = uj::readInt(j, CONF_KEY_PLUGIN_SORT_METHOD);
-
+ conf.pluginChooserX = j.value(CONF_KEY_PLUGIN_CHOOSER_X, conf.pluginChooserX);
+ conf.pluginChooserY = j.value(CONF_KEY_PLUGIN_CHOOSER_Y, conf.pluginChooserY);
+ conf.pluginChooserW = j.value(CONF_KEY_PLUGIN_CHOOSER_W, conf.pluginChooserW);
+ conf.pluginChooserH = j.value(CONF_KEY_PLUGIN_CHOOSER_H, conf.pluginChooserH);
+ conf.pluginSortMethod = j.value(CONF_KEY_PLUGIN_SORT_METHOD, conf.pluginSortMethod);
#endif
- json_decref(j);
-
- sanitize();
-
return true;
}
bool write()
{
- if (!createConfigFolder())
+ if (!createConfigFolder_())
return false;
- json_t* j = json_object();
-
- json_object_set_new(j, CONF_KEY_HEADER, json_string(header.c_str()));
- json_object_set_new(j, CONF_KEY_LOG_MODE, json_integer(logMode));
- json_object_set_new(j, CONF_KEY_SOUND_SYSTEM, json_integer(soundSystem));
- json_object_set_new(j, CONF_KEY_SOUND_DEVICE_OUT, json_integer(soundDeviceOut));
- json_object_set_new(j, CONF_KEY_SOUND_DEVICE_IN, json_integer(soundDeviceIn));
- json_object_set_new(j, CONF_KEY_CHANNELS_OUT, json_integer(channelsOut));
- json_object_set_new(j, CONF_KEY_CHANNELS_IN, json_integer(channelsIn));
- json_object_set_new(j, CONF_KEY_SAMPLERATE, json_integer(samplerate));
- json_object_set_new(j, CONF_KEY_BUFFER_SIZE, json_integer(buffersize));
- json_object_set_new(j, CONF_KEY_LIMIT_OUTPUT, json_boolean(limitOutput));
- json_object_set_new(j, CONF_KEY_RESAMPLE_QUALITY, json_integer(rsmpQuality));
- json_object_set_new(j, CONF_KEY_MIDI_SYSTEM, json_integer(midiSystem));
- json_object_set_new(j, CONF_KEY_MIDI_PORT_OUT, json_integer(midiPortOut));
- json_object_set_new(j, CONF_KEY_MIDI_PORT_IN, json_integer(midiPortIn));
- json_object_set_new(j, CONF_KEY_MIDIMAP_PATH, json_string(midiMapPath.c_str()));
- json_object_set_new(j, CONF_KEY_LAST_MIDIMAP, json_string(lastFileMap.c_str()));
- json_object_set_new(j, CONF_KEY_MIDI_SYNC, json_integer(midiSync));
- json_object_set_new(j, CONF_KEY_MIDI_TC_FPS, json_real(midiTCfps));
- json_object_set_new(j, CONF_KEY_MIDI_IN, json_boolean(midiIn));
- json_object_set_new(j, CONF_KEY_MIDI_IN_FILTER, json_integer(midiInFilter));
- json_object_set_new(j, CONF_KEY_MIDI_IN_REWIND, json_integer(midiInRewind));
- json_object_set_new(j, CONF_KEY_MIDI_IN_START_STOP, json_integer(midiInStartStop));
- json_object_set_new(j, CONF_KEY_MIDI_IN_ACTION_REC, json_integer(midiInActionRec));
- json_object_set_new(j, CONF_KEY_MIDI_IN_INPUT_REC, json_integer(midiInInputRec));
- json_object_set_new(j, CONF_KEY_MIDI_IN_METRONOME, json_integer(midiInMetronome));
- json_object_set_new(j, CONF_KEY_MIDI_IN_VOLUME_IN, json_integer(midiInVolumeIn));
- json_object_set_new(j, CONF_KEY_MIDI_IN_VOLUME_OUT, json_integer(midiInVolumeOut));
- json_object_set_new(j, CONF_KEY_MIDI_IN_BEAT_DOUBLE, json_integer(midiInBeatDouble));
- json_object_set_new(j, CONF_KEY_MIDI_IN_BEAT_HALF, json_integer(midiInBeatHalf));
- json_object_set_new(j, CONF_KEY_RECS_STOP_ON_CHAN_HALT, json_boolean(recsStopOnChanHalt));
- json_object_set_new(j, CONF_KEY_CHANS_STOP_ON_SEQ_HALT, json_boolean(chansStopOnSeqHalt));
- json_object_set_new(j, CONF_KEY_TREAT_RECS_AS_LOOPS, json_boolean(treatRecsAsLoops));
- json_object_set_new(j, CONF_KEY_INPUT_MONITOR_DEFAULT_ON, json_boolean(inputMonitorDefaultOn));
- json_object_set_new(j, CONF_KEY_PLUGINS_PATH, json_string(pluginPath.c_str()));
- json_object_set_new(j, CONF_KEY_PATCHES_PATH, json_string(patchPath.c_str()));
- json_object_set_new(j, CONF_KEY_SAMPLES_PATH, json_string(samplePath.c_str()));
- json_object_set_new(j, CONF_KEY_MAIN_WINDOW_X, json_integer(mainWindowX));
- json_object_set_new(j, CONF_KEY_MAIN_WINDOW_Y, json_integer(mainWindowY));
- json_object_set_new(j, CONF_KEY_MAIN_WINDOW_W, json_integer(mainWindowW));
- json_object_set_new(j, CONF_KEY_MAIN_WINDOW_H, json_integer(mainWindowH));
- json_object_set_new(j, CONF_KEY_BROWSER_X, json_integer(browserX));
- json_object_set_new(j, CONF_KEY_BROWSER_Y, json_integer(browserY));
- json_object_set_new(j, CONF_KEY_BROWSER_W, json_integer(browserW));
- json_object_set_new(j, CONF_KEY_BROWSER_H, json_integer(browserH));
- json_object_set_new(j, CONF_KEY_BROWSER_POSITION, json_integer(browserPosition));
- json_object_set_new(j, CONF_KEY_BROWSER_LAST_PATH, json_string(browserLastPath.c_str()));
- json_object_set_new(j, CONF_KEY_BROWSER_LAST_VALUE, json_integer(browserLastValue));
- json_object_set_new(j, CONF_KEY_ACTION_EDITOR_X, json_integer(actionEditorX));
- json_object_set_new(j, CONF_KEY_ACTION_EDITOR_Y, json_integer(actionEditorY));
- json_object_set_new(j, CONF_KEY_ACTION_EDITOR_W, json_integer(actionEditorW));
- json_object_set_new(j, CONF_KEY_ACTION_EDITOR_H, json_integer(actionEditorH));
- json_object_set_new(j, CONF_KEY_ACTION_EDITOR_ZOOM, json_integer(actionEditorZoom));
- json_object_set_new(j, CONF_KEY_ACTION_EDITOR_GRID_VAL, json_integer(actionEditorGridVal));
- json_object_set_new(j, CONF_KEY_ACTION_EDITOR_GRID_ON, json_integer(actionEditorGridOn));
- json_object_set_new(j, CONF_KEY_SAMPLE_EDITOR_X, json_integer(sampleEditorX));
- json_object_set_new(j, CONF_KEY_SAMPLE_EDITOR_Y, json_integer(sampleEditorY));
- json_object_set_new(j, CONF_KEY_SAMPLE_EDITOR_W, json_integer(sampleEditorW));
- json_object_set_new(j, CONF_KEY_SAMPLE_EDITOR_H, json_integer(sampleEditorH));
- json_object_set_new(j, CONF_KEY_SAMPLE_EDITOR_GRID_VAL, json_integer(sampleEditorGridVal));
- json_object_set_new(j, CONF_KEY_SAMPLE_EDITOR_GRID_ON, json_integer(sampleEditorGridOn));
- json_object_set_new(j, CONF_KEY_PIANO_ROLL_Y, json_integer(pianoRollY));
- json_object_set_new(j, CONF_KEY_PIANO_ROLL_H, json_integer(pianoRollH));
- json_object_set_new(j, CONF_KEY_SAMPLE_ACTION_EDITOR_H, json_integer(sampleActionEditorH));
- json_object_set_new(j, CONF_KEY_VELOCITY_EDITOR_H, json_integer(velocityEditorH));
- json_object_set_new(j, CONF_KEY_ENVELOPE_EDITOR_H, json_integer(envelopeEditorH));
- json_object_set_new(j, CONF_KEY_PLUGIN_LIST_X, json_integer(pluginListX));
- json_object_set_new(j, CONF_KEY_PLUGIN_LIST_Y, json_integer(pluginListY));
- json_object_set_new(j, CONF_KEY_CONFIG_X, json_integer(configX));
- json_object_set_new(j, CONF_KEY_CONFIG_Y, json_integer(configY));
- json_object_set_new(j, CONF_KEY_BPM_X, json_integer(bpmX));
- json_object_set_new(j, CONF_KEY_BPM_Y, json_integer(bpmY));
- json_object_set_new(j, CONF_KEY_BEATS_X, json_integer(beatsX));
- json_object_set_new(j, CONF_KEY_BEATS_Y, json_integer(beatsY));
- json_object_set_new(j, CONF_KEY_ABOUT_X, json_integer(aboutX));
- json_object_set_new(j, CONF_KEY_ABOUT_Y, json_integer(aboutY));
- json_object_set_new(j, CONF_KEY_NAME_X, json_integer(nameX));
- json_object_set_new(j, CONF_KEY_NAME_Y, json_integer(nameY));
- json_object_set_new(j, CONF_KEY_MIDI_INPUT_X, json_integer(midiInputX));
- json_object_set_new(j, CONF_KEY_MIDI_INPUT_Y, json_integer(midiInputY));
- json_object_set_new(j, CONF_KEY_MIDI_INPUT_W, json_integer(midiInputW));
- json_object_set_new(j, CONF_KEY_MIDI_INPUT_H, json_integer(midiInputH));
- json_object_set_new(j, CONF_KEY_REC_TRIGGER_MODE, json_integer(recTriggerMode));
- json_object_set_new(j, CONF_KEY_REC_TRIGGER_LEVEL, json_real(recTriggerLevel));
-
+ nl::json j;
+
+ j[CONF_KEY_HEADER] = "GIADACFG";
+ j[CONF_KEY_LOG_MODE] = conf.logMode;
+ j[CONF_KEY_SOUND_SYSTEM] = conf.soundSystem;
+ j[CONF_KEY_SOUND_DEVICE_OUT] = conf.soundDeviceOut;
+ j[CONF_KEY_SOUND_DEVICE_IN] = conf.soundDeviceIn;
+ j[CONF_KEY_CHANNELS_OUT] = conf.channelsOut;
+ j[CONF_KEY_CHANNELS_IN] = conf.channelsIn;
+ j[CONF_KEY_SAMPLERATE] = conf.samplerate;
+ j[CONF_KEY_BUFFER_SIZE] = conf.buffersize;
+ j[CONF_KEY_LIMIT_OUTPUT] = conf.limitOutput;
+ j[CONF_KEY_RESAMPLE_QUALITY] = conf.rsmpQuality;
+ j[CONF_KEY_MIDI_SYSTEM] = conf.midiSystem;
+ j[CONF_KEY_MIDI_PORT_OUT] = conf.midiPortOut;
+ j[CONF_KEY_MIDI_PORT_IN] = conf.midiPortIn;
+ j[CONF_KEY_MIDIMAP_PATH] = conf.midiMapPath;
+ j[CONF_KEY_LAST_MIDIMAP] = conf.lastFileMap;
+ j[CONF_KEY_MIDI_SYNC] = conf.midiSync;
+ j[CONF_KEY_MIDI_TC_FPS] = conf.midiTCfps;
+ j[CONF_KEY_MIDI_IN] = conf.midiInEnabled;
+ j[CONF_KEY_MIDI_IN_FILTER] = conf.midiInFilter;
+ j[CONF_KEY_MIDI_IN_REWIND] = conf.midiInRewind;
+ j[CONF_KEY_MIDI_IN_START_STOP] = conf.midiInStartStop;
+ j[CONF_KEY_MIDI_IN_ACTION_REC] = conf.midiInActionRec;
+ j[CONF_KEY_MIDI_IN_INPUT_REC] = conf.midiInInputRec;
+ j[CONF_KEY_MIDI_IN_METRONOME] = conf.midiInMetronome;
+ j[CONF_KEY_MIDI_IN_VOLUME_IN] = conf.midiInVolumeIn;
+ j[CONF_KEY_MIDI_IN_VOLUME_OUT] = conf.midiInVolumeOut;
+ j[CONF_KEY_MIDI_IN_BEAT_DOUBLE] = conf.midiInBeatDouble;
+ j[CONF_KEY_MIDI_IN_BEAT_HALF] = conf.midiInBeatHalf;
+ j[CONF_KEY_RECS_STOP_ON_CHAN_HALT] = conf.recsStopOnChanHalt;
+ j[CONF_KEY_CHANS_STOP_ON_SEQ_HALT] = conf.chansStopOnSeqHalt;
+ j[CONF_KEY_TREAT_RECS_AS_LOOPS] = conf.treatRecsAsLoops;
+ j[CONF_KEY_INPUT_MONITOR_DEFAULT_ON] = conf.inputMonitorDefaultOn;
+ j[CONF_KEY_PLUGINS_PATH] = conf.pluginPath;
+ j[CONF_KEY_PATCHES_PATH] = conf.patchPath;
+ j[CONF_KEY_SAMPLES_PATH] = conf.samplePath;
+ j[CONF_KEY_MAIN_WINDOW_X] = conf.mainWindowX;
+ j[CONF_KEY_MAIN_WINDOW_Y] = conf.mainWindowY;
+ j[CONF_KEY_MAIN_WINDOW_W] = conf.mainWindowW;
+ j[CONF_KEY_MAIN_WINDOW_H] = conf.mainWindowH;
+ j[CONF_KEY_BROWSER_X] = conf.browserX;
+ j[CONF_KEY_BROWSER_Y] = conf.browserY;
+ j[CONF_KEY_BROWSER_W] = conf.browserW;
+ j[CONF_KEY_BROWSER_H] = conf.browserH;
+ j[CONF_KEY_BROWSER_POSITION] = conf.browserPosition;
+ j[CONF_KEY_BROWSER_LAST_PATH] = conf.browserLastPath;
+ j[CONF_KEY_BROWSER_LAST_VALUE] = conf.browserLastValue;
+ j[CONF_KEY_ACTION_EDITOR_X] = conf.actionEditorX;
+ j[CONF_KEY_ACTION_EDITOR_Y] = conf.actionEditorY;
+ j[CONF_KEY_ACTION_EDITOR_W] = conf.actionEditorW;
+ j[CONF_KEY_ACTION_EDITOR_H] = conf.actionEditorH;
+ j[CONF_KEY_ACTION_EDITOR_ZOOM] = conf.actionEditorZoom;
+ j[CONF_KEY_ACTION_EDITOR_GRID_VAL] = conf.actionEditorGridVal;
+ j[CONF_KEY_ACTION_EDITOR_GRID_ON] = conf.actionEditorGridOn;
+ j[CONF_KEY_SAMPLE_EDITOR_X] = conf.sampleEditorX;
+ j[CONF_KEY_SAMPLE_EDITOR_Y] = conf.sampleEditorY;
+ j[CONF_KEY_SAMPLE_EDITOR_W] = conf.sampleEditorW;
+ j[CONF_KEY_SAMPLE_EDITOR_H] = conf.sampleEditorH;
+ j[CONF_KEY_SAMPLE_EDITOR_GRID_VAL] = conf.sampleEditorGridVal;
+ j[CONF_KEY_SAMPLE_EDITOR_GRID_ON] = conf.sampleEditorGridOn;
+ j[CONF_KEY_PIANO_ROLL_Y] = conf.pianoRollY;
+ j[CONF_KEY_PIANO_ROLL_H] = conf.pianoRollH;
+ j[CONF_KEY_SAMPLE_ACTION_EDITOR_H] = conf.sampleActionEditorH;
+ j[CONF_KEY_VELOCITY_EDITOR_H] = conf.velocityEditorH;
+ j[CONF_KEY_ENVELOPE_EDITOR_H] = conf.envelopeEditorH;
+ j[CONF_KEY_PLUGIN_LIST_X] = conf.pluginListX;
+ j[CONF_KEY_PLUGIN_LIST_Y] = conf.pluginListY;
+ j[CONF_KEY_MIDI_INPUT_X] = conf.midiInputX;
+ j[CONF_KEY_MIDI_INPUT_Y] = conf.midiInputY;
+ j[CONF_KEY_MIDI_INPUT_W] = conf.midiInputW;
+ j[CONF_KEY_MIDI_INPUT_H] = conf.midiInputH;
+ j[CONF_KEY_REC_TRIGGER_MODE] = static_cast<int>(conf.recTriggerMode);
+ j[CONF_KEY_REC_TRIGGER_LEVEL] = conf.recTriggerLevel;
#ifdef WITH_VST
-
- json_object_set_new(j, CONF_KEY_PLUGIN_CHOOSER_X, json_integer(pluginChooserX));
- json_object_set_new(j, CONF_KEY_PLUGIN_CHOOSER_Y, json_integer(pluginChooserY));
- json_object_set_new(j, CONF_KEY_PLUGIN_CHOOSER_W, json_integer(pluginChooserW));
- json_object_set_new(j, CONF_KEY_PLUGIN_CHOOSER_H, json_integer(pluginChooserH));
- json_object_set_new(j, CONF_KEY_PLUGIN_SORT_METHOD, json_integer(pluginSortMethod));
-
+ j[CONF_KEY_PLUGIN_CHOOSER_X] = conf.pluginChooserX;
+ j[CONF_KEY_PLUGIN_CHOOSER_Y] = conf.pluginChooserY;
+ j[CONF_KEY_PLUGIN_CHOOSER_W] = conf.pluginChooserW;
+ j[CONF_KEY_PLUGIN_CHOOSER_H] = conf.pluginChooserH;
+ j[CONF_KEY_PLUGIN_SORT_METHOD] = conf.pluginSortMethod;
#endif
- if (json_dump_file(j, confFilePath.c_str(), JSON_INDENT(2)) != 0) {
- u::log::print("[conf::write] unable to write configuration file!\n");
- return false;
- }
- return true;
+ std::ofstream ofs(confFilePath_);
+ if (!ofs.good()) {
+ u::log::print("[conf::write] unable to write configuration file!\n");
+ return false;
+ }
+
+ ofs << j;
+ return true;
}
-}}}; // giada::m::conf::
+}}}; // giada::m::conf::
\ No newline at end of file
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
#define G_CONF_H
-#include <atomic>
#include <string>
+#include "utils/gui.h"
+#include "core/const.h"
+#include "core/types.h"
namespace giada {
namespace m {
namespace conf
{
-void init();
-bool read();
-bool write();
-
-/* isMidiAllowed
-Given a MIDI channel 'c' tells whether this channel should be allowed to receive
-and process MIDI events on MIDI channel 'c'. */
-
-bool isMidiInAllowed(int c);
-
-extern std::string header;
-
-extern int logMode;
-extern int soundSystem;
-extern int soundDeviceOut;
-extern int soundDeviceIn;
-extern int channelsOut;
-extern int channelsIn;
-extern int samplerate;
-extern int buffersize;
-extern bool limitOutput;
-extern int rsmpQuality;
-
-extern int midiSystem;
-extern int midiPortOut;
-extern int midiPortIn;
-extern std::string midiMapPath;
-extern std::string lastFileMap;
-extern int midiSync; // see const.h
-extern float midiTCfps;
-
-extern std::atomic<bool> midiIn;
-extern std::atomic<int> midiInFilter;
-extern std::atomic<uint32_t> midiInRewind;
-extern std::atomic<uint32_t> midiInStartStop;
-extern std::atomic<uint32_t> midiInActionRec;
-extern std::atomic<uint32_t> midiInInputRec;
-extern std::atomic<uint32_t> midiInMetronome;
-extern std::atomic<uint32_t> midiInVolumeIn;
-extern std::atomic<uint32_t> midiInVolumeOut;
-extern std::atomic<uint32_t> midiInBeatDouble;
-extern std::atomic<uint32_t> midiInBeatHalf;
-
-extern bool recsStopOnChanHalt;
-extern bool chansStopOnSeqHalt;
-extern bool treatRecsAsLoops;
-extern bool inputMonitorDefaultOn;
-
-extern std::string pluginPath;
-extern std::string patchPath;
-extern std::string samplePath;
-
-extern int mainWindowX, mainWindowY, mainWindowW, mainWindowH;
-
-extern int browserX, browserY, browserW, browserH, browserPosition, browserLastValue;
-extern std::string browserLastPath;
-
-extern int actionEditorX, actionEditorY, actionEditorW, actionEditorH, actionEditorZoom;
-extern int actionEditorGridVal;
-extern int actionEditorGridOn;
-
-extern int sampleEditorX, sampleEditorY, sampleEditorW, sampleEditorH;
-extern int sampleEditorGridVal;
-extern int sampleEditorGridOn;
-
-extern int midiInputX, midiInputY, midiInputW, midiInputH;
-
-extern int pianoRollY, pianoRollH;
-extern int sampleActionEditorH;
-extern int velocityEditorH;
-extern int envelopeEditorH;
-extern int pluginListX, pluginListY;
-extern int configX, configY;
-extern int bpmX, bpmY;
-extern int beatsX, beatsY;
-extern int aboutX, aboutY;
-extern int nameX, nameY;
-
-extern int recTriggerMode;
-extern float recTriggerLevel;
+struct Conf
+{
+ int logMode = LOG_MODE_MUTE;
+ int soundSystem = G_DEFAULT_SOUNDSYS;
+ int soundDeviceOut = G_DEFAULT_SOUNDDEV_OUT;
+ int soundDeviceIn = G_DEFAULT_SOUNDDEV_IN;
+ int channelsOut = 0;
+ int channelsIn = 0;
+ int samplerate = G_DEFAULT_SAMPLERATE;
+ int buffersize = G_DEFAULT_BUFSIZE;
+ bool limitOutput = false;
+ int rsmpQuality = 0;
+
+ int midiSystem = 0;
+ int midiPortOut = G_DEFAULT_MIDI_PORT_OUT;
+ int midiPortIn = G_DEFAULT_MIDI_PORT_IN;
+ std::string midiMapPath = "";
+ std::string lastFileMap = "";
+ int midiSync = MIDI_SYNC_NONE;
+ float midiTCfps = 25.0f;
+
+ bool recsStopOnChanHalt = false;
+ bool chansStopOnSeqHalt = false;
+ bool treatRecsAsLoops = false;
+ bool inputMonitorDefaultOn = false;
+
+ std::string pluginPath;
+ std::string patchPath;
+ std::string samplePath;
+
+ int mainWindowX = u::gui::centerWindowX(G_MIN_GUI_WIDTH);
+ int mainWindowY = u::gui::centerWindowY(G_MIN_GUI_HEIGHT);
+ int mainWindowW = G_MIN_GUI_WIDTH;
+ int mainWindowH = G_MIN_GUI_HEIGHT;
+
+ int browserX = u::gui::centerWindowX(G_DEFAULT_SUBWINDOW_W);
+ int browserY = u::gui::centerWindowY(G_DEFAULT_SUBWINDOW_H);
+ int browserW = G_DEFAULT_SUBWINDOW_W;
+ int browserH = G_DEFAULT_SUBWINDOW_H;
+ int browserPosition;
+ int browserLastValue;
+ std::string browserLastPath;
+
+ int actionEditorY = u::gui::centerWindowY(G_DEFAULT_SUBWINDOW_H);
+ int actionEditorX = u::gui::centerWindowX(G_DEFAULT_SUBWINDOW_W);
+ int actionEditorW = G_DEFAULT_SUBWINDOW_W;
+ int actionEditorH = G_DEFAULT_SUBWINDOW_H;
+ int actionEditorZoom = 100;
+ int actionEditorGridVal = 0;
+ int actionEditorGridOn = false;
+
+ int sampleEditorX;
+ int sampleEditorY;
+ int sampleEditorW = G_DEFAULT_SUBWINDOW_W;
+ int sampleEditorH = G_DEFAULT_SUBWINDOW_H;
+ int sampleEditorGridVal = 0;
+ int sampleEditorGridOn = false;
+
+ int midiInputX;
+ int midiInputY;
+ int midiInputW = G_DEFAULT_SUBWINDOW_W;
+ int midiInputH = G_DEFAULT_SUBWINDOW_H;
+
+ int pianoRollY = -1;
+ int pianoRollH = 422;
+
+ int sampleActionEditorH = 40;
+ int velocityEditorH = 40;
+ int envelopeEditorH = 40;
+
+ int pluginListX;
+ int pluginListY;
+
+ RecTriggerMode recTriggerMode = RecTriggerMode::NORMAL;
+ float recTriggerLevel = G_DEFAULT_REC_TRIGGER_LEVEL;
+
+ bool midiInEnabled = false;
+ int midiInFilter = -1;
+ uint32_t midiInRewind = 0x0;
+ uint32_t midiInStartStop = 0x0;
+ uint32_t midiInActionRec = 0x0;
+ uint32_t midiInInputRec = 0x0;
+ uint32_t midiInMetronome = 0x0;
+ uint32_t midiInVolumeIn = 0x0;
+ uint32_t midiInVolumeOut = 0x0;
+ uint32_t midiInBeatDouble = 0x0;
+ uint32_t midiInBeatHalf = 0x0;
#ifdef WITH_VST
-extern int pluginChooserX, pluginChooserY, pluginChooserW, pluginChooserH;
-extern int pluginSortMethod;
+ int pluginChooserX;
+ int pluginChooserY;
+ int pluginChooserW = G_DEFAULT_SUBWINDOW_W;
+ int pluginChooserH = G_DEFAULT_SUBWINDOW_H;
+ int pluginSortMethod = 0;
#endif
+};
+
+
+/* -------------------------------------------------------------------------- */
+
+
+extern Conf conf;
+
+
+/* -------------------------------------------------------------------------- */
+
+
+void init();
+bool read();
+bool write();
}}}; // giada::m::conf::
#endif
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
/* -- version --------------------------------------------------------------- */
constexpr auto G_APP_NAME = "Giada";
-constexpr auto G_VERSION_STR = "0.16.1";
+constexpr auto G_VERSION_STR = "0.16.2";
constexpr int G_VERSION_MAJOR = 0;
constexpr int G_VERSION_MINOR = 16;
-constexpr int G_VERSION_PATCH = 1;
+constexpr int G_VERSION_PATCH = 2;
constexpr auto CONF_FILENAME = "giada.conf";
constexpr int G_GUI_INNER_MARGIN = 4;
constexpr int G_GUI_OUTER_MARGIN = 8;
constexpr int G_GUI_UNIT = 20; // base unit for elements
-constexpr int G_GUI_CHANNEL_H_1 = G_GUI_UNIT;
-constexpr int G_GUI_CHANNEL_H_2 = G_GUI_UNIT * 2;
-constexpr int G_GUI_CHANNEL_H_3 = G_GUI_UNIT * 4;
-constexpr int G_GUI_CHANNEL_H_4 = G_GUI_UNIT * 6;
constexpr int G_GUI_ZOOM_FACTOR = 2;
#define G_COLOR_RED fl_rgb_color(28, 32, 80)
constexpr float G_DEFAULT_FADEOUT_STEP = 0.01f; // micro-fadeout speed
constexpr int G_DEFAULT_COLUMN_WIDTH = 380;
constexpr auto G_DEFAULT_PATCH_NAME = "(default patch)";
-constexpr int G_DEFAULT_MIDI_INPUT_UI_W = 300;
-constexpr int G_DEFAULT_MIDI_INPUT_UI_H = 350;
constexpr int G_DEFAULT_ACTION_SIZE = 8192; // frames
constexpr int G_DEFAULT_ZOOM_RATIO = 128;
constexpr float G_DEFAULT_REC_TRIGGER_LEVEL = -10.0f;
+constexpr int G_DEFAULT_SUBWINDOW_W = 640;
+constexpr int G_DEFAULT_SUBWINDOW_H = 480;
/* -- midimap signals ------------------------------------------------------- */
-#define MIDIMAP_NOT_SPECIFIED 0x00
-#define MIDIMAP_UNREADABLE 0x01
-#define MIDIMAP_INVALID 0x02
-#define MIDIMAP_READ_OK 0x04
+constexpr int MIDIMAP_NOT_SPECIFIED = 0x00;
+constexpr int MIDIMAP_UNREADABLE = 0x01;
+constexpr int MIDIMAP_INVALID = 0x02;
+constexpr int MIDIMAP_READ_OK = 0x04;
+
+
+
+/* -- MIDI in parameters (for MIDI learning) -------------------------------- */
+constexpr int G_MIDI_IN_ENABLED = 1;
+constexpr int G_MIDI_IN_FILTER = 2;
+constexpr int G_MIDI_IN_REWIND = 3;
+constexpr int G_MIDI_IN_START_STOP = 4;
+constexpr int G_MIDI_IN_ACTION_REC = 5;
+constexpr int G_MIDI_IN_INPUT_REC = 6;
+constexpr int G_MIDI_IN_METRONOME = 7;
+constexpr int G_MIDI_IN_VOLUME_IN = 8;
+constexpr int G_MIDI_IN_VOLUME_OUT = 9;
+constexpr int G_MIDI_IN_BEAT_DOUBLE = 10;
+constexpr int G_MIDI_IN_BEAT_HALF = 11;
+constexpr int G_MIDI_IN_KEYPRESS = 12;
+constexpr int G_MIDI_IN_KEYREL = 13;
+constexpr int G_MIDI_IN_KILL = 14;
+constexpr int G_MIDI_IN_ARM = 15;
+constexpr int G_MIDI_IN_MUTE = 16;
+constexpr int G_MIDI_IN_SOLO = 17;
+constexpr int G_MIDI_IN_VOLUME = 18;
+constexpr int G_MIDI_IN_PITCH = 19;
+constexpr int G_MIDI_IN_READ_ACTIONS = 20;
+
+
+
+/* -- MIDI out parameters (for MIDI output and lightning) ------------------- */
+constexpr int G_MIDI_OUT_ENABLED = 1;
+constexpr int G_MIDI_OUT_L_ENABLED = 2;
+constexpr int G_MIDI_OUT_L_PLAYING = 3;
+constexpr int G_MIDI_OUT_L_MUTE = 4;
+constexpr int G_MIDI_OUT_L_SOLO = 5;
constexpr auto PATCH_KEY_CHANNEL_MODE = "mode";
constexpr auto PATCH_KEY_CHANNEL_BEGIN = "begin";
constexpr auto PATCH_KEY_CHANNEL_END = "end";
+constexpr auto PATCH_KEY_CHANNEL_SHIFT = "shift";
constexpr auto PATCH_KEY_CHANNEL_HAS_ACTIONS = "has_actions";
constexpr auto PATCH_KEY_CHANNEL_READ_ACTIONS = "read_actions";
constexpr auto PATCH_KEY_CHANNEL_PITCH = "pitch";
constexpr auto CONF_KEY_ENVELOPE_EDITOR_H = "envelope_editor_h";
constexpr auto CONF_KEY_PLUGIN_LIST_X = "plugin_list_x";
constexpr auto CONF_KEY_PLUGIN_LIST_Y = "plugin_list_y";
-constexpr auto CONF_KEY_CONFIG_X = "config_x";
-constexpr auto CONF_KEY_CONFIG_Y = "config_y";
-constexpr auto CONF_KEY_BPM_X = "bpm_x";
-constexpr auto CONF_KEY_BPM_Y = "bpm_y";
-constexpr auto CONF_KEY_BEATS_X = "beats_x";
-constexpr auto CONF_KEY_BEATS_Y = "beats_y";
-constexpr auto CONF_KEY_ABOUT_X = "about_x";
-constexpr auto CONF_KEY_ABOUT_Y = "about_y";
-constexpr auto CONF_KEY_NAME_X = "name_x";
-constexpr auto CONF_KEY_NAME_Y = "name_y";
constexpr auto CONF_KEY_PLUGIN_CHOOSER_X = "plugin_chooser_x";
constexpr auto CONF_KEY_PLUGIN_CHOOSER_Y = "plugin_chooser_y";
constexpr auto CONF_KEY_PLUGIN_CHOOSER_W = "plugin_chooser_w";
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
" "};
+const char* readActionDisabled_xpm[] = {
+"18 18 7 1",
+" c None",
+". c #252525",
+"+ c #313131",
+"@ c #393939",
+"# c #424242",
+"$ c #4A4A4A",
+"% c #585858",
+"..................",
+"..................",
+"..................",
+"..................",
+".....@@@@+........",
+".....%%%%%%+......",
+".....%%#+$%$......",
+".....%%@.#%$......",
+".....%%##%%@......",
+".....%%%%%$.......",
+".....%%@+%%#......",
+".....%%@.@%%......",
+".....%%@..%%#.....",
+".....@@...+@@.....",
+"..................",
+"..................",
+"..................",
+".................."};
+
+
const char* metronomeOff_xpm[] = {
"13 23 3 1",
" c None",
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
extern const char* readActionOn_xpm[];
extern const char* readActionOff_xpm[];
+extern const char* readActionDisabled_xpm[];
extern const char* channelStop_xpm[];
extern const char* channelPlay_xpm[];
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
private:
- //static ID m_gen;
ID m_id;
};
}} // giada::m::
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
#include <X11/Xlib.h> // For XInitThreads
#endif
#include <FL/Fl.H>
+#include "deps/json/single_include/nlohmann/json.hpp"
#include "gui/updater.h"
#include "utils/log.h"
#include "utils/fs.h"
#include "utils/time.h"
#include "utils/gui.h"
+#include "utils/ver.h"
#include "gui/dialogs/mainWindow.h"
#include "gui/dialogs/warnings.h"
#include "glue/main.h"
midimap::init();
midimap::setDefault();
- if (!u::log::init(conf::logMode))
+ if (!u::log::init(conf::conf.logMode))
u::log::print("[init] log init failed! Using default stdout\n");
- if (midimap::read(conf::midiMapPath) != MIDIMAP_READ_OK)
+ if (midimap::read(conf::conf.midiMapPath) != MIDIMAP_READ_OK)
u::log::print("[init] MIDI map read failed!\n");
}
void initAudio_()
{
kernelAudio::openDevice();
- clock::init(conf::samplerate, conf::midiTCfps);
+ clock::init(conf::conf.samplerate, conf::conf.midiTCfps);
mh::init();
recorder::init();
recorderHandler::init();
#ifdef WITH_VST
- pluginManager::init(conf::samplerate, kernelAudio::getRealBufSize());
+ pluginManager::init(conf::conf.samplerate, kernelAudio::getRealBufSize());
pluginHost::init(kernelAudio::getRealBufSize());
#endif
void initMIDI_()
{
- kernelMidi::setApi(conf::midiSystem);
- kernelMidi::openOutDevice(conf::midiPortOut);
- kernelMidi::openInDevice(conf::midiPortIn);
+ kernelMidi::setApi(conf::conf.midiSystem);
+ kernelMidi::openOutDevice(conf::conf.midiPortOut);
+ kernelMidi::openInDevice(conf::conf.midiPortIn);
}
#endif
G_MainWin = new v::gdMainWindow(G_MIN_GUI_WIDTH, G_MIN_GUI_HEIGHT, "", argc, argv);
- G_MainWin->resize(conf::mainWindowX, conf::mainWindowY, conf::mainWindowW,
- conf::mainWindowH);
+ G_MainWin->resize(conf::conf.mainWindowX, conf::conf.mainWindowY, conf::conf.mainWindowW,
+ conf::conf.mainWindowH);
- u::gui::updateMainWinLabel(patch::name == "" ? G_DEFAULT_PATCH_NAME : patch::name);
+ u::gui::updateMainWinLabel(patch::patch.name == "" ? G_DEFAULT_PATCH_NAME : patch::patch.name);
if (!kernelAudio::isReady())
v::gdAlert("Your soundcard isn't configured correctly.\n"
u::log::print("[init] All subwindows and UI thread closed\n");
}
+
+
+/* -------------------------------------------------------------------------- */
+
+
+/* printBuildInfo_
+Dumps the application version, build date and the versions of all linked
+dependencies (FLTK, RtAudio, RtMidi, libsamplerate, libsndfile, nlohmann/json
+and — when built with VST support — JUCE) to the console log. */
+
+void printBuildInfo_()
+{
+	u::log::print("[init] Giada %s\n", G_VERSION_STR);
+	u::log::print("[init] Build date: " BUILD_DATE "\n");
+	u::log::print("[init] Dependencies:\n");
+	u::log::print("[init]   FLTK - %d.%d.%d\n", FL_MAJOR_VERSION, FL_MINOR_VERSION, FL_PATCH_VERSION);
+	u::log::print("[init]   RtAudio - %s\n", u::ver::getRtAudioVersion().c_str());
+	u::log::print("[init]   RtMidi - %s\n", u::ver::getRtMidiVersion().c_str());
+	u::log::print("[init]   Libsamplerate\n"); // TODO - print version
+	u::log::print("[init]   Libsndfile - %s\n", u::ver::getLibsndfileVersion().c_str());
+	u::log::print("[init]   JSON for modern C++ - %d.%d.%d\n",
+		NLOHMANN_JSON_VERSION_MAJOR, NLOHMANN_JSON_VERSION_MINOR, NLOHMANN_JSON_VERSION_PATCH);
+#ifdef WITH_VST
+	u::log::print("[init]   JUCE - %d.%d.%d\n", JUCE_MAJOR_VERSION, JUCE_MINOR_VERSION, JUCE_BUILDNUMBER);
+#endif
+}
} // {anonymous}
void startup(int argc, char** argv)
{
- time_t t;
- time (&t);
- u::log::print("[init] Giada %s - %s", G_VERSION_STR, ctime(&t));
-
+ printBuildInfo_();
initConf_();
initAudio_();
initMIDI_();
channelManager::init();
waveManager::init();
- clock::init(conf::samplerate, conf::midiTCfps);
+ clock::init(conf::conf.samplerate, conf::conf.midiTCfps);
mh::init();
recorder::init();
#ifdef WITH_VST
- pluginManager::init(conf::samplerate, kernelAudio::getRealBufSize());
+ pluginManager::init(conf::conf.samplerate, kernelAudio::getRealBufSize());
#endif
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
* -------------------------------------------------------------------------- */
-#include "../deps/rtaudio-mod/RtAudio.h"
+#include "deps/rtaudio/RtAudio.h"
#include "utils/log.h"
#include "glue/main.h"
#include "core/model/model.h"
jack_client_t* jackGetHandle()
{
- return static_cast<jack_client_t*>(rtSystem->rtapi_->__HACK__getJackClient());
+ return static_cast<jack_client_t*>(rtSystem->HACK__getJackClient());
}
#endif
int openDevice()
{
- api = conf::soundSystem;
+ api = conf::conf.soundSystem;
u::log::print("[KA] using system 0x%x\n", api);
#if defined(__linux__) || defined(__FreeBSD__)
}
u::log::print("[KA] Opening devices %d (out), %d (in), f=%d...\n",
- conf::soundDeviceOut, conf::soundDeviceIn, conf::samplerate);
+ conf::conf.soundDeviceOut, conf::conf.soundDeviceIn, conf::conf.samplerate);
numDevs = rtSystem->getDeviceCount();
RtAudio::StreamParameters outParams;
RtAudio::StreamParameters inParams;
- outParams.deviceId = conf::soundDeviceOut == G_DEFAULT_SOUNDDEV_OUT ? getDefaultOut() : conf::soundDeviceOut;
+ outParams.deviceId = conf::conf.soundDeviceOut == G_DEFAULT_SOUNDDEV_OUT ? getDefaultOut() : conf::conf.soundDeviceOut;
outParams.nChannels = G_MAX_IO_CHANS;
- outParams.firstChannel = conf::channelsOut * G_MAX_IO_CHANS; // chan 0=0, 1=2, 2=4, ...
+ outParams.firstChannel = conf::conf.channelsOut * G_MAX_IO_CHANS; // chan 0=0, 1=2, 2=4, ...
/* inDevice can be disabled. */
- if (conf::soundDeviceIn != -1) {
- inParams.deviceId = conf::soundDeviceIn;
+ if (conf::conf.soundDeviceIn != -1) {
+ inParams.deviceId = conf::conf.soundDeviceIn;
inParams.nChannels = G_MAX_IO_CHANS;
- inParams.firstChannel = conf::channelsIn * G_MAX_IO_CHANS; // chan 0=0, 1=2, 2=4, ...
+ inParams.firstChannel = conf::conf.channelsIn * G_MAX_IO_CHANS; // chan 0=0, 1=2, 2=4, ...
inputEnabled = true;
}
else
options.streamName = G_APP_NAME;
options.numberOfBuffers = 4;
- realBufsize = conf::buffersize;
+ realBufsize = conf::conf.buffersize;
#if defined(__linux__) || defined(__FreeBSD__) || defined(__APPLE__)
if (api == G_SYS_API_JACK) {
- conf::samplerate = getFreq(conf::soundDeviceOut, 0);
- u::log::print("[KA] JACK in use, freq = %d\n", conf::samplerate);
+ conf::conf.samplerate = getFreq(conf::conf.soundDeviceOut, 0);
+ u::log::print("[KA] JACK in use, freq = %d\n", conf::conf.samplerate);
}
#endif
try {
rtSystem->openStream(
&outParams, // output params
- conf::soundDeviceIn != -1 ? &inParams : nullptr, // input params if inDevice is selected
+ conf::conf.soundDeviceIn != -1 ? &inParams : nullptr, // input params if inDevice is selected
RTAUDIO_FLOAT32, // audio format
- conf::samplerate, // sample rate
+ conf::conf.samplerate, // sample rate
&realBufsize, // buffer size in byte
&mixer::masterPlay, // audio callback
nullptr, // user data (unused)
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
void sendMidiLightningInitMsgs_()
{
- for (const midimap::Message& m : midimap::initCommands) {
+ for (const midimap::Message& m : midimap::midimap.initCommands) {
if (m.value != 0x0 && m.channel != -1) {
u::log::print("[KM] MIDI send (init) - Channel %x - Event 0x%X\n", m.channel, m.value);
send(m.value | G_MIDI_CHANS[m.channel]);
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
/* -------------------------------------------------------------------------- */
+/* isMasterMidiInAllowed_
+Returns true if global (master) MIDI input is enabled and the incoming MIDI
+channel 'c' passes the global channel filter. A filter value of -1 means
+"accept any channel". Takes a read lock on the model's midiIn data. */
+
+bool isMasterMidiInAllowed_(int c)
+{
+	model::MidiInLock l(model::midiIn);
+	int filter   = model::midiIn.get()->filter;
+	bool enabled = model::midiIn.get()->enabled;
+	return enabled && (filter == -1 || filter == c);
+}
+
+
+/* -------------------------------------------------------------------------- */
+
+
+/* isChannelMidiInAllowed_
+Returns true if channel 'channelId' accepts MIDI input coming from MIDI
+channel 'c'. Delegates the actual decision to Channel::isMidiInAllowed();
+takes a read lock on the model's channel list. */
+
+bool isChannelMidiInAllowed_(ID channelId, int c)
+{
+	model::ChannelsLock l(model::channels);
+	return model::get(model::channels, channelId).isMidiInAllowed(c);
+}
+
+
+/* -------------------------------------------------------------------------- */
+
+
#ifdef WITH_VST
void processPlugins_(const std::vector<ID>& ids, const MidiEvent& midiEvent)
if (!ch->midiIn || !ch->isMidiInAllowed(midiEvent.getChannel()))
continue;
- if (pure == ch->midiInKeyPress.load()) {
+ if (pure == ch->midiInKeyPress) {
actions.push_back([=] {
u::log::print(" >>> keyPress, ch=%d (pure=0x%X)\n", ch->id, pure);
c::io::keyPress(ch->id, false, false, midiEvent.getVelocity());
});
}
- else if (pure == ch->midiInKeyRel.load()) {
+ else if (pure == ch->midiInKeyRel) {
actions.push_back([=] {
u::log::print(" >>> keyRel ch=%d (pure=0x%X)\n", ch->id, pure);
c::io::keyRelease(ch->id, false, false);
});
}
- else if (pure == ch->midiInMute.load()) {
+ else if (pure == ch->midiInMute) {
actions.push_back([=] {
u::log::print(" >>> mute ch=%d (pure=0x%X)\n", ch->id, pure);
c::channel::toggleMute(ch->id);
});
}
- else if (pure == ch->midiInKill.load()) {
+ else if (pure == ch->midiInKill) {
actions.push_back([=] {
u::log::print(" >>> kill ch=%d (pure=0x%X)\n", ch->id, pure);
c::channel::kill(ch->id, /*record=*/false);
});
}
- else if (pure == ch->midiInArm.load()) {
+ else if (pure == ch->midiInArm) {
actions.push_back([=] {
u::log::print(" >>> arm ch=%d (pure=0x%X)\n", ch->id, pure);
c::channel::toggleArm(ch->id);
});
}
- else if (pure == ch->midiInSolo.load()) {
+ else if (pure == ch->midiInSolo) {
actions.push_back([=] {
u::log::print(" >>> solo ch=%d (pure=0x%X)\n", ch->id, pure);
c::channel::toggleSolo(ch->id);
});
}
- else if (pure == ch->midiInVolume.load()) {
+ else if (pure == ch->midiInVolume) {
actions.push_back([=] {
float vf = u::math::map(midiEvent.getVelocity(), G_MAX_VELOCITY, G_MAX_VOLUME);
u::log::print(" >>> volume ch=%d (pure=0x%X, value=%d, float=%f)\n",
}
else {
const SampleChannel* sch = static_cast<const SampleChannel*>(ch);
- if (pure == sch->midiInPitch.load()) {
+ if (pure == sch->midiInPitch) {
actions.push_back([=] {
float vf = u::math::map(midiEvent.getVelocity(), G_MAX_VELOCITY, G_MAX_PITCH);
u::log::print(" >>> pitch ch=%d (pure=0x%X, value=%d, float=%f)\n",
});
}
else
- if (pure == sch->midiInReadActions.load()) {
+ if (pure == sch->midiInReadActions) {
actions.push_back([=] {
u::log::print(" >>> toggle read actions ch=%d (pure=0x%X)\n", sch->id, pure);
c::channel::toggleReadingActions(sch->id);
void processMaster_(const MidiEvent& midiEvent)
{
- const bool gui = false;
+ m::model::MidiInLock l(m::model::midiIn);
- uint32_t pure = midiEvent.getRawNoVelocity();
+ const uint32_t pure = midiEvent.getRawNoVelocity();
+ const model::MidiIn* midiIn = model::midiIn.get();
- if (pure == conf::midiInRewind) {
- u::log::print(" >>> rewind (master) (pure=0x%X)\n", pure);
+ if (pure == midiIn->rewind) {
mh::rewindSequencer();
+ u::log::print(" >>> rewind (master) (pure=0x%X)\n", pure);
}
- else if (pure == conf::midiInStartStop) {
- u::log::print(" >>> startStop (master) (pure=0x%X)\n", pure);
+ else if (pure == midiIn->startStop) {
mh::toggleSequencer();
+ u::log::print(" >>> startStop (master) (pure=0x%X)\n", pure);
}
- else if (pure == conf::midiInActionRec) {
+ else if (pure == midiIn->actionRec) {
+ recManager::toggleActionRec(conf::conf.recTriggerMode);
u::log::print(" >>> actionRec (master) (pure=0x%X)\n", pure);
- recManager::toggleActionRec(static_cast<RecTriggerMode>(conf::recTriggerMode));
}
- else if (pure == conf::midiInInputRec) {
- u::log::print(" >>> inputRec (master) (pure=0x%X)\n", pure);
+ else if (pure == midiIn->inputRec) {
c::main::toggleInputRec();
+ u::log::print(" >>> inputRec (master) (pure=0x%X)\n", pure);
}
- else if (pure == conf::midiInMetronome) {
- u::log::print(" >>> metronome (master) (pure=0x%X)\n", pure);
+ else if (pure == midiIn->metronome) {
m::mixer::toggleMetronome();
+ u::log::print(" >>> metronome (master) (pure=0x%X)\n", pure);
}
- else if (pure == conf::midiInVolumeIn) {
+ else if (pure == midiIn->volumeIn) {
float vf = u::math::map(midiEvent.getVelocity(), G_MAX_VELOCITY, G_MAX_VOLUME);
+ c::main::setInVol(vf, /*gui=*/false);
u::log::print(" >>> input volume (master) (pure=0x%X, value=%d, float=%f)\n",
pure, midiEvent.getVelocity(), vf);
- c::main::setInVol(vf, gui);
}
- else if (pure == conf::midiInVolumeOut) {
+ else if (pure == midiIn->volumeOut) {
float vf = u::math::map(midiEvent.getVelocity(), G_MAX_VELOCITY, G_MAX_VOLUME);
+ c::main::setOutVol(vf, /*gui=*/false);
u::log::print(" >>> output volume (master) (pure=0x%X, value=%d, float=%f)\n",
pure, midiEvent.getVelocity(), vf);
- c::main::setOutVol(vf, gui);
}
- else if (pure == conf::midiInBeatDouble) {
- u::log::print(" >>> sequencer x2 (master) (pure=0x%X)\n", pure);
+ else if (pure == midiIn->beatDouble) {
c::main::beatsMultiply();
+ u::log::print(" >>> sequencer x2 (master) (pure=0x%X)\n", pure);
}
- else if (pure == conf::midiInBeatHalf) {
- u::log::print(" >>> sequencer /2 (master) (pure=0x%X)\n", pure);
+ else if (pure == midiIn->beatHalf) {
c::main::beatsDivide();
+ u::log::print(" >>> sequencer /2 (master) (pure=0x%X)\n", pure);
}
}
/* -------------------------------------------------------------------------- */
+/* learnChannel_
+Stores the learnt MIDI event 'e' into the channel parameter 'param' of channel
+'channelId', then stops the learning process and notifies the caller through
+'doneCb'. The event is discarded (and learning stays active) if the channel
+filters out the event's MIDI channel. */
+
+void learnChannel_(MidiEvent e, int param, ID channelId, std::function<void()> doneCb)
+{
+	if (!isChannelMidiInAllowed_(channelId, e.getChannel()))
+		return;
+
+	uint32_t raw = e.getRawNoVelocity();
+
+	model::onSwap(model::channels, channelId, [&](Channel& c)
+	{
+		switch (param) {
+			case G_MIDI_IN_KEYPRESS:     c.midiInKeyPress = raw; break;
+			case G_MIDI_IN_KEYREL:       c.midiInKeyRel = raw; break;
+			case G_MIDI_IN_KILL:         c.midiInKill = raw; break;
+			case G_MIDI_IN_ARM:          c.midiInArm = raw; break;
+			case G_MIDI_IN_MUTE:         c.midiInMute = raw; break;   // fix: was writing midiInVolume
+			case G_MIDI_IN_SOLO:         c.midiInSolo = raw; break;   // fix: was writing midiInMute
+			case G_MIDI_IN_VOLUME:       c.midiInVolume = raw; break;
+			case G_MIDI_IN_PITCH:        static_cast<SampleChannel&>(c).midiInPitch = raw; break;
+			case G_MIDI_IN_READ_ACTIONS: static_cast<SampleChannel&>(c).midiInReadActions = raw; break;
+		}
+	});
+
+	stopLearn();
+	doneCb();
+}
+
+
+/* learnMaster_
+Stores the learnt MIDI event 'e' into the global (master) MIDI-in parameter
+'param', then stops the learning process and notifies the caller through
+'doneCb'. The event is discarded (and learning stays active) if master MIDI
+input is disabled or filtered on a different MIDI channel. */
+
+void learnMaster_(MidiEvent e, int param, std::function<void()> doneCb)
+{
+	if (!isMasterMidiInAllowed_(e.getChannel()))
+		return;
+
+	uint32_t raw = e.getRawNoVelocity();
+
+	/* Fix: the five cases below were shifted by one field (e.g. METRONOME was
+	writing 'volumeIn'), so every master learn landed on the wrong parameter.
+	Each case now targets its own field. */
+
+	model::onSwap(model::midiIn, [&](model::MidiIn& m)
+	{
+		switch (param) {
+			case G_MIDI_IN_REWIND:      m.rewind = raw; break;
+			case G_MIDI_IN_START_STOP:  m.startStop = raw; break;
+			case G_MIDI_IN_ACTION_REC:  m.actionRec = raw; break;
+			case G_MIDI_IN_INPUT_REC:   m.inputRec = raw; break;
+			case G_MIDI_IN_METRONOME:   m.metronome = raw; break;
+			case G_MIDI_IN_VOLUME_IN:   m.volumeIn = raw; break;
+			case G_MIDI_IN_VOLUME_OUT:  m.volumeOut = raw; break;
+			case G_MIDI_IN_BEAT_DOUBLE: m.beatDouble = raw; break;
+			case G_MIDI_IN_BEAT_HALF:   m.beatHalf = raw; break;
+		}
+	});
+
+	stopLearn();
+	doneCb();
+}
+
+
+#ifdef WITH_VST
+
+/* learnPlugin_
+Stores the learnt MIDI event into plugin parameter 'paramIndex' of plugin
+'pluginId', then stops the learning process and notifies the caller through
+'doneCb'.
+NOTE(review): unlike channel/master learning, no MIDI-channel filter is
+applied here — presumably intentional (plugins have no per-channel filter);
+confirm. */
+
+void learnPlugin_(MidiEvent e, int paramIndex, ID pluginId, std::function<void()> doneCb)
+{
+	model::onSwap(model::plugins, pluginId, [&](Plugin& p)
+	{
+		p.midiInParams[paramIndex] = e.getRawNoVelocity();
+	});
+
+	stopLearn();
+	doneCb();
+}
+
+#endif
+
+
+/* -------------------------------------------------------------------------- */
+
+
void triggerSignalCb_()
{
if (signalCb_ == nullptr)
/* -------------------------------------------------------------------------- */
-void startMidiLearn(std::function<void(MidiEvent)> f)
+void startChannelLearn(int param, ID channelId, std::function<void()> f)
{
- learnCb_ = f;
+ learnCb_ = [=](m::MidiEvent e) { learnChannel_(e, param, channelId, f); };
}
-/* -------------------------------------------------------------------------- */
+void startMasterLearn (int param, std::function<void()> f)
+{
+ learnCb_ = [=](m::MidiEvent e) { learnMaster_(e, param, f); };
+}
-void stopMidiLearn()
+#ifdef WITH_VST
+
+void startPluginLearn (int paramIndex, ID pluginId, std::function<void()> f)
+{
+ learnCb_ = [=](m::MidiEvent e) { learnPlugin_(e, paramIndex, pluginId, f); };
+}
+
+#endif
+
+
+void stopLearn()
{
learnCb_ = nullptr;
}
/* -------------------------------------------------------------------------- */
+/* clearMasterLearn, clearChannelLearn, clearPluginLearn
+Reset a learnt MIDI binding by "learning" a default-constructed (empty, 0x0)
+MidiEvent for the given parameter.
+NOTE(review): these go through the same learn_* entry points, which perform
+the allowed-checks and call stopLearn() — so clearing can be a no-op when the
+corresponding MIDI input is disabled; confirm this is intended. */
+
+void clearMasterLearn(int param, std::function<void()> f)
+{
+	learnMaster_(MidiEvent(), param, f); // Empty event (0x0)
+}
+
+
+void clearChannelLearn(int param, ID channelId, std::function<void()> f)
+{
+	learnChannel_(MidiEvent(), param, channelId, f); // Empty event (0x0)
+}
+
+
+#ifdef WITH_VST
+
+void clearPluginLearn (int paramIndex, ID pluginId, std::function<void()> f)
+{
+	learnPlugin_(MidiEvent(), paramIndex, pluginId, f); // Empty event (0x0)
+}
+
+#endif
+
+
+/* -------------------------------------------------------------------------- */
+
+
void dispatch(int byte1, int byte2, int byte3)
{
/* Here we want to catch two things: a) note on/note off from a keyboard and
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
#include <functional>
#include <cstdint>
+#include "core/model/model.h"
#include "core/midiEvent.h"
+#include "core/types.h"
namespace giada {
namespace m {
namespace midiDispatcher
{
-/*typedef void (cb_midiLearn) (uint32_t, void*);
-
-void startMidiLearn(cb_midiLearn* cb, void* data);*/
-void startMidiLearn(std::function<void(MidiEvent)> f);
-void stopMidiLearn();
+void startChannelLearn(int param, ID channelId, std::function<void()> f);
+void startMasterLearn (int param, std::function<void()> f);
+void stopLearn();
+void clearMasterLearn (int param, std::function<void()> f);
+void clearChannelLearn(int param, ID channelId, std::function<void()> f);
+#ifdef WITH_VST
+void startPluginLearn (int paramIndex, ID pluginId, std::function<void()> f);
+void clearPluginLearn (int paramIndex, ID pluginId, std::function<void()> f);
+#endif
void dispatch(int byte1, int byte2, int byte3);
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
* -------------------------------------------------------------------------- */
+#include <fstream>
#include <vector>
#include <string>
#include <cstring>
#include <dirent.h>
+#include "deps/json/single_include/nlohmann/json.hpp"
#include "utils/string.h"
#include "utils/log.h"
#include "utils/fs.h"
-#include "utils/json.h"
#include "const.h"
#include "midiMapConf.h"
+namespace nl = nlohmann;
+
+
namespace giada {
namespace m {
namespace midimap
{
namespace
{
-bool readInitCommands_(json_t* j)
+bool readInitCommands_(const nl::json& j)
{
- namespace uj = u::json;
-
- json_t* jcs = json_object_get(j, MIDIMAP_KEY_INIT_COMMANDS);
- if (jcs == nullptr)
- return false;
-
- size_t i;
- json_t* jc;
- json_array_foreach(jcs, i, jc) {
-
- if (!uj::isObject(jc))
- return false;
+ if (j.find(MIDIMAP_KEY_INIT_COMMANDS) == j.end())
+ return false;
+ for (const auto& jc : j[MIDIMAP_KEY_INIT_COMMANDS])
+ {
Message m;
- m.channel = uj::readInt (jc, MIDIMAP_KEY_CHANNEL);
- m.valueStr = uj::readString(jc, MIDIMAP_KEY_MESSAGE);
+ m.channel = jc[MIDIMAP_KEY_CHANNEL];
+ m.valueStr = jc[MIDIMAP_KEY_MESSAGE];
m.value = strtoul(m.valueStr.c_str(), nullptr, 16);
- initCommands.push_back(m);
+ midimap.initCommands.push_back(m);
}
return true;
/* -------------------------------------------------------------------------- */
-bool readCommand_(json_t* j, Message& m, const std::string& key)
+bool readCommand_(const nl::json& j, Message& m, const std::string& key)
{
- namespace uj = u::json;
-
- json_t* jc = json_object_get(j, key.c_str());
- if (jc == nullptr)
+ if (j.find(key) == j.end())
return false;
- m.channel = uj::readInt (jc, MIDIMAP_KEY_CHANNEL);
- m.valueStr = uj::readString(jc, MIDIMAP_KEY_MESSAGE);
+ const nl::json& jc = j[key];
+
+ m.channel = jc[MIDIMAP_KEY_CHANNEL];
+ m.valueStr = jc[MIDIMAP_KEY_MESSAGE];
return true;
}
/* -------------------------------------------------------------------------- */
-
-std::string brand;
-std::string device;
-std::vector<Message> initCommands;
-Message muteOn;
-Message muteOff;
-Message soloOn;
-Message soloOff;
-Message waiting;
-Message playing;
-Message stopping;
-Message stopped;
-Message playingInaudible;
-
-std::string midimapsPath;
+MidiMap midimap;
+std::string midimapsPath;
std::vector<std::string> maps;
void setDefault()
{
- brand = "";
- device = "";
- muteOn.channel = 0;
- muteOn.valueStr = "";
- muteOn.offset = -1;
- muteOn.value = 0;
- muteOff.channel = 0;
- muteOff.valueStr = "";
- muteOff.offset = -1;
- muteOff.value = 0;
- soloOn.channel = 0;
- soloOn.valueStr = "";
- soloOn.offset = -1;
- soloOn.value = 0;
- soloOff.channel = 0;
- soloOff.valueStr = "";
- soloOff.offset = -1;
- soloOff.value = 0;
- waiting.channel = 0;
- waiting.valueStr = "";
- waiting.offset = -1;
- waiting.value = 0;
- playing.channel = 0;
- playing.valueStr = "";
- playing.offset = -1;
- playing.value = 0;
- stopping.channel = 0;
- stopping.valueStr = "";
- stopping.offset = -1;
- stopping.value = 0;
- stopped.channel = 0;
- stopped.valueStr = "";
- stopped.offset = -1;
- stopped.value = 0;
- playingInaudible.channel = 0;
- playingInaudible.valueStr = "";
- playingInaudible.offset = -1;
- playingInaudible.value = 0;
+ midimap = MidiMap();
}
int read(const std::string& file)
{
- namespace uj = u::json;
-
if (file.empty()) {
u::log::print("[midiMapConf::read] midimap not specified, nothing to do\n");
return MIDIMAP_NOT_SPECIFIED;
u::log::print("[midiMapConf::read] reading midimap file '%s'\n", file.c_str());
- json_t* j = uj::load(std::string(midimapsPath + file).c_str());
- if (j == nullptr)
+ std::ifstream ifs(midimapsPath + file);
+ if (!ifs.good())
return MIDIMAP_UNREADABLE;
- brand = uj::readString(j, MIDIMAP_KEY_BRAND);
- device = uj::readString(j, MIDIMAP_KEY_DEVICE);
+ nl::json j = nl::json::parse(ifs);
+
+ midimap.brand = j[MIDIMAP_KEY_BRAND];
+ midimap.device = j[MIDIMAP_KEY_DEVICE];
if (!readInitCommands_(j)) return MIDIMAP_UNREADABLE;
- if (readCommand_(j, muteOn, MIDIMAP_KEY_MUTE_ON)) parse_(muteOn);
- if (readCommand_(j, muteOff, MIDIMAP_KEY_MUTE_OFF)) parse_(muteOff);
- if (readCommand_(j, soloOn, MIDIMAP_KEY_SOLO_ON)) parse_(soloOn);
- if (readCommand_(j, soloOff, MIDIMAP_KEY_SOLO_OFF)) parse_(soloOff);
- if (readCommand_(j, waiting, MIDIMAP_KEY_WAITING)) parse_(waiting);
- if (readCommand_(j, playing, MIDIMAP_KEY_PLAYING)) parse_(playing);
- if (readCommand_(j, stopping, MIDIMAP_KEY_STOPPING)) parse_(stopping);
- if (readCommand_(j, stopped, MIDIMAP_KEY_STOPPED)) parse_(stopped);
- if (readCommand_(j, playingInaudible, MIDIMAP_KEY_PLAYING_INAUDIBLE)) parse_(playingInaudible);
+ if (readCommand_(j, midimap.muteOn, MIDIMAP_KEY_MUTE_ON)) parse_(midimap.muteOn);
+ if (readCommand_(j, midimap.muteOff, MIDIMAP_KEY_MUTE_OFF)) parse_(midimap.muteOff);
+ if (readCommand_(j, midimap.soloOn, MIDIMAP_KEY_SOLO_ON)) parse_(midimap.soloOn);
+ if (readCommand_(j, midimap.soloOff, MIDIMAP_KEY_SOLO_OFF)) parse_(midimap.soloOff);
+ if (readCommand_(j, midimap.waiting, MIDIMAP_KEY_WAITING)) parse_(midimap.waiting);
+ if (readCommand_(j, midimap.playing, MIDIMAP_KEY_PLAYING)) parse_(midimap.playing);
+ if (readCommand_(j, midimap.stopping, MIDIMAP_KEY_STOPPING)) parse_(midimap.stopping);
+ if (readCommand_(j, midimap.stopped, MIDIMAP_KEY_STOPPED)) parse_(midimap.stopped);
+ if (readCommand_(j, midimap.playingInaudible, MIDIMAP_KEY_PLAYING_INAUDIBLE)) parse_(midimap.playingInaudible);
return MIDIMAP_READ_OK;
}
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
{
struct Message
{
-	int channel;
-	std::string valueStr;
-	int offset;
-	uint32_t value;
+	// A single MIDI message template read from a midimap file. 'valueStr'
+	// is the raw string taken from the JSON; 'offset' and 'value' are
+	// presumably derived from it by parse_() — verify. offset == -1 marks
+	// "not computed / no variable part".
+	int channel = 0;
+	std::string valueStr = "";
+	int offset = -1;
+	uint32_t value = 0;
};
-extern std::string brand;
-extern std::string device;
-extern std::vector<Message> initCommands;
-extern Message muteOn;
-extern Message muteOff;
-extern Message soloOn;
-extern Message soloOff;
-extern Message waiting;
-extern Message playing;
-extern Message stopping;
-extern Message stopped;
-extern Message playingInaudible;
+
+/* MidiMap
+Content of a midimap file: device identity (brand/device), a list of
+initCommands (presumably sent to the device on startup — verify) and one
+Message per mixer/transport state (mute, solo, waiting, playing, ...),
+presumably used for output feedback to the controller. */
+
+struct MidiMap
+{
+	std::string brand;
+	std::string device;
+	std::vector<Message> initCommands;
+	Message muteOn;
+	Message muteOff;
+	Message soloOn;
+	Message soloOff;
+	Message waiting;
+	Message playing;
+	Message stopping;
+	Message stopped;
+	Message playingInaudible;
+};
+
+
+/* -------------------------------------------------------------------------- */
+
+/* midimap
+The actual MidiMap struct with data. */
+
+extern MidiMap midimap;
/* midimapsPath
Path of midimap files, different between OSes. */
extern std::vector<std::string> maps;
+
+/* -------------------------------------------------------------------------- */
+
/* init
Parses the midi maps folders and find the available maps. */
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
#include <cassert>
#include <cstring>
-#include "deps/rtaudio-mod/RtAudio.h"
+#include "deps/rtaudio/RtAudio.h"
#include "utils/log.h"
#include "utils/math.h"
#include "core/model/model.h"
computePeak_(inBuf, peakIn);
- if (signalCb_ != nullptr && u::math::linearToDB(peakIn) > conf::recTriggerLevel) {
+ if (signalCb_ != nullptr && u::math::linearToDB(peakIn) > conf::conf.recTriggerLevel) {
signalCb_();
signalCb_ = nullptr;
}
void limitOutput_(AudioBuffer& outBuf)
{
- if (!conf::limitOutput)
+ if (!conf::conf.limitOutput)
return;
for (int i=0; i<outBuf.countFrames(); i++)
for (int j=0; j<outBuf.countChannels(); j++)
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
#include <atomic>
#include <functional>
#include <vector>
-#include "deps/rtaudio-mod/RtAudio.h"
+#include "deps/rtaudio/RtAudio.h"
#include "core/recorder.h"
#include "core/types.h"
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
std::unique_ptr<Channel> createChannel_(ChannelType type, ID columnId, ID channelId=0)
{
std::unique_ptr<Channel> ch = channelManager::create(type,
- kernelAudio::getRealBufSize(), conf::inputMonitorDefaultOn, columnId);
+ kernelAudio::getRealBufSize(), conf::conf.inputMonitorDefaultOn, columnId);
if (type == ChannelType::MASTER) {
assert(channelId != 0);
waveManager::Result res = waveManager::createFromFile(fname);
if (res.status != G_RES_OK)
return res;
- if (res.wave->getRate() != conf::samplerate) {
+ if (res.wave->getRate() != conf::conf.samplerate) {
u::log::print("[mh::createWave_] input rate (%d) != system rate (%d), conversion needed\n",
- res.wave->getRate(), conf::samplerate);
- res.status = waveManager::resample(*res.wave.get(), conf::rsmpQuality, conf::samplerate);
+ res.wave->getRate(), conf::conf.samplerate);
+ res.status = waveManager::resample(*res.wave.get(), conf::conf.rsmpQuality, conf::conf.samplerate);
if (res.status != G_RES_OK)
return res;
}
model::channels.lock();
for (Channel* c : model::channels)
- c->stopBySeq(conf::chansStopOnSeqHalt);
+ c->stopBySeq(conf::conf.chansStopOnSeqHalt);
model::channels.unlock();
#ifdef __linux__
kernelAudio::jackSetPosition(0);
#endif
- if (conf::midiSync == MIDI_SYNC_CLOCK_M)
+ if (conf::conf.midiSync == MIDI_SYNC_CLOCK_M)
kernelMidi::send(MIDI_POSITION_PTR, 0, 0);
}
/* Create a new Wave with audio coming from Mixer's virtual input. */
- std::string filename = "TAKE-" + std::to_string(patch::lastTakeId++) + ".wav";
+ std::string filename = "TAKE-" + std::to_string(patch::patch.lastTakeId++) + ".wav";
std::unique_ptr<Wave> wave = waveManager::createEmpty(clock::getFramesInLoop(),
- G_MAX_IO_CHANS, conf::samplerate, filename);
+ G_MAX_IO_CHANS, conf::conf.samplerate, filename);
wave->copyData(virtualInput[0], virtualInput.countFrames());
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
RCUList<Mixer> mixer(std::make_unique<Mixer>());
RCUList<Kernel> kernel(std::make_unique<Kernel>());
RCUList<Recorder> recorder(std::make_unique<Recorder>());
+RCUList<MidiIn> midiIn(std::make_unique<MidiIn>());
RCUList<Actions> actions(std::make_unique<Actions>());
RCUList<Channel> channels;
RCUList<Wave> waves;
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
};
+/* MidiIn
+Global MIDI input bindings held in the model and mirrored to/from
+conf::Conf by the symmetric store()/load() overloads in
+core/model/storage.cpp. Each uint32_t holds a learned MIDI message value;
+0x0 presumably means "not bound" — verify. 'filter' of -1 presumably means
+"accept all channels" — verify. */
+
+struct MidiIn
+{
+	bool enabled = false;
+	int filter = -1;
+	uint32_t rewind = 0x0;
+	uint32_t startStop = 0x0;
+	uint32_t actionRec = 0x0;
+	uint32_t inputRec = 0x0;
+	uint32_t volumeIn = 0x0;
+	uint32_t volumeOut = 0x0;
+	uint32_t beatDouble = 0x0;
+	uint32_t beatHalf = 0x0;
+	uint32_t metronome = 0x0;
+};
+
+
struct Actions
{
Actions() = default;
using MixerLock = RCUList<Mixer>::Lock;
using KernelLock = RCUList<Kernel>::Lock;
using RecorderLock = RCUList<Recorder>::Lock;
+using MidiInLock = RCUList<MidiIn>::Lock;
using ActionsLock = RCUList<Actions>::Lock;
using ChannelsLock = RCUList<Channel>::Lock;
using WavesLock = RCUList<Wave>::Lock;
extern RCUList<Mixer> mixer;
extern RCUList<Kernel> kernel;
extern RCUList<Recorder> recorder;
+extern RCUList<MidiIn> midiIn;
extern RCUList<Actions> actions;
extern RCUList<Channel> channels;
extern RCUList<Wave> waves;
--- /dev/null
+/* -----------------------------------------------------------------------------
+ *
+ * Giada - Your Hardcore Loopmachine
+ *
+ * -----------------------------------------------------------------------------
+ *
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
+ *
+ * This file is part of Giada - Your Hardcore Loopmachine.
+ *
+ * Giada - Your Hardcore Loopmachine is free software: you can
+ * redistribute it and/or modify it under the terms of the GNU General
+ * Public License as published by the Free Software Foundation, either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * Giada - Your Hardcore Loopmachine is distributed in the hope that it
+ * will be useful, but WITHOUT ANY WARRANTY; without even the implied
+ * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+ * See the GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with Giada - Your Hardcore Loopmachine. If not, see
+ * <http://www.gnu.org/licenses/>.
+ *
+ * -------------------------------------------------------------------------- */
+
+
+#include <cassert>
+#include "core/model/model.h"
+#include "core/channels/channelManager.h"
+#include "core/channels/sampleChannel.h"
+#include "core/channels/midiChannel.h"
+#include "core/kernelAudio.h"
+#include "core/patch.h"
+#include "core/conf.h"
+#include "core/pluginManager.h"
+#include "core/recorderHandler.h"
+#include "core/waveManager.h"
+#include "core/model/storage.h"
+
+
+namespace giada {
+namespace m {
+namespace model
+{
+/* store (patch)
+Takes a read-locked snapshot of the current model and serializes it into
+'patch': clock parameters, plug-ins (VST builds only), actions, waves and
+channels. All involved model lists stay locked for the whole copy. */
+
+void store(patch::Patch& patch)
+{
+#ifdef WITH_VST
+	PluginsLock pl (plugins);
+#endif
+	ActionsLock al (actions);
+	WavesLock wl (waves);
+	ClockLock cl (clock);
+	ChannelsLock chl(channels);
+
+	patch.bars = clock.get()->bars;
+	patch.beats = clock.get()->beats;
+	patch.bpm = clock.get()->bpm;
+	patch.quantize = clock.get()->quantize;
+	// NOTE(review): mixer::isMetronomeOn() is called but "core/mixer.h" does
+	// not appear among this file's includes — confirm it arrives transitively.
+	patch.metronome = mixer::isMetronomeOn(); // TODO - not here
+
+#ifdef WITH_VST
+	for (const Plugin* p : plugins)
+		patch.plugins.push_back(pluginManager::serializePlugin(*p));
+#endif
+
+	patch.actions = recorderHandler::serializeActions(actions.get()->map);
+
+	for (const Wave* w : waves)
+		patch.waves.push_back(waveManager::serializeWave(*w));
+
+	for (const Channel* c : channels)
+		patch.channels.push_back(channelManager::serializeChannel(*c));
+}
+
+
+/* -------------------------------------------------------------------------- */
+
+
+/* store (conf)
+Copies the model's global MIDI input bindings (MidiIn) into 'conf' so they
+can be persisted in the configuration file. Field-for-field mirror of the
+load(const conf::Conf&) overload below. */
+
+void store(conf::Conf& conf)
+{
+	MidiInLock l(midiIn);
+
+	conf.midiInEnabled = midiIn.get()->enabled;
+	conf.midiInFilter = midiIn.get()->filter;
+	conf.midiInRewind = midiIn.get()->rewind;
+	conf.midiInStartStop = midiIn.get()->startStop;
+	conf.midiInActionRec = midiIn.get()->actionRec;
+	conf.midiInInputRec = midiIn.get()->inputRec;
+	conf.midiInMetronome = midiIn.get()->metronome;
+	conf.midiInVolumeIn = midiIn.get()->volumeIn;
+	conf.midiInVolumeOut = midiIn.get()->volumeOut;
+	conf.midiInBeatDouble = midiIn.get()->beatDouble;
+	conf.midiInBeatHalf = midiIn.get()->beatHalf;
+}
+
+
+/* -------------------------------------------------------------------------- */
+
+
+/* load (patch)
+Rebuilds the model from a deserialized Patch: clock parameters, action map,
+plug-ins (VST builds only), waves and channels. */
+
+void load(const patch::Patch& patch)
+{
+	onSwap(clock, [&](Clock& c)
+	{
+		// Loading a patch always leaves the sequencer stopped.
+		c.status = ClockStatus::STOPPED;
+		c.bars = patch.bars;
+		c.beats = patch.beats;
+		c.bpm = patch.bpm;
+		c.quantize = patch.quantize;
+	});
+
+	onSwap(actions, [&](Actions& a)
+	{
+		a.map = std::move(recorderHandler::deserializeActions(patch.actions));
+	});
+#ifdef WITH_VST
+	for (const patch::Plugin& pplugin : patch.plugins)
+		plugins.push(pluginManager::deserializePlugin(pplugin));
+#endif
+
+	for (const patch::Wave& pwave : patch.waves)
+		waves.push(std::move(waveManager::deserializeWave(pwave)));
+
+	for (const patch::Channel& pchannel : patch.channels) {
+		// Master/preview channels always exist in the model: update them in
+		// place; any other channel is created from scratch.
+		if (pchannel.type == ChannelType::MASTER || pchannel.type == ChannelType::PREVIEW)
+			onSwap(channels, pchannel.id, [&](Channel& ch) { ch.load(pchannel); });
+		else
+			channels.push(channelManager::deserializeChannel(pchannel, kernelAudio::getRealBufSize()));
+	}
+}
+
+
+/* -------------------------------------------------------------------------- */
+
+
+/* load (conf)
+Mirrors the global MIDI input bindings stored in the configuration into the
+model's MidiIn struct. Must stay field-for-field symmetric with the
+store(conf::Conf&) overload above. */
+
+void load(const conf::Conf& c)
+{
+	onSwap(midiIn, [&](MidiIn& m)
+	{
+		m.enabled = c.midiInEnabled;
+		m.filter = c.midiInFilter;
+		m.rewind = c.midiInRewind;
+		m.startStop = c.midiInStartStop;
+		m.actionRec = c.midiInActionRec;
+		m.inputRec = c.midiInInputRec;
+		/* Fix: the five assignments below were shifted by one source field
+		(volumeIn took midiInMetronome, volumeOut took midiInVolumeIn, and so
+		on), corrupting every binding from volumeIn onwards on load. Each
+		member now reads its same-named Conf counterpart, matching
+		store(conf::Conf&). */
+		m.volumeIn = c.midiInVolumeIn;
+		m.volumeOut = c.midiInVolumeOut;
+		m.beatDouble = c.midiInBeatDouble;
+		m.beatHalf = c.midiInBeatHalf;
+		m.metronome = c.midiInMetronome;
+	});
+}
+}}} // giada::m::model::
--- /dev/null
+/* -----------------------------------------------------------------------------
+ *
+ * Giada - Your Hardcore Loopmachine
+ *
+ * -----------------------------------------------------------------------------
+ *
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
+ *
+ * This file is part of Giada - Your Hardcore Loopmachine.
+ *
+ * Giada - Your Hardcore Loopmachine is free software: you can
+ * redistribute it and/or modify it under the terms of the GNU General
+ * Public License as published by the Free Software Foundation, either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * Giada - Your Hardcore Loopmachine is distributed in the hope that it
+ * will be useful, but WITHOUT ANY WARRANTY; without even the implied
+ * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+ * See the GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with Giada - Your Hardcore Loopmachine. If not, see
+ * <http://www.gnu.org/licenses/>.
+ *
+ * -------------------------------------------------------------------------- */
+
+
+#ifndef G_MODEL_STORAGE_H
+#define G_MODEL_STORAGE_H
+
+
+namespace giada {
+namespace m {
+// Forward declarations only: keeps this header independent from
+// core/patch.h and core/conf.h.
+namespace patch
+{
+struct Patch;
+}
+namespace conf
+{
+struct Conf;
+}
+namespace model
+{
+/* store
+Serializes the current model state into a Conf or a Patch object. */
+void store(conf::Conf& c);
+void store(patch::Patch& p);
+/* load
+Fills the model with data coming from a deserialized Patch or Conf. */
+void load(const patch::Patch& p);
+void load(const conf::Conf& c);
+}}} // giada::m::model::
+
+
+#endif
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
* -------------------------------------------------------------------------- */
-#include <jansson.h>
-#include "utils/log.h"
-#include "utils/string.h"
-#include "utils/ver.h"
+#include <fstream>
+#include "deps/json/single_include/nlohmann/json.hpp"
#include "utils/math.h"
-#include "utils/fs.h"
-#include "utils/json.h"
-#include "gui/elems/mainWindow/keyboard/column.h"
-#include "gui/elems/mainWindow/keyboard/channel.h"
-#include "gui/elems/mainWindow/keyboard/keyboard.h"
-#include "gui/dialogs/mainWindow.h"
-#include "core/model/model.h"
-#include "core/channels/channelManager.h"
-#include "core/channels/channel.h"
-#include "core/channels/midiChannel.h"
-#include "core/channels/sampleChannel.h"
-#include "core/pluginManager.h"
-#include "core/waveManager.h"
-#include "core/const.h"
-#include "core/kernelAudio.h"
-#include "core/clock.h"
-#include "core/types.h"
-#include "core/midiEvent.h"
-#include "core/recorderHandler.h"
-#include "core/conf.h"
+#include "utils/log.h"
#include "core/mixer.h"
#include "patch.h"
-extern giada::v::gdMainWindow* G_MainWin;
+namespace nl = nlohmann;
namespace giada {
{
namespace
{
-void sanitize_()
+/* readCommons_
+Reads the top-level scalar fields of a patch file into the global 'patch'
+object. j.value() falls back to the given default whenever a key is
+missing, so older/partial patches still load. */
+
+void readCommons_(const nl::json& j)
{
-	namespace um = u::math;
-	samplerate = um::bound(samplerate, 0, G_DEFAULT_SAMPLERATE);
-}
-
-
-void sanitize_(Channel& c)
-{
-	namespace um = u::math;
-	c.size = um::bound(c.size, G_GUI_CHANNEL_H_1, G_GUI_CHANNEL_H_4);
-	c.volume = um::bound(c.volume, 0.0f, G_DEFAULT_VOL);
-	c.pan = um::bound(c.pan, 0.0f, 1.0f);
-	c.pitch = um::bound(c.pitch, 0.1f, G_MAX_PITCH);
-	c.midiOutChan = um::bound(c.midiOutChan, 0, G_MAX_MIDI_CHANS - 1);
+	patch.name = j.value(PATCH_KEY_NAME, G_DEFAULT_PATCH_NAME);
+	patch.bars = j.value(PATCH_KEY_BARS, G_DEFAULT_BARS);
+	patch.beats = j.value(PATCH_KEY_BEATS, G_DEFAULT_BEATS);
+	patch.bpm = j.value(PATCH_KEY_BPM, G_DEFAULT_BPM);
+	patch.quantize = j.value(PATCH_KEY_QUANTIZE, G_DEFAULT_QUANTIZE);
+	patch.lastTakeId = j.value(PATCH_KEY_LAST_TAKE_ID, 0);
+	patch.samplerate = j.value(PATCH_KEY_SAMPLERATE, G_DEFAULT_SAMPLERATE);
+	patch.metronome = j.value(PATCH_KEY_METRONOME, false);
}
/* -------------------------------------------------------------------------- */
-void readCommons_(json_t* j)
+/* readColumns_
+Reads the "columns" array into patch.columns. A column without an explicit
+id gets a sequential fallback id. */
+
+void readColumns_(const nl::json& j)
{
-	namespace uj = u::json;
-
-	json_t* jcs = json_object_get(j, PATCH_KEY_COLUMNS);
-	if (jcs == nullptr)
-		return;
-
-	G_MainWin->keyboard->deleteAllColumns();
-
-	size_t i;
-	json_t* jc;
-	json_array_foreach(jcs, i, jc) {
-		G_MainWin->keyboard->addColumn(
-			uj::readInt(jc, PATCH_KEY_COLUMN_WIDTH),
-			uj::readInt(jc, PATCH_KEY_COLUMN_ID));
-	};
+	/* Guard added: const operator[] on a missing key is undefined behavior
+	in nlohmann::json, and every sibling reader (readWaves_, readActions_,
+	readChannels_) already performs this check. */
+	if (!j.contains(PATCH_KEY_COLUMNS))
+		return;
+
+	ID id = 0;
+	for (const auto& jcol : j[PATCH_KEY_COLUMNS]) {
+		Column c;
+		c.id = jcol.value(PATCH_KEY_COLUMN_ID, ++id);
+		c.width = jcol.value(PATCH_KEY_COLUMN_WIDTH, G_DEFAULT_COLUMN_WIDTH);
+		patch.columns.push_back(c);
+	}
}
#ifdef WITH_VST
-void readPluginParams_(json_t* j, std::vector<float>& params)
-{
- json_t* jps = json_object_get(j, PATCH_KEY_PLUGIN_PARAMS);
- if (jps == nullptr)
- return;
-
- size_t i;
- json_t* jp;
- json_array_foreach(jps, i, jp)
- params.push_back(json_real_value(jp));
-}
-
-void readMidiInPluginParams_(json_t* j, std::vector<uint32_t>& params)
+/* readPlugins_
+Reads the "plugins" array into patch.plugins, including each plug-in's
+parameter and MIDI-learned parameter lists. */
+
+void readPlugins_(const nl::json& j)
{
-	json_t* jps = json_object_get(j, PATCH_KEY_PLUGIN_MIDI_IN_PARAMS);
-	if (jps == nullptr)
+	if (!j.contains(PATCH_KEY_PLUGINS))
		return;
-	size_t i;
-	json_t* jp;
-	json_array_foreach(jps, i, jp)
-		params.push_back(json_integer_value(jp));
-}
-
-
-/* -------------------------------------------------------------------------- */
-
-
-void readPlugins_(json_t* j)
-{
-	namespace uj = u::json;
-
-	json_t* jps = json_object_get(j, PATCH_KEY_PLUGINS);
-	if (jps == nullptr)
-		return;
-
-	size_t i;
-	json_t* jp;
-	json_array_foreach(jps, i, jp) {
-
-		if (!uj::isObject(jp))
-			continue;
-
+	ID id = 0;
+	for (const auto& jplugin : j[PATCH_KEY_PLUGINS]) {
		Plugin p;
-		p.id = uj::readInt (jp, PATCH_KEY_PLUGIN_ID);
-		p.path = uj::readString(jp, PATCH_KEY_PLUGIN_PATH);
-		p.bypass = uj::readBool (jp, PATCH_KEY_PLUGIN_BYPASS);
+		p.id = jplugin.value(PATCH_KEY_PLUGIN_ID, ++id);
+		p.path = jplugin.value(PATCH_KEY_PLUGIN_PATH, "");
+		p.bypass = jplugin.value(PATCH_KEY_PLUGIN_BYPASS, false);
-		readPluginParams_(jp, p.params);
-		readMidiInPluginParams_(jp, p.midiInParams);
+		/* Guards added: const operator[] on a missing key is undefined
+		behavior in nlohmann::json; the removed jansson helpers null-checked
+		these arrays before iterating. */
+		if (jplugin.contains(PATCH_KEY_PLUGIN_PARAMS))
+			for (const auto& jparam : jplugin[PATCH_KEY_PLUGIN_PARAMS])
+				p.params.push_back(jparam);
-		model::plugins.push(pluginManager::makePlugin(p));
+		if (jplugin.contains(PATCH_KEY_PLUGIN_MIDI_IN_PARAMS))
+			for (const auto& jmidiParam : jplugin[PATCH_KEY_PLUGIN_MIDI_IN_PARAMS])
+				p.midiInParams.push_back(jmidiParam);
+
+		patch.plugins.push_back(p);
	}
}
+
#endif
+
/* -------------------------------------------------------------------------- */
-void readWaves_(json_t* j, const std::string& basePath)
+/* readWaves_
+Reads the "waves" array into patch.waves. A wave without an explicit id
+gets a sequential fallback; paths are resolved against 'basePath'. */
+
+void readWaves_(const nl::json& j, const std::string& basePath)
{
-	namespace uj = u::json;
-
-	json_t* jws = json_object_get(j, PATCH_KEY_WAVES);
-	if (jws == nullptr)
+	if (!j.contains(PATCH_KEY_WAVES))
		return;
-	size_t i;
-	json_t* jw;
-	json_array_foreach(jws, i, jw) {
-
-		if (!uj::isObject(jw))
-			continue;
-
+	ID id = 0;
+	for (const auto& jwave : j[PATCH_KEY_WAVES]) {
		Wave w;
-		w.id = uj::readInt(jw, PATCH_KEY_WAVE_ID);
-		w.path = basePath + uj::readString(jw, PATCH_KEY_WAVE_PATH);
-
-		model::waves.push(std::move(waveManager::createFromPatch(w)));
+		w.id = jwave.value(PATCH_KEY_WAVE_ID, ++id);
+		w.path = basePath + jwave.value(PATCH_KEY_WAVE_PATH, "");
+		patch.waves.push_back(w);
	}
-	return;
}
/* -------------------------------------------------------------------------- */
-void readActions_(json_t* j)
+/* readActions_
+Reads the "actions" array into patch.actions. Unlike the legacy reader it
+no longer feeds the model directly: the deserialized data goes through
+patch and is loaded into the model elsewhere. */
+
+void readActions_(const nl::json& j)
{
-	namespace uj = u::json;
-
-	json_t* jas = json_object_get(j, PATCH_KEY_ACTIONS);
-	if (jas == nullptr)
+	if (!j.contains(PATCH_KEY_ACTIONS))
		return;
-	std::vector<Action> actions;
-	size_t i;
-	json_t* ja;
-	json_array_foreach(jas, i, ja) {
-
-		if (!uj::isObject(ja))
-			continue;
-
+	ID id = 0;
+	for (const auto& jaction : j[PATCH_KEY_ACTIONS]) {
		Action a;
-		a.id = uj::readInt(ja, G_PATCH_KEY_ACTION_ID);
-		a.channelId = uj::readInt(ja, G_PATCH_KEY_ACTION_CHANNEL);
-		a.frame = uj::readInt(ja, G_PATCH_KEY_ACTION_FRAME);
-		a.event = uj::readInt(ja, G_PATCH_KEY_ACTION_EVENT);
-		a.prevId = uj::readInt(ja, G_PATCH_KEY_ACTION_PREV);
-		a.nextId = uj::readInt(ja, G_PATCH_KEY_ACTION_NEXT);
-
-		actions.push_back(a);
+		a.id = jaction.value(G_PATCH_KEY_ACTION_ID, ++id);
+		a.channelId = jaction.value(G_PATCH_KEY_ACTION_CHANNEL, 0);
+		a.frame = jaction.value(G_PATCH_KEY_ACTION_FRAME, 0);
+		a.event = jaction.value(G_PATCH_KEY_ACTION_EVENT, 0);
+		a.prevId = jaction.value(G_PATCH_KEY_ACTION_PREV, 0);
+		a.nextId = jaction.value(G_PATCH_KEY_ACTION_NEXT, 0);
+		patch.actions.push_back(a);
	}
-
-	model::onSwap(model::actions, [&](model::Actions& a)
-	{
-		a.map = std::move(recorderHandler::makeActionsFromPatch(actions));
-	});
}
/* -------------------------------------------------------------------------- */
-void readChannelPlugins_(json_t* j, std::vector<ID>& pluginIds)
+/* readChannels_
+Reads the "channels" array into patch.channels. Unlike the legacy reader,
+every field is read for every channel type, relying on j.value() defaults
+for keys a given type does not provide.
+NOTE(review): the legacy path clamped volume/pan/pitch/size via sanitize_()
+before use; the new path does not — confirm clamping happens downstream. */
+
+void readChannels_(const nl::json& j)
{
-	json_t* jps = json_object_get(j, PATCH_KEY_CHANNEL_PLUGINS);
-	if (jps == nullptr)
+	if (!j.contains(PATCH_KEY_CHANNELS))
		return;
-	size_t i;
-	json_t* jp;
-	json_array_foreach(jps, i, jp)
-		pluginIds.push_back(json_integer_value(jp));
-}
-
-
-/* -------------------------------------------------------------------------- */
-
-
-void readChannels_(json_t* j)
-{
-	namespace uj = u::json;
-
-	json_t* jcs = json_object_get(j, PATCH_KEY_CHANNELS);
-	if (jcs == nullptr)
-		return;
-
-	size_t i;
-	json_t* jc;
-	json_array_foreach(jcs, i, jc) {
-
-	if (!uj::isObject(jc))
-		continue;
+	// Fallback IDs start from the preview channel ID — presumably the last
+	// reserved built-in channel ID; verify against core/mixer.h.
+	ID id = mixer::PREVIEW_CHANNEL_ID;
+	for (const auto& jchannel : j[PATCH_KEY_CHANNELS]) {
		Channel c;
-		c.id = uj::readInt (jc, PATCH_KEY_CHANNEL_ID);
-		c.type = static_cast<ChannelType>(uj::readInt(jc, PATCH_KEY_CHANNEL_TYPE));
-		c.volume = uj::readFloat(jc, PATCH_KEY_CHANNEL_VOLUME);
-
-		if (c.type != ChannelType::MASTER) {
-			c.size = G_GUI_CHANNEL_H_1; // TODO temporarily disabled - uj::readInt (jc, PATCH_KEY_CHANNEL_SIZE);
-			c.name = uj::readString(jc, PATCH_KEY_CHANNEL_NAME);
-			c.columnId = uj::readInt (jc, PATCH_KEY_CHANNEL_COLUMN);
-			c.key = uj::readInt (jc, PATCH_KEY_CHANNEL_KEY);
-			c.mute = uj::readInt (jc, PATCH_KEY_CHANNEL_MUTE);
-			c.solo = uj::readInt (jc, PATCH_KEY_CHANNEL_SOLO);
-			c.pan = uj::readFloat (jc, PATCH_KEY_CHANNEL_PAN);
-			c.hasActions = uj::readBool (jc, PATCH_KEY_CHANNEL_HAS_ACTIONS);
-			c.midiIn = uj::readBool (jc, PATCH_KEY_CHANNEL_MIDI_IN);
-			c.midiInKeyPress = uj::readInt (jc, PATCH_KEY_CHANNEL_MIDI_IN_KEYPRESS);
-			c.midiInKeyRel = uj::readInt (jc, PATCH_KEY_CHANNEL_MIDI_IN_KEYREL);
-			c.midiInKill = uj::readInt (jc, PATCH_KEY_CHANNEL_MIDI_IN_KILL);
-			c.midiInArm = uj::readInt (jc, PATCH_KEY_CHANNEL_MIDI_IN_ARM);
-			c.midiInVolume = uj::readInt (jc, PATCH_KEY_CHANNEL_MIDI_IN_VOLUME);
-			c.midiInMute = uj::readInt (jc, PATCH_KEY_CHANNEL_MIDI_IN_MUTE);
-			c.midiInSolo = uj::readInt (jc, PATCH_KEY_CHANNEL_MIDI_IN_SOLO);
-			c.midiInFilter = uj::readInt (jc, PATCH_KEY_CHANNEL_MIDI_IN_FILTER);
-			c.midiOutL = uj::readBool (jc, PATCH_KEY_CHANNEL_MIDI_OUT_L);
-			c.midiOutLplaying = uj::readInt (jc, PATCH_KEY_CHANNEL_MIDI_OUT_L_PLAYING);
-			c.midiOutLmute = uj::readInt (jc, PATCH_KEY_CHANNEL_MIDI_OUT_L_MUTE);
-			c.midiOutLsolo = uj::readInt (jc, PATCH_KEY_CHANNEL_MIDI_OUT_L_SOLO);
-			c.armed = uj::readBool (jc, PATCH_KEY_CHANNEL_ARMED);
-		}
+		c.id = jchannel.value(PATCH_KEY_CHANNEL_ID, ++id);
+		c.type = static_cast<ChannelType>(jchannel.value(PATCH_KEY_CHANNEL_TYPE, 1));
+		c.volume = jchannel.value(PATCH_KEY_CHANNEL_VOLUME, G_DEFAULT_VOL);
+		c.height = jchannel.value(PATCH_KEY_CHANNEL_SIZE, G_GUI_UNIT);
+		c.name = jchannel.value(PATCH_KEY_CHANNEL_NAME, "");
+		c.columnId = jchannel.value(PATCH_KEY_CHANNEL_COLUMN, 1);
+		c.key = jchannel.value(PATCH_KEY_CHANNEL_KEY, 0);
+		c.mute = jchannel.value(PATCH_KEY_CHANNEL_MUTE, 0);
+		c.solo = jchannel.value(PATCH_KEY_CHANNEL_SOLO, 0);
+		c.pan = jchannel.value(PATCH_KEY_CHANNEL_PAN, 0.5);
+		c.hasActions = jchannel.value(PATCH_KEY_CHANNEL_HAS_ACTIONS, false);
+		c.midiIn = jchannel.value(PATCH_KEY_CHANNEL_MIDI_IN, 0);
+		c.midiInKeyPress = jchannel.value(PATCH_KEY_CHANNEL_MIDI_IN_KEYPRESS, 0);
+		c.midiInKeyRel = jchannel.value(PATCH_KEY_CHANNEL_MIDI_IN_KEYREL, 0);
+		c.midiInKill = jchannel.value(PATCH_KEY_CHANNEL_MIDI_IN_KILL, 0);
+		c.midiInArm = jchannel.value(PATCH_KEY_CHANNEL_MIDI_IN_ARM, 0);
+		c.midiInVolume = jchannel.value(PATCH_KEY_CHANNEL_MIDI_IN_VOLUME, 0);
+		c.midiInMute = jchannel.value(PATCH_KEY_CHANNEL_MIDI_IN_MUTE, 0);
+		c.midiInSolo = jchannel.value(PATCH_KEY_CHANNEL_MIDI_IN_SOLO, 0);
+		c.midiInFilter = jchannel.value(PATCH_KEY_CHANNEL_MIDI_IN_FILTER, 0);
+		c.midiOutL = jchannel.value(PATCH_KEY_CHANNEL_MIDI_OUT_L, 0);
+		c.midiOutLplaying = jchannel.value(PATCH_KEY_CHANNEL_MIDI_OUT_L_PLAYING, 0);
+		c.midiOutLmute = jchannel.value(PATCH_KEY_CHANNEL_MIDI_OUT_L_MUTE, 0);
+		c.midiOutLsolo = jchannel.value(PATCH_KEY_CHANNEL_MIDI_OUT_L_SOLO, 0);
+		c.armed = jchannel.value(PATCH_KEY_CHANNEL_ARMED, false);
+		c.mode = static_cast<ChannelMode>(jchannel.value(PATCH_KEY_CHANNEL_MODE, 1));
+		c.waveId = jchannel.value(PATCH_KEY_CHANNEL_WAVE_ID, 0);
+		c.begin = jchannel.value(PATCH_KEY_CHANNEL_BEGIN, 0);
+		c.end = jchannel.value(PATCH_KEY_CHANNEL_END, 0);
+		c.shift = jchannel.value(PATCH_KEY_CHANNEL_SHIFT, 0);
+		c.readActions = jchannel.value(PATCH_KEY_CHANNEL_READ_ACTIONS, false);
+		c.pitch = jchannel.value(PATCH_KEY_CHANNEL_PITCH, G_DEFAULT_PITCH);
+		c.inputMonitor = jchannel.value(PATCH_KEY_CHANNEL_INPUT_MONITOR, false);
+		c.midiInVeloAsVol = jchannel.value(PATCH_KEY_CHANNEL_MIDI_IN_VELO_AS_VOL, 0);
+		c.midiInReadActions = jchannel.value(PATCH_KEY_CHANNEL_MIDI_IN_READ_ACTIONS, 0);
+		c.midiInPitch = jchannel.value(PATCH_KEY_CHANNEL_MIDI_IN_PITCH, 0);
+		c.midiOut = jchannel.value(PATCH_KEY_CHANNEL_MIDI_OUT, 0);
+		c.midiOutChan = jchannel.value(PATCH_KEY_CHANNEL_MIDI_OUT_CHAN, 0);
#ifdef WITH_VST
-		readChannelPlugins_(jc, c.pluginIds);
+		if (jchannel.contains(PATCH_KEY_CHANNEL_PLUGINS))
+			for (const auto& jplugin : jchannel[PATCH_KEY_CHANNEL_PLUGINS])
+				c.pluginIds.push_back(jplugin);
#endif
-		if (c.type == ChannelType::SAMPLE) {
-			c.waveId = uj::readInt (jc, PATCH_KEY_CHANNEL_WAVE_ID);
-			c.mode = static_cast<ChannelMode>(uj::readInt(jc, PATCH_KEY_CHANNEL_MODE));
-			c.begin = uj::readInt (jc, PATCH_KEY_CHANNEL_BEGIN);
-			c.end = uj::readInt (jc, PATCH_KEY_CHANNEL_END);
-			c.readActions = uj::readBool (jc, PATCH_KEY_CHANNEL_READ_ACTIONS);
-			c.pitch = uj::readFloat(jc, PATCH_KEY_CHANNEL_PITCH);
-			c.inputMonitor = uj::readBool (jc, PATCH_KEY_CHANNEL_INPUT_MONITOR);
-			c.midiInVeloAsVol = uj::readBool (jc, PATCH_KEY_CHANNEL_MIDI_IN_VELO_AS_VOL);
-			c.midiInReadActions = uj::readInt (jc, PATCH_KEY_CHANNEL_MIDI_IN_READ_ACTIONS);
-			c.midiInPitch = uj::readInt (jc, PATCH_KEY_CHANNEL_MIDI_IN_PITCH);
-		}
-		else
-		if (c.type == ChannelType::MIDI) {
-			c.midiOut = uj::readInt(jc, PATCH_KEY_CHANNEL_MIDI_OUT);
-			c.midiOutChan = uj::readInt(jc, PATCH_KEY_CHANNEL_MIDI_OUT_CHAN);
-		}
-
-		sanitize_(c);
-
-		if (c.type == ChannelType::MASTER || c.type == ChannelType::PREVIEW) {
-			if (c.id == mixer::MASTER_OUT_CHANNEL_ID)
-				model::onSwap(model::channels, mixer::MASTER_OUT_CHANNEL_ID, [&](m::Channel& ch) { ch.load(c); });
-			else
-			if (c.id == mixer::MASTER_IN_CHANNEL_ID)
-				model::onSwap(model::channels, mixer::MASTER_IN_CHANNEL_ID, [&](m::Channel& ch) { ch.load(c); });
-		}
-		else
-			model::channels.push(channelManager::create(c, kernelAudio::getRealBufSize()));
+		patch.channels.push_back(c);
	}
}
#ifdef WITH_VST
-void writePlugins_(json_t* j)
+/* writePlugins_
+Writes patch.plugins into the "plugins" JSON array, including each
+plug-in's parameter and MIDI-learned parameter lists. */
+
+void writePlugins_(nl::json& j)
{
-	model::PluginsLock pl(model::plugins);
-
-	json_t* jps = json_array();
-
-	for (const m::Plugin* p : model::plugins) {
-
-		/* Plugin. */
+	j[PATCH_KEY_PLUGINS] = nl::json::array();
-		json_t* jp = json_object();
-		json_object_set_new(jp, PATCH_KEY_PLUGIN_ID, json_integer(p->id));
-		json_object_set_new(jp, PATCH_KEY_PLUGIN_PATH, json_string(p->getUniqueId().c_str()));
-		json_object_set_new(jp, PATCH_KEY_PLUGIN_BYPASS, json_boolean(p->isBypassed()));
-		json_array_append_new(jps, jp);
+	for (const Plugin& p : patch.plugins) {
-		/* Plugin parameters. */
+		nl::json jplugin;
-		json_t* jparams = json_array();
-		for (int k = 0; k < p->getNumParameters(); k++)
-			json_array_append_new(jparams, json_real(p->getParameter(k)));
-		json_object_set_new(jp, PATCH_KEY_PLUGIN_PARAMS, jparams);
+		jplugin[PATCH_KEY_PLUGIN_ID] = p.id;
+		jplugin[PATCH_KEY_PLUGIN_PATH] = p.path;
+		jplugin[PATCH_KEY_PLUGIN_BYPASS] = p.bypass;
-		/* MidiIn params (midi learning on plugins' parameters). */
+		/* Loop variables renamed: both inner loops used 'p', shadowing the
+		Plugin 'p' they iterate over — legal but bug-prone. */
+		jplugin[PATCH_KEY_PLUGIN_PARAMS] = nl::json::array();
+		for (float param : p.params)
+			jplugin[PATCH_KEY_PLUGIN_PARAMS].push_back(param);
-		json_t* jmidiparams = json_array();
-		for (uint32_t param : p->midiInParams)
-			json_array_append_new(jmidiparams, json_integer(param));
-		json_object_set_new(jp, PATCH_KEY_PLUGIN_MIDI_IN_PARAMS, jmidiparams);
+		jplugin[PATCH_KEY_PLUGIN_MIDI_IN_PARAMS] = nl::json::array();
+		for (uint32_t midiParam : p.midiInParams)
+			jplugin[PATCH_KEY_PLUGIN_MIDI_IN_PARAMS].push_back(midiParam);
+
+		j[PATCH_KEY_PLUGINS].push_back(jplugin);
	}
-	json_object_set_new(j, PATCH_KEY_PLUGINS, jps);
}
#endif
/* -------------------------------------------------------------------------- */
-void writeColumns_(json_t* j)
+void writeColumns_(nl::json& j)
{
- json_t* jcs = json_array();
-
- G_MainWin->keyboard->forEachColumn([&](const v::geColumn& c)
- {
- json_t* jc = json_object();
- json_object_set_new(jc, PATCH_KEY_COLUMN_ID, json_integer(c.id));
- json_object_set_new(jc, PATCH_KEY_COLUMN_WIDTH, json_integer(c.w()));
+ j[PATCH_KEY_COLUMNS] = nl::json::array();
- json_t* jchans = json_array();
- c.forEachChannel([&](v::geChannel* c)
- {
- json_array_append_new(jchans, json_integer(c->channelId));
- });
- json_object_set_new(jc, PATCH_KEY_COLUMN_CHANNELS, jchans);
-
- json_array_append_new(jcs, jc);
-
- });
- json_object_set_new(j, PATCH_KEY_COLUMNS, jcs);
+ for (const Column& column : patch.columns) {
+ nl::json jcolumn;
+ jcolumn[PATCH_KEY_COLUMN_ID] = column.id;
+ jcolumn[PATCH_KEY_COLUMN_WIDTH] = column.width;
+ j[PATCH_KEY_COLUMNS].push_back(jcolumn);
+ }
}
/* -------------------------------------------------------------------------- */
-void writeActions_(json_t* j)
+void writeActions_(nl::json& j)
{
- model::ActionsLock l(model::actions);
-
- json_t* jas = json_array();
-
- for (auto& kv : model::actions.get()->map) {
- for (m::Action& a : kv.second) {
- json_t* ja = json_object();
- json_object_set_new(ja, G_PATCH_KEY_ACTION_ID, json_integer(a.id));
- json_object_set_new(ja, G_PATCH_KEY_ACTION_CHANNEL, json_integer(a.channelId));
- json_object_set_new(ja, G_PATCH_KEY_ACTION_FRAME, json_integer(a.frame));
- json_object_set_new(ja, G_PATCH_KEY_ACTION_EVENT, json_integer(a.event.getRaw()));
- json_object_set_new(ja, G_PATCH_KEY_ACTION_PREV, json_integer(a.prevId));
- json_object_set_new(ja, G_PATCH_KEY_ACTION_NEXT, json_integer(a.nextId));
- json_array_append_new(jas, ja);
- }
+ j[PATCH_KEY_ACTIONS] = nl::json::array();
+
+ for (const Action& a : patch.actions) {
+ nl::json jaction;
+ jaction[G_PATCH_KEY_ACTION_ID] = a.id;
+ jaction[G_PATCH_KEY_ACTION_CHANNEL] = a.channelId;
+ jaction[G_PATCH_KEY_ACTION_FRAME] = a.frame;
+ jaction[G_PATCH_KEY_ACTION_EVENT] = a.event;
+ jaction[G_PATCH_KEY_ACTION_PREV] = a.prevId;
+ jaction[G_PATCH_KEY_ACTION_NEXT] = a.nextId;
+ j[PATCH_KEY_ACTIONS].push_back(jaction);
}
- json_object_set_new(j, PATCH_KEY_ACTIONS, jas);
}
/* -------------------------------------------------------------------------- */
-void writeWaves_(json_t* j, bool isProject)
+void writeWaves_(nl::json& j)
{
- model::WavesLock l(model::waves);
+ j[PATCH_KEY_WAVES] = nl::json::array();
- json_t* jws = json_array();
+ for (const Wave& w : patch.waves) {
+ nl::json jwave;
+ jwave[PATCH_KEY_WAVE_ID] = w.id;
+ jwave[PATCH_KEY_WAVE_PATH] = w.path;
- for (const m::Wave* w : model::waves) {
-
- std::string path = isProject ? u::fs::basename(w->getPath()) : w->getPath();
-
- json_t* jw = json_object();
- json_object_set_new(jw, PATCH_KEY_WAVE_ID, json_integer(w->id));
- json_object_set_new(jw, PATCH_KEY_WAVE_PATH, json_string(path.c_str()));
- json_array_append_new(jws, jw);
+ j[PATCH_KEY_WAVES].push_back(jwave);
}
- json_object_set_new(j, PATCH_KEY_WAVES, jws);
}
/* -------------------------------------------------------------------------- */
-void writeCommons_(json_t* j, const std::string& name)
+void writeCommons_(nl::json& j, const std::string& name)
{
- model::ClockLock cl(model::clock);
- model::MixerLock ml(model::mixer);
-
- json_object_set_new(j, PATCH_KEY_HEADER, json_string("GIADAPTC"));
- json_object_set_new(j, PATCH_KEY_VERSION_MAJOR, json_integer(G_VERSION_MAJOR));
- json_object_set_new(j, PATCH_KEY_VERSION_MINOR, json_integer(G_VERSION_MINOR));
- json_object_set_new(j, PATCH_KEY_VERSION_PATCH, json_integer(G_VERSION_PATCH));
- json_object_set_new(j, PATCH_KEY_NAME, json_string(name.c_str()));
- json_object_set_new(j, PATCH_KEY_BARS, json_integer(model::clock.get()->bars));
- json_object_set_new(j, PATCH_KEY_BEATS, json_integer(model::clock.get()->beats));
- json_object_set_new(j, PATCH_KEY_BPM, json_real(model::clock.get()->bpm));
- json_object_set_new(j, PATCH_KEY_QUANTIZE, json_integer(model::clock.get()->quantize));
- json_object_set_new(j, PATCH_KEY_LAST_TAKE_ID, json_integer(lastTakeId));
- json_object_set_new(j, PATCH_KEY_SAMPLERATE, json_integer(samplerate));
- json_object_set_new(j, PATCH_KEY_METRONOME, json_boolean(mixer::isMetronomeOn()));
+ j[PATCH_KEY_HEADER] = "GIADAPTC";
+ j[PATCH_KEY_VERSION_MAJOR] = G_VERSION_MAJOR;
+ j[PATCH_KEY_VERSION_MINOR] = G_VERSION_MINOR;
+ j[PATCH_KEY_VERSION_PATCH] = G_VERSION_PATCH;
+ j[PATCH_KEY_NAME] = patch.name;
+ j[PATCH_KEY_BARS] = patch.bars;
+ j[PATCH_KEY_BEATS] = patch.beats;
+ j[PATCH_KEY_BPM] = patch.bpm;
+ j[PATCH_KEY_QUANTIZE] = patch.quantize;
+ j[PATCH_KEY_LAST_TAKE_ID] = patch.lastTakeId;
+ j[PATCH_KEY_SAMPLERATE] = patch.samplerate;
+ j[PATCH_KEY_METRONOME] = patch.metronome;
}
/* -------------------------------------------------------------------------- */
-void writeChannels_(json_t* j)
+void writeChannels_(nl::json& j)
{
- model::ChannelsLock l(model::channels);
-
- json_t* jcs = json_array();
-
- for (m::Channel* c : model::channels) {
-
- json_t* jc = json_object();
-
- json_object_set_new(jc, PATCH_KEY_CHANNEL_ID, json_integer(c->id));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_TYPE, json_integer(static_cast<int>(c->type)));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_VOLUME, json_real(c->volume));
-
- if (c->type != ChannelType::MASTER) {
- json_object_set_new(jc, PATCH_KEY_CHANNEL_SIZE, json_integer(G_MainWin->keyboard->getChannel(c->id)->getSize()));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_NAME, json_string(c->name.c_str()));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_COLUMN, json_integer(c->columnId));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_MUTE, json_integer(c->mute));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_SOLO, json_integer(c->solo));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_PAN, json_real(c->pan));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_ARMED, json_boolean(c->armed));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_HAS_ACTIONS, json_boolean(c->hasActions));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_MIDI_IN, json_boolean(c->midiIn.load()));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_MIDI_IN_KEYREL, json_integer(c->midiInKeyRel.load()));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_MIDI_IN_KEYPRESS, json_integer(c->midiInKeyPress.load()));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_MIDI_IN_KILL, json_integer(c->midiInKill.load()));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_MIDI_IN_ARM, json_integer(c->midiInArm.load()));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_MIDI_IN_VOLUME, json_integer(c->midiInVolume.load()));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_MIDI_IN_MUTE, json_integer(c->midiInMute.load()));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_MIDI_IN_SOLO, json_integer(c->midiInSolo.load()));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_MIDI_IN_FILTER, json_integer(c->midiInFilter.load()));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_MIDI_OUT_L, json_boolean(c->midiOutL.load()));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_MIDI_OUT_L_PLAYING, json_integer(c->midiOutLplaying.load()));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_MIDI_OUT_L_MUTE, json_integer(c->midiOutLmute.load()));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_MIDI_OUT_L_SOLO, json_integer(c->midiOutLsolo.load()));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_KEY, json_integer(c->key));
- }
+ j[PATCH_KEY_CHANNELS] = nl::json::array();
+
+ for (const Channel& c : patch.channels) {
+
+ nl::json jchannel;
+
+ jchannel[PATCH_KEY_CHANNEL_ID] = c.id;
+ jchannel[PATCH_KEY_CHANNEL_TYPE] = static_cast<int>(c.type);
+ jchannel[PATCH_KEY_CHANNEL_SIZE] = c.height;
+ jchannel[PATCH_KEY_CHANNEL_NAME] = c.name;
+ jchannel[PATCH_KEY_CHANNEL_COLUMN] = c.columnId;
+ jchannel[PATCH_KEY_CHANNEL_MUTE] = c.mute;
+ jchannel[PATCH_KEY_CHANNEL_SOLO] = c.solo;
+ jchannel[PATCH_KEY_CHANNEL_VOLUME] = c.volume;
+ jchannel[PATCH_KEY_CHANNEL_PAN] = c.pan;
+ jchannel[PATCH_KEY_CHANNEL_HAS_ACTIONS] = c.hasActions;
+ jchannel[PATCH_KEY_CHANNEL_ARMED] = c.armed;
+ jchannel[PATCH_KEY_CHANNEL_MIDI_IN] = c.midiIn;
+ jchannel[PATCH_KEY_CHANNEL_MIDI_IN_KEYREL] = c.midiInKeyRel;
+ jchannel[PATCH_KEY_CHANNEL_MIDI_IN_KEYPRESS] = c.midiInKeyPress;
+ jchannel[PATCH_KEY_CHANNEL_MIDI_IN_KILL] = c.midiInKill;
+ jchannel[PATCH_KEY_CHANNEL_MIDI_IN_ARM] = c.midiInArm;
+ jchannel[PATCH_KEY_CHANNEL_MIDI_IN_VOLUME] = c.midiInVolume;
+ jchannel[PATCH_KEY_CHANNEL_MIDI_IN_MUTE] = c.midiInMute;
+ jchannel[PATCH_KEY_CHANNEL_MIDI_IN_SOLO] = c.midiInSolo;
+ jchannel[PATCH_KEY_CHANNEL_MIDI_IN_FILTER] = c.midiInFilter;
+ jchannel[PATCH_KEY_CHANNEL_MIDI_OUT_L] = c.midiOutL;
+ jchannel[PATCH_KEY_CHANNEL_MIDI_OUT_L_PLAYING] = c.midiOutLplaying;
+ jchannel[PATCH_KEY_CHANNEL_MIDI_OUT_L_MUTE] = c.midiOutLmute;
+ jchannel[PATCH_KEY_CHANNEL_MIDI_OUT_L_SOLO] = c.midiOutLsolo;
+ jchannel[PATCH_KEY_CHANNEL_KEY] = c.key;
+ jchannel[PATCH_KEY_CHANNEL_WAVE_ID] = c.waveId;
+ jchannel[PATCH_KEY_CHANNEL_MODE] = static_cast<int>(c.mode);
+ jchannel[PATCH_KEY_CHANNEL_BEGIN] = c.begin;
+ jchannel[PATCH_KEY_CHANNEL_END] = c.end;
+ jchannel[PATCH_KEY_CHANNEL_SHIFT] = c.shift;
+ jchannel[PATCH_KEY_CHANNEL_READ_ACTIONS] = c.readActions;
+ jchannel[PATCH_KEY_CHANNEL_PITCH] = c.pitch;
+ jchannel[PATCH_KEY_CHANNEL_INPUT_MONITOR] = c.inputMonitor;
+ jchannel[PATCH_KEY_CHANNEL_MIDI_IN_VELO_AS_VOL] = c.midiInVeloAsVol;
+ jchannel[PATCH_KEY_CHANNEL_MIDI_IN_READ_ACTIONS] = c.midiInReadActions;
+ jchannel[PATCH_KEY_CHANNEL_MIDI_IN_PITCH] = c.midiInPitch;
+ jchannel[PATCH_KEY_CHANNEL_MIDI_OUT] = c.midiOut;
+ jchannel[PATCH_KEY_CHANNEL_MIDI_OUT_CHAN] = c.midiOutChan;
#ifdef WITH_VST
- json_t* jplugins = json_array();
- for (ID pid : c->pluginIds)
- json_array_append_new(jplugins, json_integer(pid));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_PLUGINS, jplugins);
+ jchannel[PATCH_KEY_CHANNEL_PLUGINS] = nl::json::array();
+ for (ID pid : c.pluginIds)
+ jchannel[PATCH_KEY_CHANNEL_PLUGINS].push_back(pid);
#endif
- if (c->type == ChannelType::SAMPLE) {
- SampleChannel* sc = static_cast<SampleChannel*>(c);
-
- json_object_set_new(jc, PATCH_KEY_CHANNEL_WAVE_ID, json_integer(sc->waveId));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_MODE, json_integer(static_cast<int>(sc->mode)));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_BEGIN, json_integer(sc->begin));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_END, json_integer(sc->end));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_READ_ACTIONS, json_boolean(sc->readActions));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_PITCH, json_real(sc->pitch));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_INPUT_MONITOR, json_boolean(sc->inputMonitor));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_MIDI_IN_VELO_AS_VOL, json_boolean(sc->midiInVeloAsVol));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_MIDI_IN_READ_ACTIONS, json_integer(sc->midiInReadActions.load()));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_MIDI_IN_PITCH, json_integer(sc->midiInPitch.load()));
- }
- else
- if (c->type == ChannelType::MIDI) {
- MidiChannel* mc = static_cast<MidiChannel*>(c);
- json_object_set_new(jc, PATCH_KEY_CHANNEL_MIDI_OUT, json_integer(mc->midiOut));
- json_object_set_new(jc, PATCH_KEY_CHANNEL_MIDI_OUT_CHAN, json_integer(mc->midiOutChan));
- }
-
- json_array_append_new(jcs, jc);
+ j[PATCH_KEY_CHANNELS].push_back(jchannel);
}
- json_object_set_new(j, PATCH_KEY_CHANNELS, jcs);
}
}; // {anonymous}
/* -------------------------------------------------------------------------- */
-std::string name;
-int samplerate;
-int lastTakeId;
-bool metronome;
+Patch patch;
/* -------------------------------------------------------------------------- */
void init()
{
- lastTakeId = 0;
- samplerate = G_DEFAULT_SAMPLERATE;
-}
-
-
-/* -------------------------------------------------------------------------- */
-
-
-int verify(const std::string& file)
-{
- namespace uj = u::json;
-
- json_t* j = uj::load(file);
- if (j == nullptr)
- return G_PATCH_UNREADABLE;
-
- if (uj::readString(j, PATCH_KEY_HEADER) != "GIADAPTC")
- return G_PATCH_INVALID;
-
- Version version = {
- static_cast<int>(uj::readInt(j, PATCH_KEY_VERSION_MAJOR)),
- static_cast<int>(uj::readInt(j, PATCH_KEY_VERSION_MINOR)),
- static_cast<int>(uj::readInt(j, PATCH_KEY_VERSION_PATCH))
- };
- if (version < Version{0, 16, 0})
- return G_PATCH_UNSUPPORTED;
-
- return G_PATCH_OK;
+ patch = Patch();
}
/* -------------------------------------------------------------------------- */
-bool write(const std::string& name, const std::string& file, bool isProject)
+bool write(const std::string& name, const std::string& file)
{
- json_t* j = json_object();
+ nl::json j;
writeCommons_(j, name);
writeColumns_(j);
writeChannels_(j);
writeActions_(j);
- writeWaves_(j, isProject);
+ writeWaves_(j);
#ifdef WITH_VST
writePlugins_(j);
#endif
- if (json_dump_file(j, file.c_str(), JSON_COMPACT) != 0) {
- u::log::print("[patch::write] unable to write patch file!\n");
+ std::ofstream ofs(file);
+ if (!ofs.good())
return false;
- }
+
+ ofs << j;
return true;
}
int read(const std::string& file, const std::string& basePath)
{
- namespace uj = u::json;
-
- json_t* j = uj::load(file);
- if (j == nullptr)
+ std::ifstream ifs(file);
+ if (!ifs.good())
return G_PATCH_UNREADABLE;
- init();
- readCommons_(j);
- readColumns_(j);
-#ifdef WITH_VST
- readPlugins_(j);
-#endif
- readWaves_(j, basePath);
- readActions_(j);
- readChannels_(j);
+ nl::json j = nl::json::parse(ifs);
- json_decref(j);
+ if (j[PATCH_KEY_HEADER] != "GIADAPTC")
+ return G_PATCH_INVALID;
+
+ Version version = {
+ static_cast<int>(j[PATCH_KEY_VERSION_MAJOR]),
+ static_cast<int>(j[PATCH_KEY_VERSION_MINOR]),
+ static_cast<int>(j[PATCH_KEY_VERSION_PATCH])
+ };
+ if (version < Version{0, 16, 0})
+ return G_PATCH_UNSUPPORTED;
- sanitize_();
+ try {
+ readCommons_(j);
+ readColumns_(j);
+#ifdef WITH_VST
+ readPlugins_(j);
+#endif
+ readWaves_(j, basePath);
+ readActions_(j);
+ readChannels_(j);
+ }
+ catch (nl::json::exception& e) {
+ u::log::print("[patch::read] Exception thrown: %s\n", e.what());
+ return G_PATCH_INVALID;
+ }
return G_PATCH_OK;
}
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
int patch = G_VERSION_PATCH;
bool operator ==(const Version& o) const;
- bool operator <(const Version& o) const;
+ bool operator < (const Version& o) const;
};
-struct Action
-{
- ID id;
- ID channelId;
- Frame frame;
- uint32_t event;
- ID prevId;
- ID nextId;
-};
-
-
-#ifdef WITH_VST
-struct Plugin
-{
- ID id;
- std::string path;
- bool bypass;
- std::vector<float> params;
- std::vector<uint32_t> midiInParams;
-};
-#endif
-
-
-struct Wave
+struct Column
{
- ID id;
- std::string path;
+ ID id;
+ int width;
};
{
ID id;
ChannelType type;
- int size;
+ int height;
std::string name;
ID columnId;
int key;
bool mute;
bool solo;
float volume = G_DEFAULT_VOL;
- float pan;
+ float pan = 0.5f;
bool hasActions;
+ bool armed;
bool midiIn;
uint32_t midiInKeyPress;
uint32_t midiInKeyRel;
uint32_t midiOutLplaying;
uint32_t midiOutLmute;
uint32_t midiOutLsolo;
- bool armed;
// sample channel
ID waveId;
ChannelMode mode;
Frame begin;
Frame end;
- // TODO - shift
+ Frame shift;
bool readActions;
float pitch = G_DEFAULT_PITCH;
bool inputMonitor;
#endif
};
-extern std::string name;
-extern int samplerate; // Original samplerate when the patch was saved
-extern int lastTakeId;
-extern bool metronome;
+
+struct Action
+{
+ ID id;
+ ID channelId;
+ Frame frame;
+ uint32_t event;
+ ID prevId;
+ ID nextId;
+};
+
+
+struct Wave
+{
+ ID id;
+ std::string path;
+};
+
+
+#ifdef WITH_VST
+struct Plugin
+{
+ ID id;
+ std::string path;
+ bool bypass;
+ std::vector<float> params;
+ std::vector<uint32_t> midiInParams;
+};
+#endif
+
+
+struct Patch
+{
+ std::string name = G_DEFAULT_PATCH_NAME;
+ int bars = G_DEFAULT_BARS;
+ int beats = G_DEFAULT_BEATS;
+ float bpm = G_DEFAULT_BPM;
+ bool quantize = G_DEFAULT_QUANTIZE;
+ int lastTakeId = 0;
+ int samplerate = G_DEFAULT_SAMPLERATE;
+ bool metronome = false;
+
+ std::vector<Column> columns;
+ std::vector<Channel> channels;
+ std::vector<Action> actions;
+ std::vector<Wave> waves;
+#ifdef WITH_VST
+ std::vector<Plugin> plugins;
+#endif
+};
+
+
+/* -------------------------------------------------------------------------- */
+
+
+extern Patch patch;
+
+
+/* -------------------------------------------------------------------------- */
/* init
Initializes the patch with default values. */
void init();
-/* verify
-Checks if the patch is valid. */
-
-int verify(const std::string& file);
-
/* read
-Reads patch from file. Always call verify() first in order to see if the patch
-format is valid. It takes 'basePath' as parameter for Wave reading.*/
+Reads patch from file. It takes 'basePath' as parameter for Wave reading. */
int read(const std::string& file, const std::string& basePath);
/* write
Writes patch to file. */
-bool write(const std::string& name, const std::string& file, bool isProject);
+bool write(const std::string& name, const std::string& file);
}}}; // giada::m::patch::
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
Plugin::Plugin(ID id, juce::AudioPluginInstance* plugin, double samplerate,
int buffersize)
-: id (id),
- valid (true),
- m_plugin(plugin),
- m_bypass(false)
+: id (id),
+ valid (true),
+ m_plugin (plugin),
+ m_bypass (false)
{
- /* Init midiInParams. All values are empty (0x0): they will be filled during
- midi learning process. */
+ /* Initialize midiInParams vector, where midiInParams.size == number of
+ plugin parameters. All values are initially empty (0x0): they will be filled
+ during MIDI learning process. */
+
+ midiInParams = std::vector<uint32_t>(m_plugin->getParameters().size());
- midiInParams = std::deque<std::atomic<uint32_t>>(m_plugin->getParameters().size());
-
m_buffer.setSize(G_MAX_IO_CHANS, buffersize);
/* Try to set the main bus to the current number of channels. In the future
Plugin::Plugin(const Plugin& o)
-: id (o.id),
- valid (true),
- m_plugin(o.m_plugin),
- m_bypass(o.m_bypass.load())
+: id (o.id),
+ valid (o.valid),
+ m_plugin (o.m_plugin),
+ m_bypass (o.m_bypass.load()),
+ midiInParams(o.midiInParams)
{
- for (const std::atomic<uint32_t>& p : o.midiInParams)
- midiInParams.emplace_back(p.load());
}
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
ID id;
/* midiInParams
- A list of midiIn hex values for parameter automation. Why not a vector?
- Unfortunately std::atomic types are not copy-constructible, nor
- copy-assignable: such type won't suit a std::vector. */
+ A vector of midiIn hex values for parameter automation. */
- std::deque<std::atomic<uint32_t>> midiInParams;
+ std::vector<uint32_t> midiInParams;
/* valid
A missing plug-in is loaded anyway, yet marked as 'invalid'. */
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
missingPlugins_ = false;
unknownPluginList_.clear();
loadList(u::fs::getHomePath() + G_SLASH + "plugins.xml");
- sortPlugins(static_cast<pluginManager::SortMethod>(conf::pluginSortMethod));
+ sortPlugins(static_cast<pluginManager::SortMethod>(conf::conf.pluginSortMethod));
}
/* -------------------------------------------------------------------------- */
-std::unique_ptr<Plugin> makePlugin(const patch::Plugin& p)
+const patch::Plugin serializePlugin(const Plugin& p)
+{
+ patch::Plugin pp;
+ pp.id = p.id;
+ pp.path = p.getUniqueId();
+ pp.bypass = p.isBypassed();
+
+ for (int i = 0; i < p.getNumParameters(); i++)
+ pp.params.push_back(p.getParameter(i));
+
+ for (uint32_t param : p.midiInParams)
+ pp.midiInParams.push_back(param);
+
+ return pp;
+}
+
+
+/* -------------------------------------------------------------------------- */
+
+
+std::unique_ptr<Plugin> deserializePlugin(const patch::Plugin& p)
{
std::unique_ptr<Plugin> plugin = makePlugin(p.path, p.id);
if (!plugin->valid)
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
std::unique_ptr<Plugin> makePlugin(const std::string& fid, ID id=0);
std::unique_ptr<Plugin> makePlugin(int index);
std::unique_ptr<Plugin> makePlugin(const Plugin& other);
-std::unique_ptr<Plugin> makePlugin(const patch::Plugin& p);
+
+/* (de)serializePlugin
+Transforms patch data into a Plugin object and vice versa. */
+
+const patch::Plugin serializePlugin(const Plugin& p);
+std::unique_ptr<Plugin> deserializePlugin(const patch::Plugin& p);
/* getAvailablePluginInfo
Returns the available plugin information (name, type, ...) given a plug-in
-#ifndef FIFO_H
-#define FIFO_H
+/* -----------------------------------------------------------------------------
+ *
+ * Giada - Your Hardcore Loopmachine
+ *
+ * -----------------------------------------------------------------------------
+ *
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
+ *
+ * This file is part of Giada - Your Hardcore Loopmachine.
+ *
+ * Giada - Your Hardcore Loopmachine is free software: you can
+ * redistribute it and/or modify it under the terms of the GNU General
+ * Public License as published by the Free Software Foundation, either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * Giada - Your Hardcore Loopmachine is distributed in the hope that it
+ * will be useful, but WITHOUT ANY WARRANTY; without even the implied
+ * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+ * See the GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with Giada - Your Hardcore Loopmachine. If not, see
+ * <http://www.gnu.org/licenses/>.
+ *
+ * -------------------------------------------------------------------------- */
+
+
+#ifndef G_QUEUE_H
+#define G_QUEUE_H
#include <array>
namespace giada {
namespace m
{
+/* Queue
+Single producer, single consumer lock-free queue. */
+
template<typename T, size_t size>
class Queue
{
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
/* -------------------------------------------------------------------------- */
-recorder::ActionMap makeActionsFromPatch(const std::vector<patch::Action>& pactions)
+recorder::ActionMap deserializeActions(const std::vector<patch::Action>& pactions)
{
recorder::ActionMap out;
return out;
}
+
+
+/* -------------------------------------------------------------------------- */
+
+
+std::vector<patch::Action> serializeActions(const recorder::ActionMap& actions)
+{
+ std::vector<patch::Action> out;
+ for (const auto& kv : actions) {
+ for (const Action& a : kv.second) {
+ out.push_back({
+ a.id,
+ a.channelId,
+ a.frame,
+ a.event.getRaw(),
+ a.prevId,
+ a.nextId,
+ });
+ }
+ }
+ return out;
+}
+
}}}; // giada::m::recorderHandler::
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
void clearAllActions();
-recorder::ActionMap makeActionsFromPatch(const std::vector<patch::Action>& pactions);
+/* (de)serializeActions
+Creates new Actions given the patch raw data and vice versa. */
+recorder::ActionMap deserializeActions(const std::vector<patch::Action>& as);
+std::vector<patch::Action> serializeActions(const recorder::ActionMap& as);
}}}; // giada::m::recorderHandler::
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
/* -------------------------------------------------------------------------- */
-std::unique_ptr<Wave> createFromPatch(const patch::Wave& w)
+std::unique_ptr<Wave> deserializeWave(const patch::Wave& w)
{
return createFromFile(w.path, w.id).wave;
}
+const patch::Wave serializeWave(const Wave& w)
+{
+ return { w.id, u::fs::basename(w.getPath()) };
+}
+
+
/* -------------------------------------------------------------------------- */
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
std::unique_ptr<Wave> createFromWave(const Wave& src, int a, int b);
-/* createFromPatch
-Creates a new Wave given the patch raw data. */
+/* (de)serializeWave
+Creates a new Wave given the patch raw data and vice versa. */
-std::unique_ptr<Wave> createFromPatch(const patch::Wave& w);
+std::unique_ptr<Wave> deserializeWave(const patch::Wave& w);
+const patch::Wave serializeWave(const Wave& w);
int resample(Wave& w, int quality, int samplerate);
+++ /dev/null
-/************************************************************************/\r
-/*! \class RtAudio\r
- \brief Realtime audio i/o C++ classes.\r
-\r
- RtAudio provides a common API (Application Programming Interface)\r
- for realtime audio input/output across Linux (native ALSA, Jack,\r
- and OSS), Macintosh OS X (CoreAudio and Jack), and Windows\r
- (DirectSound, ASIO and WASAPI) operating systems.\r
-\r
- RtAudio WWW site: http://www.music.mcgill.ca/~gary/rtaudio/\r
-\r
- RtAudio: realtime audio i/o C++ classes\r
- Copyright (c) 2001-2016 Gary P. Scavone\r
-\r
- Permission is hereby granted, free of charge, to any person\r
- obtaining a copy of this software and associated documentation files\r
- (the "Software"), to deal in the Software without restriction,\r
- including without limitation the rights to use, copy, modify, merge,\r
- publish, distribute, sublicense, and/or sell copies of the Software,\r
- and to permit persons to whom the Software is furnished to do so,\r
- subject to the following conditions:\r
-\r
- The above copyright notice and this permission notice shall be\r
- included in all copies or substantial portions of the Software.\r
-\r
- Any person wishing to distribute modifications to the Software is\r
- asked to send the modifications to the original developer so that\r
- they can be incorporated into the canonical version. This is,\r
- however, not a binding provision of this license.\r
-\r
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,\r
- EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\r
- MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\r
- IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR\r
- ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF\r
- CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION\r
- WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\r
-*/\r
-/************************************************************************/\r
-\r
-// RtAudio: Version 4.1.2\r
-\r
-#include "RtAudio.h"\r
-#include <iostream>\r
-#include <cstdlib>\r
-#include <cstring>\r
-#include <climits>\r
-#include <algorithm>\r
-\r
-// Static variable definitions.\r
-const unsigned int RtApi::MAX_SAMPLE_RATES = 14;\r
-const unsigned int RtApi::SAMPLE_RATES[] = {\r
- 4000, 5512, 8000, 9600, 11025, 16000, 22050,\r
- 32000, 44100, 48000, 88200, 96000, 176400, 192000\r
-};\r
-\r
-#if defined(__WINDOWS_DS__) || defined(__WINDOWS_ASIO__) || defined(__WINDOWS_WASAPI__)\r
- #define MUTEX_INITIALIZE(A) InitializeCriticalSection(A)\r
- #define MUTEX_DESTROY(A) DeleteCriticalSection(A)\r
- #define MUTEX_LOCK(A) EnterCriticalSection(A)\r
- #define MUTEX_UNLOCK(A) LeaveCriticalSection(A)\r
-\r
- #include "tchar.h"\r
-\r
- static std::string convertCharPointerToStdString(const char *text)\r
- {\r
- return std::string(text);\r
- }\r
-\r
- static std::string convertCharPointerToStdString(const wchar_t *text)\r
- {\r
- int length = WideCharToMultiByte(CP_UTF8, 0, text, -1, NULL, 0, NULL, NULL);\r
- std::string s( length-1, '\0' );\r
- WideCharToMultiByte(CP_UTF8, 0, text, -1, &s[0], length, NULL, NULL);\r
- return s;\r
- }\r
-\r
-#elif defined(__LINUX_ALSA__) || defined(__LINUX_PULSE__) || defined(__UNIX_JACK__) || defined(__LINUX_OSS__) || defined(__MACOSX_CORE__)\r
- // pthread API\r
- #define MUTEX_INITIALIZE(A) pthread_mutex_init(A, NULL)\r
- #define MUTEX_DESTROY(A) pthread_mutex_destroy(A)\r
- #define MUTEX_LOCK(A) pthread_mutex_lock(A)\r
- #define MUTEX_UNLOCK(A) pthread_mutex_unlock(A)\r
-#else\r
- #define MUTEX_INITIALIZE(A) abs(*A) // dummy definitions\r
- #define MUTEX_DESTROY(A) abs(*A) // dummy definitions\r
-#endif\r
-\r
-// *************************************************** //\r
-//\r
-// RtAudio definitions.\r
-//\r
-// *************************************************** //\r
-\r
-std::string RtAudio :: getVersion( void ) throw()\r
-{\r
- return RTAUDIO_VERSION;\r
-}\r
-\r
-void RtAudio :: getCompiledApi( std::vector<RtAudio::Api> &apis ) throw()\r
-{\r
- apis.clear();\r
-\r
- // The order here will control the order of RtAudio's API search in\r
- // the constructor.\r
-#if defined(__UNIX_JACK__)\r
- apis.push_back( UNIX_JACK );\r
-#endif\r
-#if defined(__LINUX_ALSA__)\r
- apis.push_back( LINUX_ALSA );\r
-#endif\r
-#if defined(__LINUX_PULSE__)\r
- apis.push_back( LINUX_PULSE );\r
-#endif\r
-#if defined(__LINUX_OSS__)\r
- apis.push_back( LINUX_OSS );\r
-#endif\r
-#if defined(__WINDOWS_ASIO__)\r
- apis.push_back( WINDOWS_ASIO );\r
-#endif\r
-#if defined(__WINDOWS_WASAPI__)\r
- apis.push_back( WINDOWS_WASAPI );\r
-#endif\r
-#if defined(__WINDOWS_DS__)\r
- apis.push_back( WINDOWS_DS );\r
-#endif\r
-#if defined(__MACOSX_CORE__)\r
- apis.push_back( MACOSX_CORE );\r
-#endif\r
-#if defined(__RTAUDIO_DUMMY__)\r
- apis.push_back( RTAUDIO_DUMMY );\r
-#endif\r
-}\r
-\r
-void RtAudio :: openRtApi( RtAudio::Api api )\r
-{\r
- if ( rtapi_ )\r
- delete rtapi_;\r
- rtapi_ = 0;\r
-\r
-#if defined(__UNIX_JACK__)\r
- if ( api == UNIX_JACK )\r
- rtapi_ = new RtApiJack();\r
-#endif\r
-#if defined(__LINUX_ALSA__)\r
- if ( api == LINUX_ALSA )\r
- rtapi_ = new RtApiAlsa();\r
-#endif\r
-#if defined(__LINUX_PULSE__)\r
- if ( api == LINUX_PULSE )\r
- rtapi_ = new RtApiPulse();\r
-#endif\r
-#if defined(__LINUX_OSS__)\r
- if ( api == LINUX_OSS )\r
- rtapi_ = new RtApiOss();\r
-#endif\r
-#if defined(__WINDOWS_ASIO__)\r
- if ( api == WINDOWS_ASIO )\r
- rtapi_ = new RtApiAsio();\r
-#endif\r
-#if defined(__WINDOWS_WASAPI__)\r
- if ( api == WINDOWS_WASAPI )\r
- rtapi_ = new RtApiWasapi();\r
-#endif\r
-#if defined(__WINDOWS_DS__)\r
- if ( api == WINDOWS_DS )\r
- rtapi_ = new RtApiDs();\r
-#endif\r
-#if defined(__MACOSX_CORE__)\r
- if ( api == MACOSX_CORE )\r
- rtapi_ = new RtApiCore();\r
-#endif\r
-#if defined(__RTAUDIO_DUMMY__)\r
- if ( api == RTAUDIO_DUMMY )\r
- rtapi_ = new RtApiDummy();\r
-#endif\r
-}\r
-\r
-RtAudio :: RtAudio( RtAudio::Api api )\r
-{\r
- rtapi_ = 0;\r
-\r
- if ( api != UNSPECIFIED ) {\r
- // Attempt to open the specified API.\r
- openRtApi( api );\r
- if ( rtapi_ ) return;\r
-\r
- // No compiled support for specified API value. Issue a debug\r
- // warning and continue as if no API was specified.\r
- std::cerr << "\nRtAudio: no compiled support for specified API argument!\n" << std::endl;\r
- }\r
-\r
- // Iterate through the compiled APIs and return as soon as we find\r
- // one with at least one device or we reach the end of the list.\r
- std::vector< RtAudio::Api > apis;\r
- getCompiledApi( apis );\r
- for ( unsigned int i=0; i<apis.size(); i++ ) {\r
- openRtApi( apis[i] );\r
- if ( rtapi_ && rtapi_->getDeviceCount() ) break;\r
- }\r
-\r
- if ( rtapi_ ) return;\r
-\r
- // It should not be possible to get here because the preprocessor\r
- // definition __RTAUDIO_DUMMY__ is automatically defined if no\r
- // API-specific definitions are passed to the compiler. But just in\r
- // case something weird happens, we'll thow an error.\r
- std::string errorText = "\nRtAudio: no compiled API support found ... critical error!!\n\n";\r
- throw( RtAudioError( errorText, RtAudioError::UNSPECIFIED ) );\r
-}\r
-\r
-RtAudio :: ~RtAudio() throw()\r
-{\r
- if ( rtapi_ )\r
- delete rtapi_;\r
-}\r
-\r
-void RtAudio :: openStream( RtAudio::StreamParameters *outputParameters,\r
- RtAudio::StreamParameters *inputParameters,\r
- RtAudioFormat format, unsigned int sampleRate,\r
- unsigned int *bufferFrames,\r
- RtAudioCallback callback, void *userData,\r
- RtAudio::StreamOptions *options,\r
- RtAudioErrorCallback errorCallback )\r
-{\r
- return rtapi_->openStream( outputParameters, inputParameters, format,\r
- sampleRate, bufferFrames, callback,\r
- userData, options, errorCallback );\r
-}\r
-\r
-// *************************************************** //\r
-//\r
-// Public RtApi definitions (see end of file for\r
-// private or protected utility functions).\r
-//\r
-// *************************************************** //\r
-\r
-RtApi :: RtApi()\r
-{\r
- stream_.state = STREAM_CLOSED;\r
- stream_.mode = UNINITIALIZED;\r
- stream_.apiHandle = 0;\r
- stream_.userBuffer[0] = 0;\r
- stream_.userBuffer[1] = 0;\r
- MUTEX_INITIALIZE( &stream_.mutex );\r
- showWarnings_ = true;\r
- firstErrorOccurred_ = false;\r
-}\r
-\r
-RtApi :: ~RtApi()\r
-{\r
- MUTEX_DESTROY( &stream_.mutex );\r
-}\r
-\r
-void RtApi :: openStream( RtAudio::StreamParameters *oParams,\r
- RtAudio::StreamParameters *iParams,\r
- RtAudioFormat format, unsigned int sampleRate,\r
- unsigned int *bufferFrames,\r
- RtAudioCallback callback, void *userData,\r
- RtAudio::StreamOptions *options,\r
- RtAudioErrorCallback errorCallback )\r
-{\r
- if ( stream_.state != STREAM_CLOSED ) {\r
- errorText_ = "RtApi::openStream: a stream is already open!";\r
- error( RtAudioError::INVALID_USE );\r
- return;\r
- }\r
-\r
- // Clear stream information potentially left from a previously open stream.\r
- clearStreamInfo();\r
-\r
- if ( oParams && oParams->nChannels < 1 ) {\r
- errorText_ = "RtApi::openStream: a non-NULL output StreamParameters structure cannot have an nChannels value less than one.";\r
- error( RtAudioError::INVALID_USE );\r
- return;\r
- }\r
-\r
- if ( iParams && iParams->nChannels < 1 ) {\r
- errorText_ = "RtApi::openStream: a non-NULL input StreamParameters structure cannot have an nChannels value less than one.";\r
- error( RtAudioError::INVALID_USE );\r
- return;\r
- }\r
-\r
- if ( oParams == NULL && iParams == NULL ) {\r
- errorText_ = "RtApi::openStream: input and output StreamParameters structures are both NULL!";\r
- error( RtAudioError::INVALID_USE );\r
- return;\r
- }\r
-\r
- if ( formatBytes(format) == 0 ) {\r
- errorText_ = "RtApi::openStream: 'format' parameter value is undefined.";\r
- error( RtAudioError::INVALID_USE );\r
- return;\r
- }\r
-\r
- unsigned int nDevices = getDeviceCount();\r
- unsigned int oChannels = 0;\r
- if ( oParams ) {\r
- oChannels = oParams->nChannels;\r
- if ( oParams->deviceId >= nDevices ) {\r
- errorText_ = "RtApi::openStream: output device parameter value is invalid.";\r
- error( RtAudioError::INVALID_USE );\r
- return;\r
- }\r
- }\r
-\r
- unsigned int iChannels = 0;\r
- if ( iParams ) {\r
- iChannels = iParams->nChannels;\r
- if ( iParams->deviceId >= nDevices ) {\r
- errorText_ = "RtApi::openStream: input device parameter value is invalid.";\r
- error( RtAudioError::INVALID_USE );\r
- return;\r
- }\r
- }\r
-\r
- bool result;\r
-\r
- if ( oChannels > 0 ) {\r
-\r
- result = probeDeviceOpen( oParams->deviceId, OUTPUT, oChannels, oParams->firstChannel,\r
- sampleRate, format, bufferFrames, options );\r
- if ( result == false ) {\r
- error( RtAudioError::SYSTEM_ERROR );\r
- return;\r
- }\r
- }\r
-\r
- if ( iChannels > 0 ) {\r
-\r
- result = probeDeviceOpen( iParams->deviceId, INPUT, iChannels, iParams->firstChannel,\r
- sampleRate, format, bufferFrames, options );\r
- if ( result == false ) {\r
- if ( oChannels > 0 ) closeStream();\r
- error( RtAudioError::SYSTEM_ERROR );\r
- return;\r
- }\r
- }\r
-\r
- stream_.callbackInfo.callback = (void *) callback;\r
- stream_.callbackInfo.userData = userData;\r
- stream_.callbackInfo.errorCallback = (void *) errorCallback;\r
-\r
- if ( options ) options->numberOfBuffers = stream_.nBuffers;\r
- stream_.state = STREAM_STOPPED;\r
-}\r
-\r
-unsigned int RtApi :: getDefaultInputDevice( void )\r
-{\r
- // Should be implemented in subclasses if possible.\r
- return 0;\r
-}\r
-\r
-unsigned int RtApi :: getDefaultOutputDevice( void )\r
-{\r
- // Should be implemented in subclasses if possible.\r
- return 0;\r
-}\r
-\r
-void RtApi :: closeStream( void )\r
-{\r
- // MUST be implemented in subclasses!\r
- return;\r
-}\r
-\r
-bool RtApi :: probeDeviceOpen( unsigned int /*device*/, StreamMode /*mode*/, unsigned int /*channels*/,\r
- unsigned int /*firstChannel*/, unsigned int /*sampleRate*/,\r
- RtAudioFormat /*format*/, unsigned int * /*bufferSize*/,\r
- RtAudio::StreamOptions * /*options*/ )\r
-{\r
- // MUST be implemented in subclasses!\r
- return FAILURE;\r
-}\r
-\r
-void RtApi :: tickStreamTime( void )\r
-{\r
- // Subclasses that do not provide their own implementation of\r
- // getStreamTime should call this function once per buffer I/O to\r
- // provide basic stream time support.\r
-\r
- stream_.streamTime += ( stream_.bufferSize * 1.0 / stream_.sampleRate );\r
-\r
-#if defined( HAVE_GETTIMEOFDAY )\r
- gettimeofday( &stream_.lastTickTimestamp, NULL );\r
-#endif\r
-}\r
-\r
-long RtApi :: getStreamLatency( void )\r
-{\r
- verifyStream();\r
-\r
- long totalLatency = 0;\r
- if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )\r
- totalLatency = stream_.latency[0];\r
- if ( stream_.mode == INPUT || stream_.mode == DUPLEX )\r
- totalLatency += stream_.latency[1];\r
-\r
- return totalLatency;\r
-}\r
-\r
-double RtApi :: getStreamTime( void )\r
-{\r
- verifyStream();\r
-\r
-#if defined( HAVE_GETTIMEOFDAY )\r
- // Return a very accurate estimate of the stream time by\r
- // adding in the elapsed time since the last tick.\r
- struct timeval then;\r
- struct timeval now;\r
-\r
- if ( stream_.state != STREAM_RUNNING || stream_.streamTime == 0.0 )\r
- return stream_.streamTime;\r
-\r
- gettimeofday( &now, NULL );\r
- then = stream_.lastTickTimestamp;\r
- return stream_.streamTime +\r
- ((now.tv_sec + 0.000001 * now.tv_usec) -\r
- (then.tv_sec + 0.000001 * then.tv_usec));\r
-#else\r
- return stream_.streamTime;\r
-#endif\r
-}\r
-\r
-void RtApi :: setStreamTime( double time )\r
-{\r
- verifyStream();\r
-\r
- if ( time >= 0.0 )\r
- stream_.streamTime = time;\r
-}\r
-\r
-unsigned int RtApi :: getStreamSampleRate( void )\r
-{\r
- verifyStream();\r
-\r
- return stream_.sampleRate;\r
-}\r
-\r
-\r
-// *************************************************** //\r
-//\r
-// OS/API-specific methods.\r
-//\r
-// *************************************************** //\r
-\r
-#if defined(__MACOSX_CORE__)\r
-\r
-// The OS X CoreAudio API is designed to use a separate callback\r
-// procedure for each of its audio devices. A single RtAudio duplex\r
-// stream using two different devices is supported here, though it\r
-// cannot be guaranteed to always behave correctly because we cannot\r
-// synchronize these two callbacks.\r
-//\r
-// A property listener is installed for over/underrun information.\r
-// However, no functionality is currently provided to allow property\r
-// listeners to trigger user handlers because it is unclear what could\r
-// be done if a critical stream parameter (buffer size, sample rate,\r
-// device disconnect) notification arrived. The listeners entail\r
-// quite a bit of extra code and most likely, a user program wouldn't\r
-// be prepared for the result anyway. However, we do provide a flag\r
-// to the client callback function to inform of an over/underrun.\r
-\r
-// A structure to hold various information related to the CoreAudio API\r
-// implementation.\r
-struct CoreHandle {\r
- AudioDeviceID id[2]; // device ids\r
-#if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )\r
- AudioDeviceIOProcID procId[2];\r
-#endif\r
- UInt32 iStream[2]; // device stream index (or first if using multiple)\r
- UInt32 nStreams[2]; // number of streams to use\r
- bool xrun[2];\r
- char *deviceBuffer;\r
- pthread_cond_t condition;\r
- int drainCounter; // Tracks callback counts when draining\r
- bool internalDrain; // Indicates if stop is initiated from callback or not.\r
-\r
- CoreHandle()\r
- :deviceBuffer(0), drainCounter(0), internalDrain(false) { nStreams[0] = 1; nStreams[1] = 1; id[0] = 0; id[1] = 0; xrun[0] = false; xrun[1] = false; }\r
-};\r
-\r
-RtApiCore:: RtApiCore()\r
-{\r
-#if defined( AVAILABLE_MAC_OS_X_VERSION_10_6_AND_LATER )\r
- // This is a largely undocumented but absolutely necessary\r
- // requirement starting with OS-X 10.6. If not called, queries and\r
- // updates to various audio device properties are not handled\r
- // correctly.\r
- CFRunLoopRef theRunLoop = NULL;\r
- AudioObjectPropertyAddress property = { kAudioHardwarePropertyRunLoop,\r
- kAudioObjectPropertyScopeGlobal,\r
- kAudioObjectPropertyElementMaster };\r
- OSStatus result = AudioObjectSetPropertyData( kAudioObjectSystemObject, &property, 0, NULL, sizeof(CFRunLoopRef), &theRunLoop);\r
- if ( result != noErr ) {\r
- errorText_ = "RtApiCore::RtApiCore: error setting run loop property!";\r
- error( RtAudioError::WARNING );\r
- }\r
-#endif\r
-}\r
-\r
-RtApiCore :: ~RtApiCore()\r
-{\r
- // The subclass destructor gets called before the base class\r
- // destructor, so close an existing stream before deallocating\r
- // apiDeviceId memory.\r
- if ( stream_.state != STREAM_CLOSED ) closeStream();\r
-}\r
-\r
-unsigned int RtApiCore :: getDeviceCount( void )\r
-{\r
- // Find out how many audio devices there are, if any.\r
- UInt32 dataSize;\r
- AudioObjectPropertyAddress propertyAddress = { kAudioHardwarePropertyDevices, kAudioObjectPropertyScopeGlobal, kAudioObjectPropertyElementMaster };\r
- OSStatus result = AudioObjectGetPropertyDataSize( kAudioObjectSystemObject, &propertyAddress, 0, NULL, &dataSize );\r
- if ( result != noErr ) {\r
- errorText_ = "RtApiCore::getDeviceCount: OS-X error getting device info!";\r
- error( RtAudioError::WARNING );\r
- return 0;\r
- }\r
-\r
- return dataSize / sizeof( AudioDeviceID );\r
-}\r
-\r
-unsigned int RtApiCore :: getDefaultInputDevice( void )\r
-{\r
- unsigned int nDevices = getDeviceCount();\r
- if ( nDevices <= 1 ) return 0;\r
-\r
- AudioDeviceID id;\r
- UInt32 dataSize = sizeof( AudioDeviceID );\r
- AudioObjectPropertyAddress property = { kAudioHardwarePropertyDefaultInputDevice, kAudioObjectPropertyScopeGlobal, kAudioObjectPropertyElementMaster };\r
- OSStatus result = AudioObjectGetPropertyData( kAudioObjectSystemObject, &property, 0, NULL, &dataSize, &id );\r
- if ( result != noErr ) {\r
- errorText_ = "RtApiCore::getDefaultInputDevice: OS-X system error getting device.";\r
- error( RtAudioError::WARNING );\r
- return 0;\r
- }\r
-\r
- dataSize *= nDevices;\r
- AudioDeviceID deviceList[ nDevices ];\r
- property.mSelector = kAudioHardwarePropertyDevices;\r
- result = AudioObjectGetPropertyData( kAudioObjectSystemObject, &property, 0, NULL, &dataSize, (void *) &deviceList );\r
- if ( result != noErr ) {\r
- errorText_ = "RtApiCore::getDefaultInputDevice: OS-X system error getting device IDs.";\r
- error( RtAudioError::WARNING );\r
- return 0;\r
- }\r
-\r
- for ( unsigned int i=0; i<nDevices; i++ )\r
- if ( id == deviceList[i] ) return i;\r
-\r
- errorText_ = "RtApiCore::getDefaultInputDevice: No default device found!";\r
- error( RtAudioError::WARNING );\r
- return 0;\r
-}\r
-\r
-unsigned int RtApiCore :: getDefaultOutputDevice( void )\r
-{\r
- unsigned int nDevices = getDeviceCount();\r
- if ( nDevices <= 1 ) return 0;\r
-\r
- AudioDeviceID id;\r
- UInt32 dataSize = sizeof( AudioDeviceID );\r
- AudioObjectPropertyAddress property = { kAudioHardwarePropertyDefaultOutputDevice, kAudioObjectPropertyScopeGlobal, kAudioObjectPropertyElementMaster };\r
- OSStatus result = AudioObjectGetPropertyData( kAudioObjectSystemObject, &property, 0, NULL, &dataSize, &id );\r
- if ( result != noErr ) {\r
- errorText_ = "RtApiCore::getDefaultOutputDevice: OS-X system error getting device.";\r
- error( RtAudioError::WARNING );\r
- return 0;\r
- }\r
-\r
- dataSize = sizeof( AudioDeviceID ) * nDevices;\r
- AudioDeviceID deviceList[ nDevices ];\r
- property.mSelector = kAudioHardwarePropertyDevices;\r
- result = AudioObjectGetPropertyData( kAudioObjectSystemObject, &property, 0, NULL, &dataSize, (void *) &deviceList );\r
- if ( result != noErr ) {\r
- errorText_ = "RtApiCore::getDefaultOutputDevice: OS-X system error getting device IDs.";\r
- error( RtAudioError::WARNING );\r
- return 0;\r
- }\r
-\r
- for ( unsigned int i=0; i<nDevices; i++ )\r
- if ( id == deviceList[i] ) return i;\r
-\r
- errorText_ = "RtApiCore::getDefaultOutputDevice: No default device found!";\r
- error( RtAudioError::WARNING );\r
- return 0;\r
-}\r
-\r
-RtAudio::DeviceInfo RtApiCore :: getDeviceInfo( unsigned int device )\r
-{\r
- RtAudio::DeviceInfo info;\r
- info.probed = false;\r
-\r
- // Get device ID\r
- unsigned int nDevices = getDeviceCount();\r
- if ( nDevices == 0 ) {\r
- errorText_ = "RtApiCore::getDeviceInfo: no devices found!";\r
- error( RtAudioError::INVALID_USE );\r
- return info;\r
- }\r
-\r
- if ( device >= nDevices ) {\r
- errorText_ = "RtApiCore::getDeviceInfo: device ID is invalid!";\r
- error( RtAudioError::INVALID_USE );\r
- return info;\r
- }\r
-\r
- AudioDeviceID deviceList[ nDevices ];\r
- UInt32 dataSize = sizeof( AudioDeviceID ) * nDevices;\r
- AudioObjectPropertyAddress property = { kAudioHardwarePropertyDevices,\r
- kAudioObjectPropertyScopeGlobal,\r
- kAudioObjectPropertyElementMaster };\r
- OSStatus result = AudioObjectGetPropertyData( kAudioObjectSystemObject, &property,\r
- 0, NULL, &dataSize, (void *) &deviceList );\r
- if ( result != noErr ) {\r
- errorText_ = "RtApiCore::getDeviceInfo: OS-X system error getting device IDs.";\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
-\r
- AudioDeviceID id = deviceList[ device ];\r
-\r
- // Get the device name.\r
- info.name.erase();\r
- CFStringRef cfname;\r
- dataSize = sizeof( CFStringRef );\r
- property.mSelector = kAudioObjectPropertyManufacturer;\r
- result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &cfname );\r
- if ( result != noErr ) {\r
- errorStream_ << "RtApiCore::probeDeviceInfo: system error (" << getErrorCode( result ) << ") getting device manufacturer.";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
-\r
- //const char *mname = CFStringGetCStringPtr( cfname, CFStringGetSystemEncoding() );\r
- int length = CFStringGetLength(cfname);\r
- char *mname = (char *)malloc(length * 3 + 1);\r
-#if defined( UNICODE ) || defined( _UNICODE )\r
- CFStringGetCString(cfname, mname, length * 3 + 1, kCFStringEncodingUTF8);\r
-#else\r
- CFStringGetCString(cfname, mname, length * 3 + 1, CFStringGetSystemEncoding());\r
-#endif\r
- info.name.append( (const char *)mname, strlen(mname) );\r
- info.name.append( ": " );\r
- CFRelease( cfname );\r
- free(mname);\r
-\r
- property.mSelector = kAudioObjectPropertyName;\r
- result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &cfname );\r
- if ( result != noErr ) {\r
- errorStream_ << "RtApiCore::probeDeviceInfo: system error (" << getErrorCode( result ) << ") getting device name.";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
-\r
- //const char *name = CFStringGetCStringPtr( cfname, CFStringGetSystemEncoding() );\r
- length = CFStringGetLength(cfname);\r
- char *name = (char *)malloc(length * 3 + 1);\r
-#if defined( UNICODE ) || defined( _UNICODE )\r
- CFStringGetCString(cfname, name, length * 3 + 1, kCFStringEncodingUTF8);\r
-#else\r
- CFStringGetCString(cfname, name, length * 3 + 1, CFStringGetSystemEncoding());\r
-#endif\r
- info.name.append( (const char *)name, strlen(name) );\r
- CFRelease( cfname );\r
- free(name);\r
-\r
- // Get the output stream "configuration".\r
- AudioBufferList *bufferList = nil;\r
- property.mSelector = kAudioDevicePropertyStreamConfiguration;\r
- property.mScope = kAudioDevicePropertyScopeOutput;\r
- // property.mElement = kAudioObjectPropertyElementWildcard;\r
- dataSize = 0;\r
- result = AudioObjectGetPropertyDataSize( id, &property, 0, NULL, &dataSize );\r
- if ( result != noErr || dataSize == 0 ) {\r
- errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting output stream configuration info for device (" << device << ").";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
-\r
- // Allocate the AudioBufferList.\r
- bufferList = (AudioBufferList *) malloc( dataSize );\r
- if ( bufferList == NULL ) {\r
- errorText_ = "RtApiCore::getDeviceInfo: memory error allocating output AudioBufferList.";\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
-\r
- result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, bufferList );\r
- if ( result != noErr || dataSize == 0 ) {\r
- free( bufferList );\r
- errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting output stream configuration for device (" << device << ").";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
-\r
- // Get output channel information.\r
- unsigned int i, nStreams = bufferList->mNumberBuffers;\r
- for ( i=0; i<nStreams; i++ )\r
- info.outputChannels += bufferList->mBuffers[i].mNumberChannels;\r
- free( bufferList );\r
-\r
- // Get the input stream "configuration".\r
- property.mScope = kAudioDevicePropertyScopeInput;\r
- result = AudioObjectGetPropertyDataSize( id, &property, 0, NULL, &dataSize );\r
- if ( result != noErr || dataSize == 0 ) {\r
- errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting input stream configuration info for device (" << device << ").";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
-\r
- // Allocate the AudioBufferList.\r
- bufferList = (AudioBufferList *) malloc( dataSize );\r
- if ( bufferList == NULL ) {\r
- errorText_ = "RtApiCore::getDeviceInfo: memory error allocating input AudioBufferList.";\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
-\r
- result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, bufferList );\r
- if (result != noErr || dataSize == 0) {\r
- free( bufferList );\r
- errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting input stream configuration for device (" << device << ").";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
-\r
- // Get input channel information.\r
- nStreams = bufferList->mNumberBuffers;\r
- for ( i=0; i<nStreams; i++ )\r
- info.inputChannels += bufferList->mBuffers[i].mNumberChannels;\r
- free( bufferList );\r
-\r
- // If device opens for both playback and capture, we determine the channels.\r
- if ( info.outputChannels > 0 && info.inputChannels > 0 )\r
- info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;\r
-\r
- // Probe the device sample rates.\r
- bool isInput = false;\r
- if ( info.outputChannels == 0 ) isInput = true;\r
-\r
- // Determine the supported sample rates.\r
- property.mSelector = kAudioDevicePropertyAvailableNominalSampleRates;\r
- if ( isInput == false ) property.mScope = kAudioDevicePropertyScopeOutput;\r
- result = AudioObjectGetPropertyDataSize( id, &property, 0, NULL, &dataSize );\r
- if ( result != kAudioHardwareNoError || dataSize == 0 ) {\r
- errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting sample rate info.";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
-\r
- UInt32 nRanges = dataSize / sizeof( AudioValueRange );\r
- AudioValueRange rangeList[ nRanges ];\r
- result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &rangeList );\r
- if ( result != kAudioHardwareNoError ) {\r
- errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting sample rates.";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
-\r
- // The sample rate reporting mechanism is a bit of a mystery. It\r
- // seems that it can either return individual rates or a range of\r
- // rates. I assume that if the min / max range values are the same,\r
- // then that represents a single supported rate and if the min / max\r
- // range values are different, the device supports an arbitrary\r
- // range of values (though there might be multiple ranges, so we'll\r
- // use the most conservative range).\r
- Float64 minimumRate = 1.0, maximumRate = 10000000000.0;\r
- bool haveValueRange = false;\r
- info.sampleRates.clear();\r
- for ( UInt32 i=0; i<nRanges; i++ ) {\r
- if ( rangeList[i].mMinimum == rangeList[i].mMaximum ) {\r
- unsigned int tmpSr = (unsigned int) rangeList[i].mMinimum;\r
- info.sampleRates.push_back( tmpSr );\r
-\r
- if ( !info.preferredSampleRate || ( tmpSr <= 48000 && tmpSr > info.preferredSampleRate ) )\r
- info.preferredSampleRate = tmpSr;\r
-\r
- } else {\r
- haveValueRange = true;\r
- if ( rangeList[i].mMinimum > minimumRate ) minimumRate = rangeList[i].mMinimum;\r
- if ( rangeList[i].mMaximum < maximumRate ) maximumRate = rangeList[i].mMaximum;\r
- }\r
- }\r
-\r
- if ( haveValueRange ) {\r
- for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {\r
- if ( SAMPLE_RATES[k] >= (unsigned int) minimumRate && SAMPLE_RATES[k] <= (unsigned int) maximumRate ) {\r
- info.sampleRates.push_back( SAMPLE_RATES[k] );\r
-\r
- if ( !info.preferredSampleRate || ( SAMPLE_RATES[k] <= 48000 && SAMPLE_RATES[k] > info.preferredSampleRate ) )\r
- info.preferredSampleRate = SAMPLE_RATES[k];\r
- }\r
- }\r
- }\r
-\r
- // Sort and remove any redundant values\r
- std::sort( info.sampleRates.begin(), info.sampleRates.end() );\r
- info.sampleRates.erase( unique( info.sampleRates.begin(), info.sampleRates.end() ), info.sampleRates.end() );\r
-\r
- if ( info.sampleRates.size() == 0 ) {\r
- errorStream_ << "RtApiCore::probeDeviceInfo: No supported sample rates found for device (" << device << ").";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
-\r
- // CoreAudio always uses 32-bit floating point data for PCM streams.\r
- // Thus, any other "physical" formats supported by the device are of\r
- // no interest to the client.\r
- info.nativeFormats = RTAUDIO_FLOAT32;\r
-\r
- if ( info.outputChannels > 0 )\r
- if ( getDefaultOutputDevice() == device ) info.isDefaultOutput = true;\r
- if ( info.inputChannels > 0 )\r
- if ( getDefaultInputDevice() == device ) info.isDefaultInput = true;\r
-\r
- info.probed = true;\r
- return info;\r
-}\r
-\r
-static OSStatus callbackHandler( AudioDeviceID inDevice,\r
- const AudioTimeStamp* /*inNow*/,\r
- const AudioBufferList* inInputData,\r
- const AudioTimeStamp* /*inInputTime*/,\r
- AudioBufferList* outOutputData,\r
- const AudioTimeStamp* /*inOutputTime*/,\r
- void* infoPointer )\r
-{\r
- CallbackInfo *info = (CallbackInfo *) infoPointer;\r
-\r
- RtApiCore *object = (RtApiCore *) info->object;\r
- if ( object->callbackEvent( inDevice, inInputData, outOutputData ) == false )\r
- return kAudioHardwareUnspecifiedError;\r
- else\r
- return kAudioHardwareNoError;\r
-}\r
-\r
-static OSStatus xrunListener( AudioObjectID /*inDevice*/,\r
- UInt32 nAddresses,\r
- const AudioObjectPropertyAddress properties[],\r
- void* handlePointer )\r
-{\r
- CoreHandle *handle = (CoreHandle *) handlePointer;\r
- for ( UInt32 i=0; i<nAddresses; i++ ) {\r
- if ( properties[i].mSelector == kAudioDeviceProcessorOverload ) {\r
- if ( properties[i].mScope == kAudioDevicePropertyScopeInput )\r
- handle->xrun[1] = true;\r
- else\r
- handle->xrun[0] = true;\r
- }\r
- }\r
-\r
- return kAudioHardwareNoError;\r
-}\r
-\r
-static OSStatus rateListener( AudioObjectID inDevice,\r
- UInt32 /*nAddresses*/,\r
- const AudioObjectPropertyAddress /*properties*/[],\r
- void* ratePointer )\r
-{\r
- Float64 *rate = (Float64 *) ratePointer;\r
- UInt32 dataSize = sizeof( Float64 );\r
- AudioObjectPropertyAddress property = { kAudioDevicePropertyNominalSampleRate,\r
- kAudioObjectPropertyScopeGlobal,\r
- kAudioObjectPropertyElementMaster };\r
- AudioObjectGetPropertyData( inDevice, &property, 0, NULL, &dataSize, rate );\r
- return kAudioHardwareNoError;\r
-}\r
-\r
-bool RtApiCore :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,\r
- unsigned int firstChannel, unsigned int sampleRate,\r
- RtAudioFormat format, unsigned int *bufferSize,\r
- RtAudio::StreamOptions *options )\r
-{\r
- // Get device ID\r
- unsigned int nDevices = getDeviceCount();\r
- if ( nDevices == 0 ) {\r
- // This should not happen because a check is made before this function is called.\r
- errorText_ = "RtApiCore::probeDeviceOpen: no devices found!";\r
- return FAILURE;\r
- }\r
-\r
- if ( device >= nDevices ) {\r
- // This should not happen because a check is made before this function is called.\r
- errorText_ = "RtApiCore::probeDeviceOpen: device ID is invalid!";\r
- return FAILURE;\r
- }\r
-\r
- AudioDeviceID deviceList[ nDevices ];\r
- UInt32 dataSize = sizeof( AudioDeviceID ) * nDevices;\r
- AudioObjectPropertyAddress property = { kAudioHardwarePropertyDevices,\r
- kAudioObjectPropertyScopeGlobal,\r
- kAudioObjectPropertyElementMaster };\r
- OSStatus result = AudioObjectGetPropertyData( kAudioObjectSystemObject, &property,\r
- 0, NULL, &dataSize, (void *) &deviceList );\r
- if ( result != noErr ) {\r
- errorText_ = "RtApiCore::probeDeviceOpen: OS-X system error getting device IDs.";\r
- return FAILURE;\r
- }\r
-\r
- AudioDeviceID id = deviceList[ device ];\r
-\r
- // Setup for stream mode.\r
- bool isInput = false;\r
- if ( mode == INPUT ) {\r
- isInput = true;\r
- property.mScope = kAudioDevicePropertyScopeInput;\r
- }\r
- else\r
- property.mScope = kAudioDevicePropertyScopeOutput;\r
-\r
- // Get the stream "configuration".\r
- AudioBufferList *bufferList = nil;\r
- dataSize = 0;\r
- property.mSelector = kAudioDevicePropertyStreamConfiguration;\r
- result = AudioObjectGetPropertyDataSize( id, &property, 0, NULL, &dataSize );\r
- if ( result != noErr || dataSize == 0 ) {\r
- errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream configuration info for device (" << device << ").";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // Allocate the AudioBufferList.\r
- bufferList = (AudioBufferList *) malloc( dataSize );\r
- if ( bufferList == NULL ) {\r
- errorText_ = "RtApiCore::probeDeviceOpen: memory error allocating AudioBufferList.";\r
- return FAILURE;\r
- }\r
-\r
- result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, bufferList );\r
- if (result != noErr || dataSize == 0) {\r
- free( bufferList );\r
- errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream configuration for device (" << device << ").";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // Search for one or more streams that contain the desired number of\r
- // channels. CoreAudio devices can have an arbitrary number of\r
- // streams and each stream can have an arbitrary number of channels.\r
- // For each stream, a single buffer of interleaved samples is\r
- // provided. RtAudio prefers the use of one stream of interleaved\r
- // data or multiple consecutive single-channel streams. However, we\r
- // now support multiple consecutive multi-channel streams of\r
- // interleaved data as well.\r
- UInt32 iStream, offsetCounter = firstChannel;\r
- UInt32 nStreams = bufferList->mNumberBuffers;\r
- bool monoMode = false;\r
- bool foundStream = false;\r
-\r
- // First check that the device supports the requested number of\r
- // channels.\r
- UInt32 deviceChannels = 0;\r
- for ( iStream=0; iStream<nStreams; iStream++ )\r
- deviceChannels += bufferList->mBuffers[iStream].mNumberChannels;\r
-\r
- if ( deviceChannels < ( channels + firstChannel ) ) {\r
- free( bufferList );\r
- errorStream_ << "RtApiCore::probeDeviceOpen: the device (" << device << ") does not support the requested channel count.";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // Look for a single stream meeting our needs.\r
- UInt32 firstStream, streamCount = 1, streamChannels = 0, channelOffset = 0;\r
- for ( iStream=0; iStream<nStreams; iStream++ ) {\r
- streamChannels = bufferList->mBuffers[iStream].mNumberChannels;\r
- if ( streamChannels >= channels + offsetCounter ) {\r
- firstStream = iStream;\r
- channelOffset = offsetCounter;\r
- foundStream = true;\r
- break;\r
- }\r
- if ( streamChannels > offsetCounter ) break;\r
- offsetCounter -= streamChannels;\r
- }\r
-\r
- // If we didn't find a single stream above, then we should be able\r
- // to meet the channel specification with multiple streams.\r
- if ( foundStream == false ) {\r
- monoMode = true;\r
- offsetCounter = firstChannel;\r
- for ( iStream=0; iStream<nStreams; iStream++ ) {\r
- streamChannels = bufferList->mBuffers[iStream].mNumberChannels;\r
- if ( streamChannels > offsetCounter ) break;\r
- offsetCounter -= streamChannels;\r
- }\r
-\r
- firstStream = iStream;\r
- channelOffset = offsetCounter;\r
- Int32 channelCounter = channels + offsetCounter - streamChannels;\r
-\r
- if ( streamChannels > 1 ) monoMode = false;\r
- while ( channelCounter > 0 ) {\r
- streamChannels = bufferList->mBuffers[++iStream].mNumberChannels;\r
- if ( streamChannels > 1 ) monoMode = false;\r
- channelCounter -= streamChannels;\r
- streamCount++;\r
- }\r
- }\r
-\r
- free( bufferList );\r
-\r
- // Determine the buffer size.\r
- AudioValueRange bufferRange;\r
- dataSize = sizeof( AudioValueRange );\r
- property.mSelector = kAudioDevicePropertyBufferFrameSizeRange;\r
- result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &bufferRange );\r
-\r
- if ( result != noErr ) {\r
- errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting buffer size range for device (" << device << ").";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- if ( bufferRange.mMinimum > *bufferSize ) *bufferSize = (unsigned long) bufferRange.mMinimum;\r
- else if ( bufferRange.mMaximum < *bufferSize ) *bufferSize = (unsigned long) bufferRange.mMaximum;\r
- if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) *bufferSize = (unsigned long) bufferRange.mMinimum;\r
-\r
- // Set the buffer size. For multiple streams, I'm assuming we only\r
- // need to make this setting for the master channel.\r
- UInt32 theSize = (UInt32) *bufferSize;\r
- dataSize = sizeof( UInt32 );\r
- property.mSelector = kAudioDevicePropertyBufferFrameSize;\r
- result = AudioObjectSetPropertyData( id, &property, 0, NULL, dataSize, &theSize );\r
-\r
- if ( result != noErr ) {\r
- errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting the buffer size for device (" << device << ").";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // If attempting to setup a duplex stream, the bufferSize parameter\r
- // MUST be the same in both directions!\r
- *bufferSize = theSize;\r
- if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {\r
- errorStream_ << "RtApiCore::probeDeviceOpen: system error setting buffer size for duplex stream on device (" << device << ").";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- stream_.bufferSize = *bufferSize;\r
- stream_.nBuffers = 1;\r
-\r
- // Try to set "hog" mode ... it's not clear to me this is working.\r
- if ( options && options->flags & RTAUDIO_HOG_DEVICE ) {\r
- pid_t hog_pid;\r
- dataSize = sizeof( hog_pid );\r
- property.mSelector = kAudioDevicePropertyHogMode;\r
- result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &hog_pid );\r
- if ( result != noErr ) {\r
- errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting 'hog' state!";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- if ( hog_pid != getpid() ) {\r
- hog_pid = getpid();\r
- result = AudioObjectSetPropertyData( id, &property, 0, NULL, dataSize, &hog_pid );\r
- if ( result != noErr ) {\r
- errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting 'hog' state!";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
- }\r
- }\r
-\r
- // Check and if necessary, change the sample rate for the device.\r
- Float64 nominalRate;\r
- dataSize = sizeof( Float64 );\r
- property.mSelector = kAudioDevicePropertyNominalSampleRate;\r
- result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &nominalRate );\r
- if ( result != noErr ) {\r
- errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting current sample rate.";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // Only change the sample rate if off by more than 1 Hz.\r
- if ( fabs( nominalRate - (double)sampleRate ) > 1.0 ) {\r
-\r
- // Set a property listener for the sample rate change\r
- Float64 reportedRate = 0.0;\r
- AudioObjectPropertyAddress tmp = { kAudioDevicePropertyNominalSampleRate, kAudioObjectPropertyScopeGlobal, kAudioObjectPropertyElementMaster };\r
- result = AudioObjectAddPropertyListener( id, &tmp, rateListener, (void *) &reportedRate );\r
- if ( result != noErr ) {\r
- errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting sample rate property listener for device (" << device << ").";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- nominalRate = (Float64) sampleRate;\r
- result = AudioObjectSetPropertyData( id, &property, 0, NULL, dataSize, &nominalRate );\r
- if ( result != noErr ) {\r
- AudioObjectRemovePropertyListener( id, &tmp, rateListener, (void *) &reportedRate );\r
- errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting sample rate for device (" << device << ").";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // Now wait until the reported nominal rate is what we just set.\r
- UInt32 microCounter = 0;\r
- while ( reportedRate != nominalRate ) {\r
- microCounter += 5000;\r
- if ( microCounter > 5000000 ) break;\r
- usleep( 5000 );\r
- }\r
-\r
- // Remove the property listener.\r
- AudioObjectRemovePropertyListener( id, &tmp, rateListener, (void *) &reportedRate );\r
-\r
- if ( microCounter > 5000000 ) {\r
- errorStream_ << "RtApiCore::probeDeviceOpen: timeout waiting for sample rate update for device (" << device << ").";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
- }\r
-\r
- // Now set the stream format for all streams. Also, check the\r
- // physical format of the device and change that if necessary.\r
- AudioStreamBasicDescription description;\r
- dataSize = sizeof( AudioStreamBasicDescription );\r
- property.mSelector = kAudioStreamPropertyVirtualFormat;\r
- result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &description );\r
- if ( result != noErr ) {\r
- errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream format for device (" << device << ").";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // Set the sample rate and data format id. However, only make the\r
- // change if the sample rate is not within 1.0 of the desired\r
- // rate and the format is not linear pcm.\r
- bool updateFormat = false;\r
- if ( fabs( description.mSampleRate - (Float64)sampleRate ) > 1.0 ) {\r
- description.mSampleRate = (Float64) sampleRate;\r
- updateFormat = true;\r
- }\r
-\r
- if ( description.mFormatID != kAudioFormatLinearPCM ) {\r
- description.mFormatID = kAudioFormatLinearPCM;\r
- updateFormat = true;\r
- }\r
-\r
- if ( updateFormat ) {\r
- result = AudioObjectSetPropertyData( id, &property, 0, NULL, dataSize, &description );\r
- if ( result != noErr ) {\r
- errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting sample rate or data format for device (" << device << ").";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
- }\r
-\r
- // Now check the physical format.\r
- property.mSelector = kAudioStreamPropertyPhysicalFormat;\r
- result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &description );\r
- if ( result != noErr ) {\r
- errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream physical format for device (" << device << ").";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- //std::cout << "Current physical stream format:" << std::endl;\r
- //std::cout << " mBitsPerChan = " << description.mBitsPerChannel << std::endl;\r
- //std::cout << " aligned high = " << (description.mFormatFlags & kAudioFormatFlagIsAlignedHigh) << ", isPacked = " << (description.mFormatFlags & kAudioFormatFlagIsPacked) << std::endl;\r
- //std::cout << " bytesPerFrame = " << description.mBytesPerFrame << std::endl;\r
- //std::cout << " sample rate = " << description.mSampleRate << std::endl;\r
-\r
- if ( description.mFormatID != kAudioFormatLinearPCM || description.mBitsPerChannel < 16 ) {\r
- description.mFormatID = kAudioFormatLinearPCM;\r
- //description.mSampleRate = (Float64) sampleRate;\r
- AudioStreamBasicDescription testDescription = description;\r
- UInt32 formatFlags;\r
-\r
- // We'll try higher bit rates first and then work our way down.\r
- std::vector< std::pair<UInt32, UInt32> > physicalFormats;\r
- formatFlags = (description.mFormatFlags | kLinearPCMFormatFlagIsFloat) & ~kLinearPCMFormatFlagIsSignedInteger;\r
- physicalFormats.push_back( std::pair<Float32, UInt32>( 32, formatFlags ) );\r
- formatFlags = (description.mFormatFlags | kLinearPCMFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked) & ~kLinearPCMFormatFlagIsFloat;\r
- physicalFormats.push_back( std::pair<Float32, UInt32>( 32, formatFlags ) );\r
- physicalFormats.push_back( std::pair<Float32, UInt32>( 24, formatFlags ) ); // 24-bit packed\r
- formatFlags &= ~( kAudioFormatFlagIsPacked | kAudioFormatFlagIsAlignedHigh );\r
- physicalFormats.push_back( std::pair<Float32, UInt32>( 24.2, formatFlags ) ); // 24-bit in 4 bytes, aligned low\r
- formatFlags |= kAudioFormatFlagIsAlignedHigh;\r
- physicalFormats.push_back( std::pair<Float32, UInt32>( 24.4, formatFlags ) ); // 24-bit in 4 bytes, aligned high\r
- formatFlags = (description.mFormatFlags | kLinearPCMFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked) & ~kLinearPCMFormatFlagIsFloat;\r
- physicalFormats.push_back( std::pair<Float32, UInt32>( 16, formatFlags ) );\r
- physicalFormats.push_back( std::pair<Float32, UInt32>( 8, formatFlags ) );\r
-\r
- bool setPhysicalFormat = false;\r
- for( unsigned int i=0; i<physicalFormats.size(); i++ ) {\r
- testDescription = description;\r
- testDescription.mBitsPerChannel = (UInt32) physicalFormats[i].first;\r
- testDescription.mFormatFlags = physicalFormats[i].second;\r
- if ( (24 == (UInt32)physicalFormats[i].first) && ~( physicalFormats[i].second & kAudioFormatFlagIsPacked ) )\r
- testDescription.mBytesPerFrame = 4 * testDescription.mChannelsPerFrame;\r
- else\r
- testDescription.mBytesPerFrame = testDescription.mBitsPerChannel/8 * testDescription.mChannelsPerFrame;\r
- testDescription.mBytesPerPacket = testDescription.mBytesPerFrame * testDescription.mFramesPerPacket;\r
- result = AudioObjectSetPropertyData( id, &property, 0, NULL, dataSize, &testDescription );\r
- if ( result == noErr ) {\r
- setPhysicalFormat = true;\r
- //std::cout << "Updated physical stream format:" << std::endl;\r
- //std::cout << " mBitsPerChan = " << testDescription.mBitsPerChannel << std::endl;\r
- //std::cout << " aligned high = " << (testDescription.mFormatFlags & kAudioFormatFlagIsAlignedHigh) << ", isPacked = " << (testDescription.mFormatFlags & kAudioFormatFlagIsPacked) << std::endl;\r
- //std::cout << " bytesPerFrame = " << testDescription.mBytesPerFrame << std::endl;\r
- //std::cout << " sample rate = " << testDescription.mSampleRate << std::endl;\r
- break;\r
- }\r
- }\r
-\r
- if ( !setPhysicalFormat ) {\r
- errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting physical data format for device (" << device << ").";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
- } // done setting virtual/physical formats.\r
-\r
- // Get the stream / device latency.\r
- UInt32 latency;\r
- dataSize = sizeof( UInt32 );\r
- property.mSelector = kAudioDevicePropertyLatency;\r
- if ( AudioObjectHasProperty( id, &property ) == true ) {\r
- result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &latency );\r
- if ( result == kAudioHardwareNoError ) stream_.latency[ mode ] = latency;\r
- else {\r
- errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting device latency for device (" << device << ").";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- }\r
- }\r
-\r
- // Byte-swapping: According to AudioHardware.h, the stream data will\r
- // always be presented in native-endian format, so we should never\r
- // need to byte swap.\r
- stream_.doByteSwap[mode] = false;\r
-\r
- // From the CoreAudio documentation, PCM data must be supplied as\r
- // 32-bit floats.\r
- stream_.userFormat = format;\r
- stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;\r
-\r
- if ( streamCount == 1 )\r
- stream_.nDeviceChannels[mode] = description.mChannelsPerFrame;\r
- else // multiple streams\r
- stream_.nDeviceChannels[mode] = channels;\r
- stream_.nUserChannels[mode] = channels;\r
- stream_.channelOffset[mode] = channelOffset; // offset within a CoreAudio stream\r
- if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;\r
- else stream_.userInterleaved = true;\r
- stream_.deviceInterleaved[mode] = true;\r
- if ( monoMode == true ) stream_.deviceInterleaved[mode] = false;\r
-\r
- // Set flags for buffer conversion.\r
- stream_.doConvertBuffer[mode] = false;\r
- if ( stream_.userFormat != stream_.deviceFormat[mode] )\r
- stream_.doConvertBuffer[mode] = true;\r
- if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )\r
- stream_.doConvertBuffer[mode] = true;\r
- if ( streamCount == 1 ) {\r
- if ( stream_.nUserChannels[mode] > 1 &&\r
- stream_.userInterleaved != stream_.deviceInterleaved[mode] )\r
- stream_.doConvertBuffer[mode] = true;\r
- }\r
- else if ( monoMode && stream_.userInterleaved )\r
- stream_.doConvertBuffer[mode] = true;\r
-\r
- // Allocate our CoreHandle structure for the stream.\r
- CoreHandle *handle = 0;\r
- if ( stream_.apiHandle == 0 ) {\r
- try {\r
- handle = new CoreHandle;\r
- }\r
- catch ( std::bad_alloc& ) {\r
- errorText_ = "RtApiCore::probeDeviceOpen: error allocating CoreHandle memory.";\r
- goto error;\r
- }\r
-\r
- if ( pthread_cond_init( &handle->condition, NULL ) ) {\r
- errorText_ = "RtApiCore::probeDeviceOpen: error initializing pthread condition variable.";\r
- goto error;\r
- }\r
- stream_.apiHandle = (void *) handle;\r
- }\r
- else\r
- handle = (CoreHandle *) stream_.apiHandle;\r
- handle->iStream[mode] = firstStream;\r
- handle->nStreams[mode] = streamCount;\r
- handle->id[mode] = id;\r
-\r
- // Allocate necessary internal buffers.\r
- unsigned long bufferBytes;\r
- bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );\r
- // stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );\r
- stream_.userBuffer[mode] = (char *) malloc( bufferBytes * sizeof(char) );\r
- memset( stream_.userBuffer[mode], 0, bufferBytes * sizeof(char) );\r
- if ( stream_.userBuffer[mode] == NULL ) {\r
- errorText_ = "RtApiCore::probeDeviceOpen: error allocating user buffer memory.";\r
- goto error;\r
- }\r
-\r
- // If possible, we will make use of the CoreAudio stream buffers as\r
- // "device buffers". However, we can't do this if using multiple\r
- // streams.\r
- if ( stream_.doConvertBuffer[mode] && handle->nStreams[mode] > 1 ) {\r
-\r
- bool makeBuffer = true;\r
- bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );\r
- if ( mode == INPUT ) {\r
- if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {\r
- unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );\r
- if ( bufferBytes <= bytesOut ) makeBuffer = false;\r
- }\r
- }\r
-\r
- if ( makeBuffer ) {\r
- bufferBytes *= *bufferSize;\r
- if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );\r
- stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );\r
- if ( stream_.deviceBuffer == NULL ) {\r
- errorText_ = "RtApiCore::probeDeviceOpen: error allocating device buffer memory.";\r
- goto error;\r
- }\r
- }\r
- }\r
-\r
- stream_.sampleRate = sampleRate;\r
- stream_.device[mode] = device;\r
- stream_.state = STREAM_STOPPED;\r
- stream_.callbackInfo.object = (void *) this;\r
-\r
- // Setup the buffer conversion information structure.\r
- if ( stream_.doConvertBuffer[mode] ) {\r
- if ( streamCount > 1 ) setConvertInfo( mode, 0 );\r
- else setConvertInfo( mode, channelOffset );\r
- }\r
-\r
- if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] == device )\r
- // Only one callback procedure per device.\r
- stream_.mode = DUPLEX;\r
- else {\r
-#if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )\r
- result = AudioDeviceCreateIOProcID( id, callbackHandler, (void *) &stream_.callbackInfo, &handle->procId[mode] );\r
-#else\r
- // deprecated in favor of AudioDeviceCreateIOProcID()\r
- result = AudioDeviceAddIOProc( id, callbackHandler, (void *) &stream_.callbackInfo );\r
-#endif\r
- if ( result != noErr ) {\r
- errorStream_ << "RtApiCore::probeDeviceOpen: system error setting callback for device (" << device << ").";\r
- errorText_ = errorStream_.str();\r
- goto error;\r
- }\r
- if ( stream_.mode == OUTPUT && mode == INPUT )\r
- stream_.mode = DUPLEX;\r
- else\r
- stream_.mode = mode;\r
- }\r
-\r
- // Setup the device property listener for over/underload.\r
- property.mSelector = kAudioDeviceProcessorOverload;\r
- property.mScope = kAudioObjectPropertyScopeGlobal;\r
- result = AudioObjectAddPropertyListener( id, &property, xrunListener, (void *) handle );\r
-\r
- return SUCCESS;\r
-\r
- error:\r
- if ( handle ) {\r
- pthread_cond_destroy( &handle->condition );\r
- delete handle;\r
- stream_.apiHandle = 0;\r
- }\r
-\r
- for ( int i=0; i<2; i++ ) {\r
- if ( stream_.userBuffer[i] ) {\r
- free( stream_.userBuffer[i] );\r
- stream_.userBuffer[i] = 0;\r
- }\r
- }\r
-\r
- if ( stream_.deviceBuffer ) {\r
- free( stream_.deviceBuffer );\r
- stream_.deviceBuffer = 0;\r
- }\r
-\r
- stream_.state = STREAM_CLOSED;\r
- return FAILURE;\r
-}\r
-\r
-void RtApiCore :: closeStream( void )\r
-{\r
- if ( stream_.state == STREAM_CLOSED ) {\r
- errorText_ = "RtApiCore::closeStream(): no open stream to close!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- CoreHandle *handle = (CoreHandle *) stream_.apiHandle;\r
- if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {\r
- if (handle) {\r
- AudioObjectPropertyAddress property = { kAudioHardwarePropertyDevices,\r
- kAudioObjectPropertyScopeGlobal,\r
- kAudioObjectPropertyElementMaster };\r
-\r
- property.mSelector = kAudioDeviceProcessorOverload;\r
- property.mScope = kAudioObjectPropertyScopeGlobal;\r
- if (AudioObjectRemovePropertyListener( handle->id[0], &property, xrunListener, (void *) handle ) != noErr) {\r
- errorText_ = "RtApiCore::closeStream(): error removing property listener!";\r
- error( RtAudioError::WARNING );\r
- }\r
- }\r
- if ( stream_.state == STREAM_RUNNING )\r
- AudioDeviceStop( handle->id[0], callbackHandler );\r
-#if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )\r
- AudioDeviceDestroyIOProcID( handle->id[0], handle->procId[0] );\r
-#else\r
- // deprecated in favor of AudioDeviceDestroyIOProcID()\r
- AudioDeviceRemoveIOProc( handle->id[0], callbackHandler );\r
-#endif\r
- }\r
-\r
- if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {\r
- if (handle) {\r
- AudioObjectPropertyAddress property = { kAudioHardwarePropertyDevices,\r
- kAudioObjectPropertyScopeGlobal,\r
- kAudioObjectPropertyElementMaster };\r
-\r
- property.mSelector = kAudioDeviceProcessorOverload;\r
- property.mScope = kAudioObjectPropertyScopeGlobal;\r
- if (AudioObjectRemovePropertyListener( handle->id[1], &property, xrunListener, (void *) handle ) != noErr) {\r
- errorText_ = "RtApiCore::closeStream(): error removing property listener!";\r
- error( RtAudioError::WARNING );\r
- }\r
- }\r
- if ( stream_.state == STREAM_RUNNING )\r
- AudioDeviceStop( handle->id[1], callbackHandler );\r
-#if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )\r
- AudioDeviceDestroyIOProcID( handle->id[1], handle->procId[1] );\r
-#else\r
- // deprecated in favor of AudioDeviceDestroyIOProcID()\r
- AudioDeviceRemoveIOProc( handle->id[1], callbackHandler );\r
-#endif\r
- }\r
-\r
- for ( int i=0; i<2; i++ ) {\r
- if ( stream_.userBuffer[i] ) {\r
- free( stream_.userBuffer[i] );\r
- stream_.userBuffer[i] = 0;\r
- }\r
- }\r
-\r
- if ( stream_.deviceBuffer ) {\r
- free( stream_.deviceBuffer );\r
- stream_.deviceBuffer = 0;\r
- }\r
-\r
- // Destroy pthread condition variable.\r
- pthread_cond_destroy( &handle->condition );\r
- delete handle;\r
- stream_.apiHandle = 0;\r
-\r
- stream_.mode = UNINITIALIZED;\r
- stream_.state = STREAM_CLOSED;\r
-}\r
-\r
-void RtApiCore :: startStream( void )\r
-{\r
- verifyStream();\r
- if ( stream_.state == STREAM_RUNNING ) {\r
- errorText_ = "RtApiCore::startStream(): the stream is already running!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- OSStatus result = noErr;\r
- CoreHandle *handle = (CoreHandle *) stream_.apiHandle;\r
- if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {\r
-\r
- result = AudioDeviceStart( handle->id[0], callbackHandler );\r
- if ( result != noErr ) {\r
- errorStream_ << "RtApiCore::startStream: system error (" << getErrorCode( result ) << ") starting callback procedure on device (" << stream_.device[0] << ").";\r
- errorText_ = errorStream_.str();\r
- goto unlock;\r
- }\r
- }\r
-\r
- if ( stream_.mode == INPUT ||\r
- ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {\r
-\r
- result = AudioDeviceStart( handle->id[1], callbackHandler );\r
- if ( result != noErr ) {\r
- errorStream_ << "RtApiCore::startStream: system error starting input callback procedure on device (" << stream_.device[1] << ").";\r
- errorText_ = errorStream_.str();\r
- goto unlock;\r
- }\r
- }\r
-\r
- handle->drainCounter = 0;\r
- handle->internalDrain = false;\r
- stream_.state = STREAM_RUNNING;\r
-\r
- unlock:\r
- if ( result == noErr ) return;\r
- error( RtAudioError::SYSTEM_ERROR );\r
-}\r
-\r
-void RtApiCore :: stopStream( void )\r
-{\r
- verifyStream();\r
- if ( stream_.state == STREAM_STOPPED ) {\r
- errorText_ = "RtApiCore::stopStream(): the stream is already stopped!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- OSStatus result = noErr;\r
- CoreHandle *handle = (CoreHandle *) stream_.apiHandle;\r
- if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {\r
-\r
- if ( handle->drainCounter == 0 ) {\r
- handle->drainCounter = 2;\r
- pthread_cond_wait( &handle->condition, &stream_.mutex ); // block until signaled\r
- }\r
-\r
- result = AudioDeviceStop( handle->id[0], callbackHandler );\r
- if ( result != noErr ) {\r
- errorStream_ << "RtApiCore::stopStream: system error (" << getErrorCode( result ) << ") stopping callback procedure on device (" << stream_.device[0] << ").";\r
- errorText_ = errorStream_.str();\r
- goto unlock;\r
- }\r
- }\r
-\r
- if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {\r
-\r
- result = AudioDeviceStop( handle->id[1], callbackHandler );\r
- if ( result != noErr ) {\r
- errorStream_ << "RtApiCore::stopStream: system error (" << getErrorCode( result ) << ") stopping input callback procedure on device (" << stream_.device[1] << ").";\r
- errorText_ = errorStream_.str();\r
- goto unlock;\r
- }\r
- }\r
-\r
- stream_.state = STREAM_STOPPED;\r
-\r
- unlock:\r
- if ( result == noErr ) return;\r
- error( RtAudioError::SYSTEM_ERROR );\r
-}\r
-\r
-void RtApiCore :: abortStream( void )\r
-{\r
- verifyStream();\r
- if ( stream_.state == STREAM_STOPPED ) {\r
- errorText_ = "RtApiCore::abortStream(): the stream is already stopped!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- CoreHandle *handle = (CoreHandle *) stream_.apiHandle;\r
- handle->drainCounter = 2;\r
-\r
- stopStream();\r
-}\r
-\r
-// This function will be called by a spawned thread when the user\r
-// callback function signals that the stream should be stopped or\r
-// aborted. It is better to handle it this way because the\r
-// callbackEvent() function probably should return before the AudioDeviceStop()\r
-// function is called.\r
-static void *coreStopStream( void *ptr )\r
-{\r
- CallbackInfo *info = (CallbackInfo *) ptr;\r
- RtApiCore *object = (RtApiCore *) info->object;\r
-\r
- object->stopStream();\r
- pthread_exit( NULL );\r
-}\r
-\r
-bool RtApiCore :: callbackEvent( AudioDeviceID deviceId,\r
- const AudioBufferList *inBufferList,\r
- const AudioBufferList *outBufferList )\r
-{\r
- if ( stream_.state == STREAM_STOPPED || stream_.state == STREAM_STOPPING ) return SUCCESS;\r
- if ( stream_.state == STREAM_CLOSED ) {\r
- errorText_ = "RtApiCore::callbackEvent(): the stream is closed ... this shouldn't happen!";\r
- error( RtAudioError::WARNING );\r
- return FAILURE;\r
- }\r
-\r
- CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;\r
- CoreHandle *handle = (CoreHandle *) stream_.apiHandle;\r
-\r
- // Check if we were draining the stream and signal is finished.\r
- if ( handle->drainCounter > 3 ) {\r
- ThreadHandle threadId;\r
-\r
- stream_.state = STREAM_STOPPING;\r
- if ( handle->internalDrain == true )\r
- pthread_create( &threadId, NULL, coreStopStream, info );\r
- else // external call to stopStream()\r
- pthread_cond_signal( &handle->condition );\r
- return SUCCESS;\r
- }\r
-\r
- AudioDeviceID outputDevice = handle->id[0];\r
-\r
- // Invoke user callback to get fresh output data UNLESS we are\r
- // draining stream or duplex mode AND the input/output devices are\r
- // different AND this function is called for the input device.\r
- if ( handle->drainCounter == 0 && ( stream_.mode != DUPLEX || deviceId == outputDevice ) ) {\r
- RtAudioCallback callback = (RtAudioCallback) info->callback;\r
- double streamTime = getStreamTime();\r
- RtAudioStreamStatus status = 0;\r
- if ( stream_.mode != INPUT && handle->xrun[0] == true ) {\r
- status |= RTAUDIO_OUTPUT_UNDERFLOW;\r
- handle->xrun[0] = false;\r
- }\r
- if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {\r
- status |= RTAUDIO_INPUT_OVERFLOW;\r
- handle->xrun[1] = false;\r
- }\r
-\r
- int cbReturnValue = callback( stream_.userBuffer[0], stream_.userBuffer[1],\r
- stream_.bufferSize, streamTime, status, info->userData );\r
- if ( cbReturnValue == 2 ) {\r
- stream_.state = STREAM_STOPPING;\r
- handle->drainCounter = 2;\r
- abortStream();\r
- return SUCCESS;\r
- }\r
- else if ( cbReturnValue == 1 ) {\r
- handle->drainCounter = 1;\r
- handle->internalDrain = true;\r
- }\r
- }\r
-\r
- if ( stream_.mode == OUTPUT || ( stream_.mode == DUPLEX && deviceId == outputDevice ) ) {\r
-\r
- if ( handle->drainCounter > 1 ) { // write zeros to the output stream\r
-\r
- if ( handle->nStreams[0] == 1 ) {\r
- memset( outBufferList->mBuffers[handle->iStream[0]].mData,\r
- 0,\r
- outBufferList->mBuffers[handle->iStream[0]].mDataByteSize );\r
- }\r
- else { // fill multiple streams with zeros\r
- for ( unsigned int i=0; i<handle->nStreams[0]; i++ ) {\r
- memset( outBufferList->mBuffers[handle->iStream[0]+i].mData,\r
- 0,\r
- outBufferList->mBuffers[handle->iStream[0]+i].mDataByteSize );\r
- }\r
- }\r
- }\r
- else if ( handle->nStreams[0] == 1 ) {\r
- if ( stream_.doConvertBuffer[0] ) { // convert directly to CoreAudio stream buffer\r
- convertBuffer( (char *) outBufferList->mBuffers[handle->iStream[0]].mData,\r
- stream_.userBuffer[0], stream_.convertInfo[0] );\r
- }\r
- else { // copy from user buffer\r
- memcpy( outBufferList->mBuffers[handle->iStream[0]].mData,\r
- stream_.userBuffer[0],\r
- outBufferList->mBuffers[handle->iStream[0]].mDataByteSize );\r
- }\r
- }\r
- else { // fill multiple streams\r
- Float32 *inBuffer = (Float32 *) stream_.userBuffer[0];\r
- if ( stream_.doConvertBuffer[0] ) {\r
- convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );\r
- inBuffer = (Float32 *) stream_.deviceBuffer;\r
- }\r
-\r
- if ( stream_.deviceInterleaved[0] == false ) { // mono mode\r
- UInt32 bufferBytes = outBufferList->mBuffers[handle->iStream[0]].mDataByteSize;\r
- for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {\r
- memcpy( outBufferList->mBuffers[handle->iStream[0]+i].mData,\r
- (void *)&inBuffer[i*stream_.bufferSize], bufferBytes );\r
- }\r
- }\r
- else { // fill multiple multi-channel streams with interleaved data\r
- UInt32 streamChannels, channelsLeft, inJump, outJump, inOffset;\r
- Float32 *out, *in;\r
-\r
- bool inInterleaved = ( stream_.userInterleaved ) ? true : false;\r
- UInt32 inChannels = stream_.nUserChannels[0];\r
- if ( stream_.doConvertBuffer[0] ) {\r
- inInterleaved = true; // device buffer will always be interleaved for nStreams > 1 and not mono mode\r
- inChannels = stream_.nDeviceChannels[0];\r
- }\r
-\r
- if ( inInterleaved ) inOffset = 1;\r
- else inOffset = stream_.bufferSize;\r
-\r
- channelsLeft = inChannels;\r
- for ( unsigned int i=0; i<handle->nStreams[0]; i++ ) {\r
- in = inBuffer;\r
- out = (Float32 *) outBufferList->mBuffers[handle->iStream[0]+i].mData;\r
- streamChannels = outBufferList->mBuffers[handle->iStream[0]+i].mNumberChannels;\r
-\r
- outJump = 0;\r
- // Account for possible channel offset in first stream\r
- if ( i == 0 && stream_.channelOffset[0] > 0 ) {\r
- streamChannels -= stream_.channelOffset[0];\r
- outJump = stream_.channelOffset[0];\r
- out += outJump;\r
- }\r
-\r
- // Account for possible unfilled channels at end of the last stream\r
- if ( streamChannels > channelsLeft ) {\r
- outJump = streamChannels - channelsLeft;\r
- streamChannels = channelsLeft;\r
- }\r
-\r
- // Determine input buffer offsets and skips\r
- if ( inInterleaved ) {\r
- inJump = inChannels;\r
- in += inChannels - channelsLeft;\r
- }\r
- else {\r
- inJump = 1;\r
- in += (inChannels - channelsLeft) * inOffset;\r
- }\r
-\r
- for ( unsigned int i=0; i<stream_.bufferSize; i++ ) {\r
- for ( unsigned int j=0; j<streamChannels; j++ ) {\r
- *out++ = in[j*inOffset];\r
- }\r
- out += outJump;\r
- in += inJump;\r
- }\r
- channelsLeft -= streamChannels;\r
- }\r
- }\r
- }\r
- }\r
-\r
- // Don't bother draining input\r
- if ( handle->drainCounter ) {\r
- handle->drainCounter++;\r
- goto unlock;\r
- }\r
-\r
- AudioDeviceID inputDevice;\r
- inputDevice = handle->id[1];\r
- if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && deviceId == inputDevice ) ) {\r
-\r
- if ( handle->nStreams[1] == 1 ) {\r
- if ( stream_.doConvertBuffer[1] ) { // convert directly from CoreAudio stream buffer\r
- convertBuffer( stream_.userBuffer[1],\r
- (char *) inBufferList->mBuffers[handle->iStream[1]].mData,\r
- stream_.convertInfo[1] );\r
- }\r
- else { // copy to user buffer\r
- memcpy( stream_.userBuffer[1],\r
- inBufferList->mBuffers[handle->iStream[1]].mData,\r
- inBufferList->mBuffers[handle->iStream[1]].mDataByteSize );\r
- }\r
- }\r
- else { // read from multiple streams\r
- Float32 *outBuffer = (Float32 *) stream_.userBuffer[1];\r
- if ( stream_.doConvertBuffer[1] ) outBuffer = (Float32 *) stream_.deviceBuffer;\r
-\r
- if ( stream_.deviceInterleaved[1] == false ) { // mono mode\r
- UInt32 bufferBytes = inBufferList->mBuffers[handle->iStream[1]].mDataByteSize;\r
- for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {\r
- memcpy( (void *)&outBuffer[i*stream_.bufferSize],\r
- inBufferList->mBuffers[handle->iStream[1]+i].mData, bufferBytes );\r
- }\r
- }\r
- else { // read from multiple multi-channel streams\r
- UInt32 streamChannels, channelsLeft, inJump, outJump, outOffset;\r
- Float32 *out, *in;\r
-\r
- bool outInterleaved = ( stream_.userInterleaved ) ? true : false;\r
- UInt32 outChannels = stream_.nUserChannels[1];\r
- if ( stream_.doConvertBuffer[1] ) {\r
- outInterleaved = true; // device buffer will always be interleaved for nStreams > 1 and not mono mode\r
- outChannels = stream_.nDeviceChannels[1];\r
- }\r
-\r
- if ( outInterleaved ) outOffset = 1;\r
- else outOffset = stream_.bufferSize;\r
-\r
- channelsLeft = outChannels;\r
- for ( unsigned int i=0; i<handle->nStreams[1]; i++ ) {\r
- out = outBuffer;\r
- in = (Float32 *) inBufferList->mBuffers[handle->iStream[1]+i].mData;\r
- streamChannels = inBufferList->mBuffers[handle->iStream[1]+i].mNumberChannels;\r
-\r
- inJump = 0;\r
- // Account for possible channel offset in first stream\r
- if ( i == 0 && stream_.channelOffset[1] > 0 ) {\r
- streamChannels -= stream_.channelOffset[1];\r
- inJump = stream_.channelOffset[1];\r
- in += inJump;\r
- }\r
-\r
- // Account for possible unread channels at end of the last stream\r
- if ( streamChannels > channelsLeft ) {\r
- inJump = streamChannels - channelsLeft;\r
- streamChannels = channelsLeft;\r
- }\r
-\r
- // Determine output buffer offsets and skips\r
- if ( outInterleaved ) {\r
- outJump = outChannels;\r
- out += outChannels - channelsLeft;\r
- }\r
- else {\r
- outJump = 1;\r
- out += (outChannels - channelsLeft) * outOffset;\r
- }\r
-\r
- for ( unsigned int i=0; i<stream_.bufferSize; i++ ) {\r
- for ( unsigned int j=0; j<streamChannels; j++ ) {\r
- out[j*outOffset] = *in++;\r
- }\r
- out += outJump;\r
- in += inJump;\r
- }\r
- channelsLeft -= streamChannels;\r
- }\r
- }\r
-\r
- if ( stream_.doConvertBuffer[1] ) { // convert from our internal "device" buffer\r
- convertBuffer( stream_.userBuffer[1],\r
- stream_.deviceBuffer,\r
- stream_.convertInfo[1] );\r
- }\r
- }\r
- }\r
-\r
- unlock:\r
- //MUTEX_UNLOCK( &stream_.mutex );\r
-\r
- RtApi::tickStreamTime();\r
- return SUCCESS;\r
-}\r
-\r
-const char* RtApiCore :: getErrorCode( OSStatus code )\r
-{\r
- switch( code ) {\r
-\r
- case kAudioHardwareNotRunningError:\r
- return "kAudioHardwareNotRunningError";\r
-\r
- case kAudioHardwareUnspecifiedError:\r
- return "kAudioHardwareUnspecifiedError";\r
-\r
- case kAudioHardwareUnknownPropertyError:\r
- return "kAudioHardwareUnknownPropertyError";\r
-\r
- case kAudioHardwareBadPropertySizeError:\r
- return "kAudioHardwareBadPropertySizeError";\r
-\r
- case kAudioHardwareIllegalOperationError:\r
- return "kAudioHardwareIllegalOperationError";\r
-\r
- case kAudioHardwareBadObjectError:\r
- return "kAudioHardwareBadObjectError";\r
-\r
- case kAudioHardwareBadDeviceError:\r
- return "kAudioHardwareBadDeviceError";\r
-\r
- case kAudioHardwareBadStreamError:\r
- return "kAudioHardwareBadStreamError";\r
-\r
- case kAudioHardwareUnsupportedOperationError:\r
- return "kAudioHardwareUnsupportedOperationError";\r
-\r
- case kAudioDeviceUnsupportedFormatError:\r
- return "kAudioDeviceUnsupportedFormatError";\r
-\r
- case kAudioDevicePermissionsError:\r
- return "kAudioDevicePermissionsError";\r
-\r
- default:\r
- return "CoreAudio unknown error";\r
- }\r
-}\r
-\r
- //******************** End of __MACOSX_CORE__ *********************//\r
-#endif\r
-\r
-#if defined(__UNIX_JACK__)\r
-\r
-// JACK is a low-latency audio server, originally written for the\r
-// GNU/Linux operating system and now also ported to OS-X. It can\r
-// connect a number of different applications to an audio device, as\r
-// well as allowing them to share audio between themselves.\r
-//\r
-// When using JACK with RtAudio, "devices" refer to JACK clients that\r
-// have ports connected to the server. The JACK server is typically\r
-// started in a terminal as follows:\r
-//\r
-// .jackd -d alsa -d hw:0\r
-//\r
-// or through an interface program such as qjackctl. Many of the\r
-// parameters normally set for a stream are fixed by the JACK server\r
-// and can be specified when the JACK server is started. In\r
-// particular,\r
-//\r
-// .jackd -d alsa -d hw:0 -r 44100 -p 512 -n 4\r
-//\r
-// specifies a sample rate of 44100 Hz, a buffer size of 512 sample\r
-// frames, and number of buffers = 4. Once the server is running, it\r
-// is not possible to override these values. If the values are not\r
-// specified in the command-line, the JACK server uses default values.\r
-//\r
-// The JACK server does not have to be running when an instance of\r
-// RtApiJack is created, though the function getDeviceCount() will\r
-// report 0 devices found until JACK has been started. When no\r
-// devices are available (i.e., the JACK server is not running), a\r
-// stream cannot be opened.\r
-\r
-#include <jack/jack.h>\r
-#include <unistd.h>\r
-#include <cstdio>\r
-\r
-// A structure to hold various information related to the Jack API\r
-// implementation.\r
-struct JackHandle {\r
- jack_client_t *client;\r
- jack_port_t **ports[2];\r
- std::string deviceName[2];\r
- bool xrun[2];\r
- pthread_cond_t condition;\r
- int drainCounter; // Tracks callback counts when draining\r
- bool internalDrain; // Indicates if stop is initiated from callback or not.\r
-\r
- JackHandle()\r
- :client(0), drainCounter(0), internalDrain(false) { ports[0] = 0; ports[1] = 0; xrun[0] = false; xrun[1] = false; }\r
-};\r
-\r
-/* --- Monocasual hack ------------------------------------------------------ */\r
-#if defined(__linux__) || defined(__FreeBSD__)\r
-void *RtApi :: __HACK__getJackClient() {\r
- JackHandle *handle = (JackHandle *) stream_.apiHandle;\r
- return (void*) handle->client;\r
-}\r
-#endif\r
-/* -------------------------------------------------------------------------- */\r
-\r
-static void jackSilentError( const char * ) {}\r
-\r
-RtApiJack :: RtApiJack()\r
-{\r
- // Nothing to do here.\r
-#if !defined(__RTAUDIO_DEBUG__)\r
- // Turn off Jack's internal error reporting.\r
- jack_set_error_function( &jackSilentError );\r
-#endif\r
-}\r
-\r
-RtApiJack :: ~RtApiJack()\r
-{\r
- if ( stream_.state != STREAM_CLOSED ) closeStream();\r
-}\r
-\r
-unsigned int RtApiJack :: getDeviceCount( void )\r
-{\r
- // See if we can become a jack client.\r
- jack_options_t options = (jack_options_t) ( JackNoStartServer ); //JackNullOption;\r
- jack_status_t *status = NULL;\r
- jack_client_t *client = jack_client_open( "RtApiJackCount", options, status );\r
- if ( client == 0 ) return 0;\r
-\r
- const char **ports;\r
- std::string port, previousPort;\r
- unsigned int nChannels = 0, nDevices = 0;\r
- ports = jack_get_ports( client, NULL, NULL, 0 );\r
- if ( ports ) {\r
- // Parse the port names up to the first colon (:).\r
- size_t iColon = 0;\r
- do {\r
- port = (char *) ports[ nChannels ];\r
- iColon = port.find(":");\r
- if ( iColon != std::string::npos ) {\r
- port = port.substr( 0, iColon + 1 );\r
- if ( port != previousPort ) {\r
- nDevices++;\r
- previousPort = port;\r
- }\r
- }\r
- } while ( ports[++nChannels] );\r
- free( ports );\r
- }\r
-\r
- jack_client_close( client );\r
- return nDevices;\r
-}\r
-\r
-RtAudio::DeviceInfo RtApiJack :: getDeviceInfo( unsigned int device )\r
-{\r
- RtAudio::DeviceInfo info;\r
- info.probed = false;\r
-\r
- jack_options_t options = (jack_options_t) ( JackNoStartServer ); //JackNullOption\r
- jack_status_t *status = NULL;\r
- jack_client_t *client = jack_client_open( "RtApiJackInfo", options, status );\r
- if ( client == 0 ) {\r
- errorText_ = "RtApiJack::getDeviceInfo: Jack server not found or connection error!";\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
-\r
- const char **ports;\r
- std::string port, previousPort;\r
- unsigned int nPorts = 0, nDevices = 0;\r
- ports = jack_get_ports( client, NULL, NULL, 0 );\r
- if ( ports ) {\r
- // Parse the port names up to the first colon (:).\r
- size_t iColon = 0;\r
- do {\r
- port = (char *) ports[ nPorts ];\r
- iColon = port.find(":");\r
- if ( iColon != std::string::npos ) {\r
- port = port.substr( 0, iColon );\r
- if ( port != previousPort ) {\r
- if ( nDevices == device ) info.name = port;\r
- nDevices++;\r
- previousPort = port;\r
- }\r
- }\r
- } while ( ports[++nPorts] );\r
- free( ports );\r
- }\r
-\r
- if ( device >= nDevices ) {\r
- jack_client_close( client );\r
- errorText_ = "RtApiJack::getDeviceInfo: device ID is invalid!";\r
- error( RtAudioError::INVALID_USE );\r
- return info;\r
- }\r
-\r
- // Get the current jack server sample rate.\r
- info.sampleRates.clear();\r
-\r
- info.preferredSampleRate = jack_get_sample_rate( client );\r
- info.sampleRates.push_back( info.preferredSampleRate );\r
-\r
- // Count the available ports containing the client name as device\r
- // channels. Jack "input ports" equal RtAudio output channels.\r
- unsigned int nChannels = 0;\r
- ports = jack_get_ports( client, info.name.c_str(), NULL, JackPortIsInput );\r
- if ( ports ) {\r
- while ( ports[ nChannels ] ) nChannels++;\r
- free( ports );\r
- info.outputChannels = nChannels;\r
- }\r
-\r
- // Jack "output ports" equal RtAudio input channels.\r
- nChannels = 0;\r
- ports = jack_get_ports( client, info.name.c_str(), NULL, JackPortIsOutput );\r
- if ( ports ) {\r
- while ( ports[ nChannels ] ) nChannels++;\r
- free( ports );\r
- info.inputChannels = nChannels;\r
- }\r
-\r
- if ( info.outputChannels == 0 && info.inputChannels == 0 ) {\r
- jack_client_close(client);\r
- errorText_ = "RtApiJack::getDeviceInfo: error determining Jack input/output channels!";\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
-\r
- // If device opens for both playback and capture, we determine the channels.\r
- if ( info.outputChannels > 0 && info.inputChannels > 0 )\r
- info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;\r
-\r
- // Jack always uses 32-bit floats.\r
- info.nativeFormats = RTAUDIO_FLOAT32;\r
-\r
- // Jack doesn't provide default devices so we'll use the first available one.\r
- if ( device == 0 && info.outputChannels > 0 )\r
- info.isDefaultOutput = true;\r
- if ( device == 0 && info.inputChannels > 0 )\r
- info.isDefaultInput = true;\r
-\r
- jack_client_close(client);\r
- info.probed = true;\r
- return info;\r
-}\r
-\r
-static int jackCallbackHandler( jack_nframes_t nframes, void *infoPointer )\r
-{\r
- CallbackInfo *info = (CallbackInfo *) infoPointer;\r
-\r
- RtApiJack *object = (RtApiJack *) info->object;\r
- if ( object->callbackEvent( (unsigned long) nframes ) == false ) return 1;\r
-\r
- return 0;\r
-}\r
-\r
-// This function will be called by a spawned thread when the Jack\r
-// server signals that it is shutting down. It is necessary to handle\r
-// it this way because the jackShutdown() function must return before\r
-// the jack_deactivate() function (in closeStream()) will return.\r
-static void *jackCloseStream( void *ptr )\r
-{\r
- CallbackInfo *info = (CallbackInfo *) ptr;\r
- RtApiJack *object = (RtApiJack *) info->object;\r
-\r
- object->closeStream();\r
-\r
- pthread_exit( NULL );\r
-}\r
-static void jackShutdown( void *infoPointer )\r
-{\r
- CallbackInfo *info = (CallbackInfo *) infoPointer;\r
- RtApiJack *object = (RtApiJack *) info->object;\r
-\r
- // Check current stream state. If stopped, then we'll assume this\r
- // was called as a result of a call to RtApiJack::stopStream (the\r
- // deactivation of a client handle causes this function to be called).\r
- // If not, we'll assume the Jack server is shutting down or some\r
- // other problem occurred and we should close the stream.\r
- if ( object->isStreamRunning() == false ) return;\r
-\r
- ThreadHandle threadId;\r
- pthread_create( &threadId, NULL, jackCloseStream, info );\r
- std::cerr << "\nRtApiJack: the Jack server is shutting down this client ... stream stopped and closed!!\n" << std::endl;\r
-}\r
-\r
-static int jackXrun( void *infoPointer )\r
-{\r
- JackHandle *handle = (JackHandle *) infoPointer;\r
-\r
- if ( handle->ports[0] ) handle->xrun[0] = true;\r
- if ( handle->ports[1] ) handle->xrun[1] = true;\r
-\r
- return 0;\r
-}\r
-\r
-bool RtApiJack :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,\r
- unsigned int firstChannel, unsigned int sampleRate,\r
- RtAudioFormat format, unsigned int *bufferSize,\r
- RtAudio::StreamOptions *options )\r
-{\r
- JackHandle *handle = (JackHandle *) stream_.apiHandle;\r
-\r
- // Look for jack server and try to become a client (only do once per stream).\r
- jack_client_t *client = 0;\r
- if ( mode == OUTPUT || ( mode == INPUT && stream_.mode != OUTPUT ) ) {\r
- jack_options_t jackoptions = (jack_options_t) ( JackNoStartServer ); //JackNullOption;\r
- jack_status_t *status = NULL;\r
- if ( options && !options->streamName.empty() )\r
- client = jack_client_open( options->streamName.c_str(), jackoptions, status );\r
- else\r
- client = jack_client_open( "RtApiJack", jackoptions, status );\r
- if ( client == 0 ) {\r
- errorText_ = "RtApiJack::probeDeviceOpen: Jack server not found or connection error!";\r
- error( RtAudioError::WARNING );\r
- return FAILURE;\r
- }\r
- }\r
- else {\r
- // The handle must have been created on an earlier pass.\r
- client = handle->client;\r
- }\r
-\r
- const char **ports;\r
- std::string port, previousPort, deviceName;\r
- unsigned int nPorts = 0, nDevices = 0;\r
- ports = jack_get_ports( client, NULL, NULL, 0 );\r
- if ( ports ) {\r
- // Parse the port names up to the first colon (:).\r
- size_t iColon = 0;\r
- do {\r
- port = (char *) ports[ nPorts ];\r
- iColon = port.find(":");\r
- if ( iColon != std::string::npos ) {\r
- port = port.substr( 0, iColon );\r
- if ( port != previousPort ) {\r
- if ( nDevices == device ) deviceName = port;\r
- nDevices++;\r
- previousPort = port;\r
- }\r
- }\r
- } while ( ports[++nPorts] );\r
- free( ports );\r
- }\r
-\r
- if ( device >= nDevices ) {\r
- errorText_ = "RtApiJack::probeDeviceOpen: device ID is invalid!";\r
- return FAILURE;\r
- }\r
-\r
- // Count the available ports containing the client name as device\r
- // channels. Jack "input ports" equal RtAudio output channels.\r
- unsigned int nChannels = 0;\r
- unsigned long flag = JackPortIsInput;\r
- if ( mode == INPUT ) flag = JackPortIsOutput;\r
- ports = jack_get_ports( client, deviceName.c_str(), NULL, flag );\r
- if ( ports ) {\r
- while ( ports[ nChannels ] ) nChannels++;\r
- free( ports );\r
- }\r
-\r
- // Compare the jack ports for specified client to the requested number of channels.\r
- if ( nChannels < (channels + firstChannel) ) {\r
- errorStream_ << "RtApiJack::probeDeviceOpen: requested number of channels (" << channels << ") + offset (" << firstChannel << ") not found for specified device (" << device << ":" << deviceName << ").";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // Check the jack server sample rate.\r
- unsigned int jackRate = jack_get_sample_rate( client );\r
- if ( sampleRate != jackRate ) {\r
- jack_client_close( client );\r
- errorStream_ << "RtApiJack::probeDeviceOpen: the requested sample rate (" << sampleRate << ") is different than the JACK server rate (" << jackRate << ").";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
- stream_.sampleRate = jackRate;\r
-\r
- // Get the latency of the JACK port.\r
- ports = jack_get_ports( client, deviceName.c_str(), NULL, flag );\r
- if ( ports[ firstChannel ] ) {\r
- // Added by Ge Wang\r
- jack_latency_callback_mode_t cbmode = (mode == INPUT ? JackCaptureLatency : JackPlaybackLatency);\r
- // the range (usually the min and max are equal)\r
- jack_latency_range_t latrange; latrange.min = latrange.max = 0;\r
- // get the latency range\r
- jack_port_get_latency_range( jack_port_by_name( client, ports[firstChannel] ), cbmode, &latrange );\r
- // be optimistic, use the min!\r
- stream_.latency[mode] = latrange.min;\r
- //stream_.latency[mode] = jack_port_get_latency( jack_port_by_name( client, ports[ firstChannel ] ) );\r
- }\r
- free( ports );\r
-\r
- // The jack server always uses 32-bit floating-point data.\r
- stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;\r
- stream_.userFormat = format;\r
-\r
- if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;\r
- else stream_.userInterleaved = true;\r
-\r
- // Jack always uses non-interleaved buffers.\r
- stream_.deviceInterleaved[mode] = false;\r
-\r
- // Jack always provides host byte-ordered data.\r
- stream_.doByteSwap[mode] = false;\r
-\r
- // Get the buffer size. The buffer size and number of buffers\r
- // (periods) is set when the jack server is started.\r
- stream_.bufferSize = (int) jack_get_buffer_size( client );\r
- *bufferSize = stream_.bufferSize;\r
-\r
- stream_.nDeviceChannels[mode] = channels;\r
- stream_.nUserChannels[mode] = channels;\r
-\r
- // Set flags for buffer conversion.\r
- stream_.doConvertBuffer[mode] = false;\r
- if ( stream_.userFormat != stream_.deviceFormat[mode] )\r
- stream_.doConvertBuffer[mode] = true;\r
- if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&\r
- stream_.nUserChannels[mode] > 1 )\r
- stream_.doConvertBuffer[mode] = true;\r
-\r
- // Allocate our JackHandle structure for the stream.\r
- if ( handle == 0 ) {\r
- try {\r
- handle = new JackHandle;\r
- }\r
- catch ( std::bad_alloc& ) {\r
- errorText_ = "RtApiJack::probeDeviceOpen: error allocating JackHandle memory.";\r
- goto error;\r
- }\r
-\r
- if ( pthread_cond_init(&handle->condition, NULL) ) {\r
- errorText_ = "RtApiJack::probeDeviceOpen: error initializing pthread condition variable.";\r
- goto error;\r
- }\r
- stream_.apiHandle = (void *) handle;\r
- handle->client = client;\r
- }\r
- handle->deviceName[mode] = deviceName;\r
-\r
- // Allocate necessary internal buffers.\r
- unsigned long bufferBytes;\r
- bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );\r
- stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );\r
- if ( stream_.userBuffer[mode] == NULL ) {\r
- errorText_ = "RtApiJack::probeDeviceOpen: error allocating user buffer memory.";\r
- goto error;\r
- }\r
-\r
- if ( stream_.doConvertBuffer[mode] ) {\r
-\r
- bool makeBuffer = true;\r
- if ( mode == OUTPUT )\r
- bufferBytes = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );\r
- else { // mode == INPUT\r
- bufferBytes = stream_.nDeviceChannels[1] * formatBytes( stream_.deviceFormat[1] );\r
- if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {\r
- unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);\r
- if ( bufferBytes < bytesOut ) makeBuffer = false;\r
- }\r
- }\r
-\r
- if ( makeBuffer ) {\r
- bufferBytes *= *bufferSize;\r
- if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );\r
- stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );\r
- if ( stream_.deviceBuffer == NULL ) {\r
- errorText_ = "RtApiJack::probeDeviceOpen: error allocating device buffer memory.";\r
- goto error;\r
- }\r
- }\r
- }\r
-\r
- // Allocate memory for the Jack ports (channels) identifiers.\r
- handle->ports[mode] = (jack_port_t **) malloc ( sizeof (jack_port_t *) * channels );\r
- if ( handle->ports[mode] == NULL ) {\r
- errorText_ = "RtApiJack::probeDeviceOpen: error allocating port memory.";\r
- goto error;\r
- }\r
-\r
- stream_.device[mode] = device;\r
- stream_.channelOffset[mode] = firstChannel;\r
- stream_.state = STREAM_STOPPED;\r
- stream_.callbackInfo.object = (void *) this;\r
-\r
- if ( stream_.mode == OUTPUT && mode == INPUT )\r
- // We had already set up the stream for output.\r
- stream_.mode = DUPLEX;\r
- else {\r
- stream_.mode = mode;\r
- jack_set_process_callback( handle->client, jackCallbackHandler, (void *) &stream_.callbackInfo );\r
- jack_set_xrun_callback( handle->client, jackXrun, (void *) &handle );\r
- jack_on_shutdown( handle->client, jackShutdown, (void *) &stream_.callbackInfo );\r
- }\r
-\r
- // Register our ports.\r
- char label[64];\r
- if ( mode == OUTPUT ) {\r
- for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {\r
- snprintf( label, 64, "outport %d", i );\r
- handle->ports[0][i] = jack_port_register( handle->client, (const char *)label,\r
- JACK_DEFAULT_AUDIO_TYPE, JackPortIsOutput, 0 );\r
- }\r
- }\r
- else {\r
- for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {\r
- snprintf( label, 64, "inport %d", i );\r
- handle->ports[1][i] = jack_port_register( handle->client, (const char *)label,\r
- JACK_DEFAULT_AUDIO_TYPE, JackPortIsInput, 0 );\r
- }\r
- }\r
-\r
- // Setup the buffer conversion information structure. We don't use\r
- // buffers to do channel offsets, so we override that parameter\r
- // here.\r
- if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, 0 );\r
-\r
- return SUCCESS;\r
-\r
- error:\r
- if ( handle ) {\r
- pthread_cond_destroy( &handle->condition );\r
- jack_client_close( handle->client );\r
-\r
- if ( handle->ports[0] ) free( handle->ports[0] );\r
- if ( handle->ports[1] ) free( handle->ports[1] );\r
-\r
- delete handle;\r
- stream_.apiHandle = 0;\r
- }\r
-\r
- for ( int i=0; i<2; i++ ) {\r
- if ( stream_.userBuffer[i] ) {\r
- free( stream_.userBuffer[i] );\r
- stream_.userBuffer[i] = 0;\r
- }\r
- }\r
-\r
- if ( stream_.deviceBuffer ) {\r
- free( stream_.deviceBuffer );\r
- stream_.deviceBuffer = 0;\r
- }\r
-\r
- return FAILURE;\r
-}\r
-\r
-void RtApiJack :: closeStream( void )\r
-{\r
- if ( stream_.state == STREAM_CLOSED ) {\r
- errorText_ = "RtApiJack::closeStream(): no open stream to close!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- JackHandle *handle = (JackHandle *) stream_.apiHandle;\r
- if ( handle ) {\r
-\r
- if ( stream_.state == STREAM_RUNNING )\r
- jack_deactivate( handle->client );\r
-\r
- jack_client_close( handle->client );\r
- }\r
-\r
- if ( handle ) {\r
- if ( handle->ports[0] ) free( handle->ports[0] );\r
- if ( handle->ports[1] ) free( handle->ports[1] );\r
- pthread_cond_destroy( &handle->condition );\r
- delete handle;\r
- stream_.apiHandle = 0;\r
- }\r
-\r
- for ( int i=0; i<2; i++ ) {\r
- if ( stream_.userBuffer[i] ) {\r
- free( stream_.userBuffer[i] );\r
- stream_.userBuffer[i] = 0;\r
- }\r
- }\r
-\r
- if ( stream_.deviceBuffer ) {\r
- free( stream_.deviceBuffer );\r
- stream_.deviceBuffer = 0;\r
- }\r
-\r
- stream_.mode = UNINITIALIZED;\r
- stream_.state = STREAM_CLOSED;\r
-}\r
-\r
-void RtApiJack :: startStream( void )\r
-{\r
- verifyStream();\r
- if ( stream_.state == STREAM_RUNNING ) {\r
- errorText_ = "RtApiJack::startStream(): the stream is already running!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- JackHandle *handle = (JackHandle *) stream_.apiHandle;\r
- int result = jack_activate( handle->client );\r
- if ( result ) {\r
- errorText_ = "RtApiJack::startStream(): unable to activate JACK client!";\r
- goto unlock;\r
- }\r
-\r
- const char **ports;\r
-\r
- // Get the list of available ports.\r
- if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {\r
- result = 1;\r
- ports = jack_get_ports( handle->client, handle->deviceName[0].c_str(), NULL, JackPortIsInput);\r
- if ( ports == NULL) {\r
- errorText_ = "RtApiJack::startStream(): error determining available JACK input ports!";\r
- goto unlock;\r
- }\r
-\r
- // Now make the port connections. Since RtAudio wasn't designed to\r
- // allow the user to select particular channels of a device, we'll\r
- // just open the first "nChannels" ports with offset.\r
- for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {\r
- result = 1;\r
- if ( ports[ stream_.channelOffset[0] + i ] )\r
- result = jack_connect( handle->client, jack_port_name( handle->ports[0][i] ), ports[ stream_.channelOffset[0] + i ] );\r
- if ( result ) {\r
- free( ports );\r
- errorText_ = "RtApiJack::startStream(): error connecting output ports!";\r
- goto unlock;\r
- }\r
- }\r
- free(ports);\r
- }\r
-\r
- if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {\r
- result = 1;\r
- ports = jack_get_ports( handle->client, handle->deviceName[1].c_str(), NULL, JackPortIsOutput );\r
- if ( ports == NULL) {\r
- errorText_ = "RtApiJack::startStream(): error determining available JACK output ports!";\r
- goto unlock;\r
- }\r
-\r
- // Now make the port connections. See note above.\r
- for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {\r
- result = 1;\r
- if ( ports[ stream_.channelOffset[1] + i ] )\r
- result = jack_connect( handle->client, ports[ stream_.channelOffset[1] + i ], jack_port_name( handle->ports[1][i] ) );\r
- if ( result ) {\r
- free( ports );\r
- errorText_ = "RtApiJack::startStream(): error connecting input ports!";\r
- goto unlock;\r
- }\r
- }\r
- free(ports);\r
- }\r
-\r
- handle->drainCounter = 0;\r
- handle->internalDrain = false;\r
- stream_.state = STREAM_RUNNING;\r
-\r
- unlock:\r
- if ( result == 0 ) return;\r
- error( RtAudioError::SYSTEM_ERROR );\r
-}\r
-\r
-void RtApiJack :: stopStream( void )\r
-{\r
- verifyStream();\r
- if ( stream_.state == STREAM_STOPPED ) {\r
- errorText_ = "RtApiJack::stopStream(): the stream is already stopped!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- JackHandle *handle = (JackHandle *) stream_.apiHandle;\r
- if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {\r
-\r
- if ( handle->drainCounter == 0 ) {\r
- handle->drainCounter = 2;\r
- pthread_cond_wait( &handle->condition, &stream_.mutex ); // block until signaled\r
- }\r
- }\r
-\r
- jack_deactivate( handle->client );\r
- stream_.state = STREAM_STOPPED;\r
-}\r
-\r
-void RtApiJack :: abortStream( void )\r
-{\r
- verifyStream();\r
- if ( stream_.state == STREAM_STOPPED ) {\r
- errorText_ = "RtApiJack::abortStream(): the stream is already stopped!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- JackHandle *handle = (JackHandle *) stream_.apiHandle;\r
- handle->drainCounter = 2;\r
-\r
- stopStream();\r
-}\r
-\r
-// This function will be called by a spawned thread when the user\r
-// callback function signals that the stream should be stopped or\r
-// aborted. It is necessary to handle it this way because the\r
-// callbackEvent() function must return before the jack_deactivate()\r
-// function will return.\r
-static void *jackStopStream( void *ptr )\r
-{\r
- CallbackInfo *info = (CallbackInfo *) ptr;\r
- RtApiJack *object = (RtApiJack *) info->object;\r
-\r
- object->stopStream();\r
- pthread_exit( NULL );\r
-}\r
-\r
-bool RtApiJack :: callbackEvent( unsigned long nframes )\r
-{\r
- if ( stream_.state == STREAM_STOPPED || stream_.state == STREAM_STOPPING ) return SUCCESS;\r
- if ( stream_.state == STREAM_CLOSED ) {\r
- errorText_ = "RtApiCore::callbackEvent(): the stream is closed ... this shouldn't happen!";\r
- error( RtAudioError::WARNING );\r
- return FAILURE;\r
- }\r
- if ( stream_.bufferSize != nframes ) {\r
- errorText_ = "RtApiCore::callbackEvent(): the JACK buffer size has changed ... cannot process!";\r
- error( RtAudioError::WARNING );\r
- return FAILURE;\r
- }\r
-\r
- CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;\r
- JackHandle *handle = (JackHandle *) stream_.apiHandle;\r
-\r
- // Check if we were draining the stream and signal is finished.\r
- if ( handle->drainCounter > 3 ) {\r
- ThreadHandle threadId;\r
-\r
- stream_.state = STREAM_STOPPING;\r
- if ( handle->internalDrain == true )\r
- pthread_create( &threadId, NULL, jackStopStream, info );\r
- else\r
- pthread_cond_signal( &handle->condition );\r
- return SUCCESS;\r
- }\r
-\r
- // Invoke user callback first, to get fresh output data.\r
- if ( handle->drainCounter == 0 ) {\r
- RtAudioCallback callback = (RtAudioCallback) info->callback;\r
- double streamTime = getStreamTime();\r
- RtAudioStreamStatus status = 0;\r
- if ( stream_.mode != INPUT && handle->xrun[0] == true ) {\r
- status |= RTAUDIO_OUTPUT_UNDERFLOW;\r
- handle->xrun[0] = false;\r
- }\r
- if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {\r
- status |= RTAUDIO_INPUT_OVERFLOW;\r
- handle->xrun[1] = false;\r
- }\r
- int cbReturnValue = callback( stream_.userBuffer[0], stream_.userBuffer[1],\r
- stream_.bufferSize, streamTime, status, info->userData );\r
- if ( cbReturnValue == 2 ) {\r
- stream_.state = STREAM_STOPPING;\r
- handle->drainCounter = 2;\r
- ThreadHandle id;\r
- pthread_create( &id, NULL, jackStopStream, info );\r
- return SUCCESS;\r
- }\r
- else if ( cbReturnValue == 1 ) {\r
- handle->drainCounter = 1;\r
- handle->internalDrain = true;\r
- }\r
- }\r
-\r
- jack_default_audio_sample_t *jackbuffer;\r
- unsigned long bufferBytes = nframes * sizeof( jack_default_audio_sample_t );\r
- if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {\r
-\r
- if ( handle->drainCounter > 1 ) { // write zeros to the output stream\r
-\r
- for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {\r
- jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );\r
- memset( jackbuffer, 0, bufferBytes );\r
- }\r
-\r
- }\r
- else if ( stream_.doConvertBuffer[0] ) {\r
-\r
- convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );\r
-\r
- for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {\r
- jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );\r
- memcpy( jackbuffer, &stream_.deviceBuffer[i*bufferBytes], bufferBytes );\r
- }\r
- }\r
- else { // no buffer conversion\r
- for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {\r
- jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );\r
- memcpy( jackbuffer, &stream_.userBuffer[0][i*bufferBytes], bufferBytes );\r
- }\r
- }\r
- }\r
-\r
- // Don't bother draining input\r
- if ( handle->drainCounter ) {\r
- handle->drainCounter++;\r
- goto unlock;\r
- }\r
-\r
- if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {\r
-\r
- if ( stream_.doConvertBuffer[1] ) {\r
- for ( unsigned int i=0; i<stream_.nDeviceChannels[1]; i++ ) {\r
- jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[1][i], (jack_nframes_t) nframes );\r
- memcpy( &stream_.deviceBuffer[i*bufferBytes], jackbuffer, bufferBytes );\r
- }\r
- convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );\r
- }\r
- else { // no buffer conversion\r
- for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {\r
- jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[1][i], (jack_nframes_t) nframes );\r
- memcpy( &stream_.userBuffer[1][i*bufferBytes], jackbuffer, bufferBytes );\r
- }\r
- }\r
- }\r
-\r
- unlock:\r
- RtApi::tickStreamTime();\r
- return SUCCESS;\r
-}\r
- //******************** End of __UNIX_JACK__ *********************//\r
-#endif\r
-\r
-#if defined(__WINDOWS_ASIO__) // ASIO API on Windows\r
-\r
-// The ASIO API is designed around a callback scheme, so this\r
-// implementation is similar to that used for OS-X CoreAudio and Linux\r
-// Jack. The primary constraint with ASIO is that it only allows\r
-// access to a single driver at a time. Thus, it is not possible to\r
-// have more than one simultaneous RtAudio stream.\r
-//\r
-// This implementation also requires a number of external ASIO files\r
-// and a few global variables. The ASIO callback scheme does not\r
-// allow for the passing of user data, so we must create a global\r
-// pointer to our callbackInfo structure.\r
-//\r
-// On unix systems, we make use of a pthread condition variable.\r
-// Since there is no equivalent in Windows, I hacked something based\r
-// on information found in\r
-// http://www.cs.wustl.edu/~schmidt/win32-cv-1.html.\r
-\r
-#include "asiosys.h"\r
-#include "asio.h"\r
-#include "iasiothiscallresolver.h"\r
-#include "asiodrivers.h"\r
-#include <cmath>\r
-\r
-static AsioDrivers drivers;\r
-static ASIOCallbacks asioCallbacks;\r
-static ASIODriverInfo driverInfo;\r
-static CallbackInfo *asioCallbackInfo;\r
-static bool asioXRun;\r
-\r
-struct AsioHandle {\r
- int drainCounter; // Tracks callback counts when draining\r
- bool internalDrain; // Indicates if stop is initiated from callback or not.\r
- ASIOBufferInfo *bufferInfos;\r
- HANDLE condition;\r
-\r
- AsioHandle()\r
- :drainCounter(0), internalDrain(false), bufferInfos(0) {}\r
-};\r
-\r
-// Function declarations (definitions at end of section)\r
-static const char* getAsioErrorString( ASIOError result );\r
-static void sampleRateChanged( ASIOSampleRate sRate );\r
-static long asioMessages( long selector, long value, void* message, double* opt );\r
-\r
-RtApiAsio :: RtApiAsio()\r
-{\r
- // ASIO cannot run on a multi-threaded appartment. You can call\r
- // CoInitialize beforehand, but it must be for appartment threading\r
- // (in which case, CoInitilialize will return S_FALSE here).\r
- coInitialized_ = false;\r
- HRESULT hr = CoInitialize( NULL );\r
- if ( FAILED(hr) ) {\r
- errorText_ = "RtApiAsio::ASIO requires a single-threaded appartment. Call CoInitializeEx(0,COINIT_APARTMENTTHREADED)";\r
- error( RtAudioError::WARNING );\r
- }\r
- coInitialized_ = true;\r
-\r
- drivers.removeCurrentDriver();\r
- driverInfo.asioVersion = 2;\r
-\r
- // See note in DirectSound implementation about GetDesktopWindow().\r
- driverInfo.sysRef = GetForegroundWindow();\r
-}\r
-\r
-RtApiAsio :: ~RtApiAsio()\r
-{\r
- if ( stream_.state != STREAM_CLOSED ) closeStream();\r
- if ( coInitialized_ ) CoUninitialize();\r
-}\r
-\r
-unsigned int RtApiAsio :: getDeviceCount( void )\r
-{\r
- return (unsigned int) drivers.asioGetNumDev();\r
-}\r
-\r
-RtAudio::DeviceInfo RtApiAsio :: getDeviceInfo( unsigned int device )\r
-{\r
- RtAudio::DeviceInfo info;\r
- info.probed = false;\r
-\r
- // Get device ID\r
- unsigned int nDevices = getDeviceCount();\r
- if ( nDevices == 0 ) {\r
- errorText_ = "RtApiAsio::getDeviceInfo: no devices found!";\r
- error( RtAudioError::INVALID_USE );\r
- return info;\r
- }\r
-\r
- if ( device >= nDevices ) {\r
- errorText_ = "RtApiAsio::getDeviceInfo: device ID is invalid!";\r
- error( RtAudioError::INVALID_USE );\r
- return info;\r
- }\r
-\r
- // If a stream is already open, we cannot probe other devices. Thus, use the saved results.\r
- if ( stream_.state != STREAM_CLOSED ) {\r
- if ( device >= devices_.size() ) {\r
- errorText_ = "RtApiAsio::getDeviceInfo: device ID was not present before stream was opened.";\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
- return devices_[ device ];\r
- }\r
-\r
- char driverName[32];\r
- ASIOError result = drivers.asioGetDriverName( (int) device, driverName, 32 );\r
- if ( result != ASE_OK ) {\r
- errorStream_ << "RtApiAsio::getDeviceInfo: unable to get driver name (" << getAsioErrorString( result ) << ").";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
-\r
- info.name = driverName;\r
-\r
- if ( !drivers.loadDriver( driverName ) ) {\r
- errorStream_ << "RtApiAsio::getDeviceInfo: unable to load driver (" << driverName << ").";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
-\r
- result = ASIOInit( &driverInfo );\r
- if ( result != ASE_OK ) {\r
- errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") initializing driver (" << driverName << ").";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
-\r
- // Determine the device channel information.\r
- long inputChannels, outputChannels;\r
- result = ASIOGetChannels( &inputChannels, &outputChannels );\r
- if ( result != ASE_OK ) {\r
- drivers.removeCurrentDriver();\r
- errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") getting channel count (" << driverName << ").";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
-\r
- info.outputChannels = outputChannels;\r
- info.inputChannels = inputChannels;\r
- if ( info.outputChannels > 0 && info.inputChannels > 0 )\r
- info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;\r
-\r
- // Determine the supported sample rates.\r
- info.sampleRates.clear();\r
- for ( unsigned int i=0; i<MAX_SAMPLE_RATES; i++ ) {\r
- result = ASIOCanSampleRate( (ASIOSampleRate) SAMPLE_RATES[i] );\r
- if ( result == ASE_OK ) {\r
- info.sampleRates.push_back( SAMPLE_RATES[i] );\r
-\r
- if ( !info.preferredSampleRate || ( SAMPLE_RATES[i] <= 48000 && SAMPLE_RATES[i] > info.preferredSampleRate ) )\r
- info.preferredSampleRate = SAMPLE_RATES[i];\r
- }\r
- }\r
-\r
- // Determine supported data types ... just check first channel and assume rest are the same.\r
- ASIOChannelInfo channelInfo;\r
- channelInfo.channel = 0;\r
- channelInfo.isInput = true;\r
- if ( info.inputChannels <= 0 ) channelInfo.isInput = false;\r
- result = ASIOGetChannelInfo( &channelInfo );\r
- if ( result != ASE_OK ) {\r
- drivers.removeCurrentDriver();\r
- errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") getting driver channel info (" << driverName << ").";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
-\r
- info.nativeFormats = 0;\r
- if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB )\r
- info.nativeFormats |= RTAUDIO_SINT16;\r
- else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB )\r
- info.nativeFormats |= RTAUDIO_SINT32;\r
- else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB )\r
- info.nativeFormats |= RTAUDIO_FLOAT32;\r
- else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB )\r
- info.nativeFormats |= RTAUDIO_FLOAT64;\r
- else if ( channelInfo.type == ASIOSTInt24MSB || channelInfo.type == ASIOSTInt24LSB )\r
- info.nativeFormats |= RTAUDIO_SINT24;\r
-\r
- if ( info.outputChannels > 0 )\r
- if ( getDefaultOutputDevice() == device ) info.isDefaultOutput = true;\r
- if ( info.inputChannels > 0 )\r
- if ( getDefaultInputDevice() == device ) info.isDefaultInput = true;\r
-\r
- info.probed = true;\r
- drivers.removeCurrentDriver();\r
- return info;\r
-}\r
-\r
-static void bufferSwitch( long index, ASIOBool /*processNow*/ )\r
-{\r
- RtApiAsio *object = (RtApiAsio *) asioCallbackInfo->object;\r
- object->callbackEvent( index );\r
-}\r
-\r
-void RtApiAsio :: saveDeviceInfo( void )\r
-{\r
- devices_.clear();\r
-\r
- unsigned int nDevices = getDeviceCount();\r
- devices_.resize( nDevices );\r
- for ( unsigned int i=0; i<nDevices; i++ )\r
- devices_[i] = getDeviceInfo( i );\r
-}\r
-\r
-bool RtApiAsio :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,\r
- unsigned int firstChannel, unsigned int sampleRate,\r
- RtAudioFormat format, unsigned int *bufferSize,\r
- RtAudio::StreamOptions *options )\r
-{////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////\r
-\r
- bool isDuplexInput = mode == INPUT && stream_.mode == OUTPUT;\r
-\r
- // For ASIO, a duplex stream MUST use the same driver.\r
- if ( isDuplexInput && stream_.device[0] != device ) {\r
- errorText_ = "RtApiAsio::probeDeviceOpen: an ASIO duplex stream must use the same device for input and output!";\r
- return FAILURE;\r
- }\r
-\r
- char driverName[32];\r
- ASIOError result = drivers.asioGetDriverName( (int) device, driverName, 32 );\r
- if ( result != ASE_OK ) {\r
- errorStream_ << "RtApiAsio::probeDeviceOpen: unable to get driver name (" << getAsioErrorString( result ) << ").";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // Only load the driver once for duplex stream.\r
- if ( !isDuplexInput ) {\r
- // The getDeviceInfo() function will not work when a stream is open\r
- // because ASIO does not allow multiple devices to run at the same\r
- // time. Thus, we'll probe the system before opening a stream and\r
- // save the results for use by getDeviceInfo().\r
- this->saveDeviceInfo();\r
-\r
- if ( !drivers.loadDriver( driverName ) ) {\r
- errorStream_ << "RtApiAsio::probeDeviceOpen: unable to load driver (" << driverName << ").";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- result = ASIOInit( &driverInfo );\r
- if ( result != ASE_OK ) {\r
- errorStream_ << "RtApiAsio::probeDeviceOpen: error (" << getAsioErrorString( result ) << ") initializing driver (" << driverName << ").";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
- }\r
-\r
- // keep them before any "goto error", they are used for error cleanup + goto device boundary checks\r
- bool buffersAllocated = false;\r
- AsioHandle *handle = (AsioHandle *) stream_.apiHandle;\r
- unsigned int nChannels;\r
-\r
-\r
- // Check the device channel count.\r
- long inputChannels, outputChannels;\r
- result = ASIOGetChannels( &inputChannels, &outputChannels );\r
- if ( result != ASE_OK ) {\r
- errorStream_ << "RtApiAsio::probeDeviceOpen: error (" << getAsioErrorString( result ) << ") getting channel count (" << driverName << ").";\r
- errorText_ = errorStream_.str();\r
- goto error;\r
- }\r
-\r
- if ( ( mode == OUTPUT && (channels+firstChannel) > (unsigned int) outputChannels) ||\r
- ( mode == INPUT && (channels+firstChannel) > (unsigned int) inputChannels) ) {\r
- errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") does not support requested channel count (" << channels << ") + offset (" << firstChannel << ").";\r
- errorText_ = errorStream_.str();\r
- goto error;\r
- }\r
- stream_.nDeviceChannels[mode] = channels;\r
- stream_.nUserChannels[mode] = channels;\r
- stream_.channelOffset[mode] = firstChannel;\r
-\r
- // Verify the sample rate is supported.\r
- result = ASIOCanSampleRate( (ASIOSampleRate) sampleRate );\r
- if ( result != ASE_OK ) {\r
- errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") does not support requested sample rate (" << sampleRate << ").";\r
- errorText_ = errorStream_.str();\r
- goto error;\r
- }\r
-\r
- // Get the current sample rate\r
- ASIOSampleRate currentRate;\r
-  result = ASIOGetSampleRate( &currentRate );
- if ( result != ASE_OK ) {\r
- errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error getting sample rate.";\r
- errorText_ = errorStream_.str();\r
- goto error;\r
- }\r
-\r
- // Set the sample rate only if necessary\r
- if ( currentRate != sampleRate ) {\r
- result = ASIOSetSampleRate( (ASIOSampleRate) sampleRate );\r
- if ( result != ASE_OK ) {\r
- errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error setting sample rate (" << sampleRate << ").";\r
- errorText_ = errorStream_.str();\r
- goto error;\r
- }\r
- }\r
-\r
- // Determine the driver data type.\r
- ASIOChannelInfo channelInfo;\r
- channelInfo.channel = 0;\r
- if ( mode == OUTPUT ) channelInfo.isInput = false;\r
- else channelInfo.isInput = true;\r
- result = ASIOGetChannelInfo( &channelInfo );\r
- if ( result != ASE_OK ) {\r
- errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting data format.";\r
- errorText_ = errorStream_.str();\r
- goto error;\r
- }\r
-\r
- // Assuming WINDOWS host is always little-endian.\r
- stream_.doByteSwap[mode] = false;\r
- stream_.userFormat = format;\r
- stream_.deviceFormat[mode] = 0;\r
- if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB ) {\r
- stream_.deviceFormat[mode] = RTAUDIO_SINT16;\r
- if ( channelInfo.type == ASIOSTInt16MSB ) stream_.doByteSwap[mode] = true;\r
- }\r
- else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB ) {\r
- stream_.deviceFormat[mode] = RTAUDIO_SINT32;\r
- if ( channelInfo.type == ASIOSTInt32MSB ) stream_.doByteSwap[mode] = true;\r
- }\r
- else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB ) {\r
- stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;\r
- if ( channelInfo.type == ASIOSTFloat32MSB ) stream_.doByteSwap[mode] = true;\r
- }\r
- else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB ) {\r
- stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;\r
- if ( channelInfo.type == ASIOSTFloat64MSB ) stream_.doByteSwap[mode] = true;\r
- }\r
- else if ( channelInfo.type == ASIOSTInt24MSB || channelInfo.type == ASIOSTInt24LSB ) {\r
- stream_.deviceFormat[mode] = RTAUDIO_SINT24;\r
- if ( channelInfo.type == ASIOSTInt24MSB ) stream_.doByteSwap[mode] = true;\r
- }\r
-\r
- if ( stream_.deviceFormat[mode] == 0 ) {\r
- errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") data format not supported by RtAudio.";\r
- errorText_ = errorStream_.str();\r
- goto error;\r
- }\r
-\r
- // Set the buffer size. For a duplex stream, this will end up\r
- // setting the buffer size based on the input constraints, which\r
- // should be ok.\r
- long minSize, maxSize, preferSize, granularity;\r
- result = ASIOGetBufferSize( &minSize, &maxSize, &preferSize, &granularity );\r
- if ( result != ASE_OK ) {\r
- errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting buffer size.";\r
- errorText_ = errorStream_.str();\r
- goto error;\r
- }\r
-\r
- if ( isDuplexInput ) {\r
- // When this is the duplex input (output was opened before), then we have to use the same\r
- // buffersize as the output, because it might use the preferred buffer size, which most\r
- // likely wasn't passed as input to this. The buffer sizes have to be identically anyway,\r
- // So instead of throwing an error, make them equal. The caller uses the reference\r
- // to the "bufferSize" param as usual to set up processing buffers.\r
-\r
- *bufferSize = stream_.bufferSize;\r
-\r
- } else {\r
- if ( *bufferSize == 0 ) *bufferSize = preferSize;\r
- else if ( *bufferSize < (unsigned int) minSize ) *bufferSize = (unsigned int) minSize;\r
- else if ( *bufferSize > (unsigned int) maxSize ) *bufferSize = (unsigned int) maxSize;\r
- else if ( granularity == -1 ) {\r
- // Make sure bufferSize is a power of two.\r
- int log2_of_min_size = 0;\r
- int log2_of_max_size = 0;\r
-\r
- for ( unsigned int i = 0; i < sizeof(long) * 8; i++ ) {\r
- if ( minSize & ((long)1 << i) ) log2_of_min_size = i;\r
- if ( maxSize & ((long)1 << i) ) log2_of_max_size = i;\r
- }\r
-\r
- long min_delta = std::abs( (long)*bufferSize - ((long)1 << log2_of_min_size) );\r
- int min_delta_num = log2_of_min_size;\r
-\r
- for (int i = log2_of_min_size + 1; i <= log2_of_max_size; i++) {\r
- long current_delta = std::abs( (long)*bufferSize - ((long)1 << i) );\r
- if (current_delta < min_delta) {\r
- min_delta = current_delta;\r
- min_delta_num = i;\r
- }\r
- }\r
-\r
- *bufferSize = ( (unsigned int)1 << min_delta_num );\r
- if ( *bufferSize < (unsigned int) minSize ) *bufferSize = (unsigned int) minSize;\r
- else if ( *bufferSize > (unsigned int) maxSize ) *bufferSize = (unsigned int) maxSize;\r
- }\r
- else if ( granularity != 0 ) {\r
- // Set to an even multiple of granularity, rounding up.\r
- *bufferSize = (*bufferSize + granularity-1) / granularity * granularity;\r
- }\r
- }\r
-\r
- /*\r
- // we don't use it anymore, see above!\r
- // Just left it here for the case...\r
- if ( isDuplexInput && stream_.bufferSize != *bufferSize ) {\r
- errorText_ = "RtApiAsio::probeDeviceOpen: input/output buffersize discrepancy!";\r
- goto error;\r
- }\r
- */\r
-\r
- stream_.bufferSize = *bufferSize;\r
- stream_.nBuffers = 2;\r
-\r
- if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;\r
- else stream_.userInterleaved = true;\r
-\r
- // ASIO always uses non-interleaved buffers.\r
- stream_.deviceInterleaved[mode] = false;\r
-\r
- // Allocate, if necessary, our AsioHandle structure for the stream.\r
- if ( handle == 0 ) {\r
- try {\r
- handle = new AsioHandle;\r
- }\r
- catch ( std::bad_alloc& ) {\r
- errorText_ = "RtApiAsio::probeDeviceOpen: error allocating AsioHandle memory.";\r
- goto error;\r
- }\r
- handle->bufferInfos = 0;\r
-\r
- // Create a manual-reset event.\r
- handle->condition = CreateEvent( NULL, // no security\r
- TRUE, // manual-reset\r
- FALSE, // non-signaled initially\r
- NULL ); // unnamed\r
- stream_.apiHandle = (void *) handle;\r
- }\r
-\r
- // Create the ASIO internal buffers. Since RtAudio sets up input\r
- // and output separately, we'll have to dispose of previously\r
- // created output buffers for a duplex stream.\r
- if ( mode == INPUT && stream_.mode == OUTPUT ) {\r
- ASIODisposeBuffers();\r
- if ( handle->bufferInfos ) free( handle->bufferInfos );\r
- }\r
-\r
- // Allocate, initialize, and save the bufferInfos in our stream callbackInfo structure.\r
- unsigned int i;\r
- nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];\r
- handle->bufferInfos = (ASIOBufferInfo *) malloc( nChannels * sizeof(ASIOBufferInfo) );\r
- if ( handle->bufferInfos == NULL ) {\r
- errorStream_ << "RtApiAsio::probeDeviceOpen: error allocating bufferInfo memory for driver (" << driverName << ").";\r
- errorText_ = errorStream_.str();\r
- goto error;\r
- }\r
-\r
- ASIOBufferInfo *infos;\r
- infos = handle->bufferInfos;\r
- for ( i=0; i<stream_.nDeviceChannels[0]; i++, infos++ ) {\r
- infos->isInput = ASIOFalse;\r
- infos->channelNum = i + stream_.channelOffset[0];\r
- infos->buffers[0] = infos->buffers[1] = 0;\r
- }\r
- for ( i=0; i<stream_.nDeviceChannels[1]; i++, infos++ ) {\r
- infos->isInput = ASIOTrue;\r
- infos->channelNum = i + stream_.channelOffset[1];\r
- infos->buffers[0] = infos->buffers[1] = 0;\r
- }\r
-\r
- // prepare for callbacks\r
- stream_.sampleRate = sampleRate;\r
- stream_.device[mode] = device;\r
- stream_.mode = isDuplexInput ? DUPLEX : mode;\r
-\r
- // store this class instance before registering callbacks, that are going to use it\r
- asioCallbackInfo = &stream_.callbackInfo;\r
- stream_.callbackInfo.object = (void *) this;\r
-\r
- // Set up the ASIO callback structure and create the ASIO data buffers.\r
- asioCallbacks.bufferSwitch = &bufferSwitch;\r
- asioCallbacks.sampleRateDidChange = &sampleRateChanged;\r
- asioCallbacks.asioMessage = &asioMessages;\r
- asioCallbacks.bufferSwitchTimeInfo = NULL;\r
- result = ASIOCreateBuffers( handle->bufferInfos, nChannels, stream_.bufferSize, &asioCallbacks );\r
- if ( result != ASE_OK ) {\r
- // Standard method failed. This can happen with strict/misbehaving drivers that return valid buffer size ranges\r
- // but only accept the preferred buffer size as parameter for ASIOCreateBuffers. eg. Creatives ASIO driver\r
- // in that case, let's be naïve and try that instead\r
- *bufferSize = preferSize;\r
- stream_.bufferSize = *bufferSize;\r
- result = ASIOCreateBuffers( handle->bufferInfos, nChannels, stream_.bufferSize, &asioCallbacks );\r
- }\r
-\r
- if ( result != ASE_OK ) {\r
- errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") creating buffers.";\r
- errorText_ = errorStream_.str();\r
- goto error;\r
- }\r
- buffersAllocated = true;\r
- stream_.state = STREAM_STOPPED;\r
-\r
- // Set flags for buffer conversion.\r
- stream_.doConvertBuffer[mode] = false;\r
- if ( stream_.userFormat != stream_.deviceFormat[mode] )\r
- stream_.doConvertBuffer[mode] = true;\r
- if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&\r
- stream_.nUserChannels[mode] > 1 )\r
- stream_.doConvertBuffer[mode] = true;\r
-\r
- // Allocate necessary internal buffers\r
- unsigned long bufferBytes;\r
- bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );\r
- stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );\r
- if ( stream_.userBuffer[mode] == NULL ) {\r
- errorText_ = "RtApiAsio::probeDeviceOpen: error allocating user buffer memory.";\r
- goto error;\r
- }\r
-\r
- if ( stream_.doConvertBuffer[mode] ) {\r
-\r
- bool makeBuffer = true;\r
- bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );\r
- if ( isDuplexInput && stream_.deviceBuffer ) {\r
- unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );\r
- if ( bufferBytes <= bytesOut ) makeBuffer = false;\r
- }\r
-\r
- if ( makeBuffer ) {\r
- bufferBytes *= *bufferSize;\r
- if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );\r
- stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );\r
- if ( stream_.deviceBuffer == NULL ) {\r
- errorText_ = "RtApiAsio::probeDeviceOpen: error allocating device buffer memory.";\r
- goto error;\r
- }\r
- }\r
- }\r
-\r
- // Determine device latencies\r
- long inputLatency, outputLatency;\r
- result = ASIOGetLatencies( &inputLatency, &outputLatency );\r
- if ( result != ASE_OK ) {\r
- errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting latency.";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING); // warn but don't fail\r
- }\r
- else {\r
- stream_.latency[0] = outputLatency;\r
- stream_.latency[1] = inputLatency;\r
- }\r
-\r
- // Setup the buffer conversion information structure. We don't use\r
- // buffers to do channel offsets, so we override that parameter\r
- // here.\r
- if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, 0 );\r
-\r
- return SUCCESS;\r
-\r
- error:\r
- if ( !isDuplexInput ) {\r
- // the cleanup for error in the duplex input, is done by RtApi::openStream\r
- // So we clean up for single channel only\r
-\r
- if ( buffersAllocated )\r
- ASIODisposeBuffers();\r
-\r
- drivers.removeCurrentDriver();\r
-\r
- if ( handle ) {\r
- CloseHandle( handle->condition );\r
- if ( handle->bufferInfos )\r
- free( handle->bufferInfos );\r
-\r
- delete handle;\r
- stream_.apiHandle = 0;\r
- }\r
-\r
-\r
- if ( stream_.userBuffer[mode] ) {\r
- free( stream_.userBuffer[mode] );\r
- stream_.userBuffer[mode] = 0;\r
- }\r
-\r
- if ( stream_.deviceBuffer ) {\r
- free( stream_.deviceBuffer );\r
- stream_.deviceBuffer = 0;\r
- }\r
- }\r
-\r
- return FAILURE;\r
-}////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////\r
-\r
-void RtApiAsio :: closeStream()\r
-{\r
- if ( stream_.state == STREAM_CLOSED ) {\r
- errorText_ = "RtApiAsio::closeStream(): no open stream to close!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- if ( stream_.state == STREAM_RUNNING ) {\r
- stream_.state = STREAM_STOPPED;\r
- ASIOStop();\r
- }\r
- ASIODisposeBuffers();\r
- drivers.removeCurrentDriver();\r
-\r
- AsioHandle *handle = (AsioHandle *) stream_.apiHandle;\r
- if ( handle ) {\r
- CloseHandle( handle->condition );\r
- if ( handle->bufferInfos )\r
- free( handle->bufferInfos );\r
- delete handle;\r
- stream_.apiHandle = 0;\r
- }\r
-\r
- for ( int i=0; i<2; i++ ) {\r
- if ( stream_.userBuffer[i] ) {\r
- free( stream_.userBuffer[i] );\r
- stream_.userBuffer[i] = 0;\r
- }\r
- }\r
-\r
- if ( stream_.deviceBuffer ) {\r
- free( stream_.deviceBuffer );\r
- stream_.deviceBuffer = 0;\r
- }\r
-\r
- stream_.mode = UNINITIALIZED;\r
- stream_.state = STREAM_CLOSED;\r
-}\r
-\r
-bool stopThreadCalled = false;\r
-\r
-void RtApiAsio :: startStream()\r
-{\r
- verifyStream();\r
- if ( stream_.state == STREAM_RUNNING ) {\r
- errorText_ = "RtApiAsio::startStream(): the stream is already running!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- AsioHandle *handle = (AsioHandle *) stream_.apiHandle;\r
- ASIOError result = ASIOStart();\r
- if ( result != ASE_OK ) {\r
- errorStream_ << "RtApiAsio::startStream: error (" << getAsioErrorString( result ) << ") starting device.";\r
- errorText_ = errorStream_.str();\r
- goto unlock;\r
- }\r
-\r
- handle->drainCounter = 0;\r
- handle->internalDrain = false;\r
- ResetEvent( handle->condition );\r
- stream_.state = STREAM_RUNNING;\r
- asioXRun = false;\r
-\r
- unlock:\r
- stopThreadCalled = false;\r
-\r
- if ( result == ASE_OK ) return;\r
- error( RtAudioError::SYSTEM_ERROR );\r
-}\r
-\r
-void RtApiAsio :: stopStream()\r
-{\r
- verifyStream();\r
- if ( stream_.state == STREAM_STOPPED ) {\r
- errorText_ = "RtApiAsio::stopStream(): the stream is already stopped!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- AsioHandle *handle = (AsioHandle *) stream_.apiHandle;\r
- if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {\r
- if ( handle->drainCounter == 0 ) {\r
- handle->drainCounter = 2;\r
- WaitForSingleObject( handle->condition, INFINITE ); // block until signaled\r
- }\r
- }\r
-\r
- stream_.state = STREAM_STOPPED;\r
-\r
- ASIOError result = ASIOStop();\r
- if ( result != ASE_OK ) {\r
- errorStream_ << "RtApiAsio::stopStream: error (" << getAsioErrorString( result ) << ") stopping device.";\r
- errorText_ = errorStream_.str();\r
- }\r
-\r
- if ( result == ASE_OK ) return;\r
- error( RtAudioError::SYSTEM_ERROR );\r
-}\r
-\r
-void RtApiAsio :: abortStream()\r
-{\r
- verifyStream();\r
- if ( stream_.state == STREAM_STOPPED ) {\r
- errorText_ = "RtApiAsio::abortStream(): the stream is already stopped!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- // The following lines were commented-out because some behavior was\r
- // noted where the device buffers need to be zeroed to avoid\r
- // continuing sound, even when the device buffers are completely\r
- // disposed. So now, calling abort is the same as calling stop.\r
- // AsioHandle *handle = (AsioHandle *) stream_.apiHandle;\r
- // handle->drainCounter = 2;\r
- stopStream();\r
-}\r
-\r
-// This function will be called by a spawned thread when the user\r
-// callback function signals that the stream should be stopped or\r
-// aborted. It is necessary to handle it this way because the\r
-// callbackEvent() function must return before the ASIOStop()\r
-// function will return.\r
-static unsigned __stdcall asioStopStream( void *ptr )\r
-{\r
- CallbackInfo *info = (CallbackInfo *) ptr;\r
- RtApiAsio *object = (RtApiAsio *) info->object;\r
-\r
- object->stopStream();\r
- _endthreadex( 0 );\r
- return 0;\r
-}\r
-\r
-bool RtApiAsio :: callbackEvent( long bufferIndex )\r
-{\r
- if ( stream_.state == STREAM_STOPPED || stream_.state == STREAM_STOPPING ) return SUCCESS;\r
- if ( stream_.state == STREAM_CLOSED ) {\r
- errorText_ = "RtApiAsio::callbackEvent(): the stream is closed ... this shouldn't happen!";\r
- error( RtAudioError::WARNING );\r
- return FAILURE;\r
- }\r
-\r
- CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;\r
- AsioHandle *handle = (AsioHandle *) stream_.apiHandle;\r
-\r
- // Check if we were draining the stream and signal if finished.\r
- if ( handle->drainCounter > 3 ) {\r
-\r
- stream_.state = STREAM_STOPPING;\r
- if ( handle->internalDrain == false )\r
- SetEvent( handle->condition );\r
- else { // spawn a thread to stop the stream\r
- unsigned threadId;\r
- stream_.callbackInfo.thread = _beginthreadex( NULL, 0, &asioStopStream,\r
- &stream_.callbackInfo, 0, &threadId );\r
- }\r
- return SUCCESS;\r
- }\r
-\r
- // Invoke user callback to get fresh output data UNLESS we are\r
- // draining stream.\r
- if ( handle->drainCounter == 0 ) {\r
- RtAudioCallback callback = (RtAudioCallback) info->callback;\r
- double streamTime = getStreamTime();\r
- RtAudioStreamStatus status = 0;\r
- if ( stream_.mode != INPUT && asioXRun == true ) {\r
- status |= RTAUDIO_OUTPUT_UNDERFLOW;\r
- asioXRun = false;\r
- }\r
- if ( stream_.mode != OUTPUT && asioXRun == true ) {\r
- status |= RTAUDIO_INPUT_OVERFLOW;\r
- asioXRun = false;\r
- }\r
- int cbReturnValue = callback( stream_.userBuffer[0], stream_.userBuffer[1],\r
- stream_.bufferSize, streamTime, status, info->userData );\r
- if ( cbReturnValue == 2 ) {\r
- stream_.state = STREAM_STOPPING;\r
- handle->drainCounter = 2;\r
- unsigned threadId;\r
- stream_.callbackInfo.thread = _beginthreadex( NULL, 0, &asioStopStream,\r
- &stream_.callbackInfo, 0, &threadId );\r
- return SUCCESS;\r
- }\r
- else if ( cbReturnValue == 1 ) {\r
- handle->drainCounter = 1;\r
- handle->internalDrain = true;\r
- }\r
- }\r
-\r
- unsigned int nChannels, bufferBytes, i, j;\r
- nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];\r
- if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {\r
-\r
- bufferBytes = stream_.bufferSize * formatBytes( stream_.deviceFormat[0] );\r
-\r
- if ( handle->drainCounter > 1 ) { // write zeros to the output stream\r
-\r
- for ( i=0, j=0; i<nChannels; i++ ) {\r
- if ( handle->bufferInfos[i].isInput != ASIOTrue )\r
- memset( handle->bufferInfos[i].buffers[bufferIndex], 0, bufferBytes );\r
- }\r
-\r
- }\r
- else if ( stream_.doConvertBuffer[0] ) {\r
-\r
- convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );\r
- if ( stream_.doByteSwap[0] )\r
- byteSwapBuffer( stream_.deviceBuffer,\r
- stream_.bufferSize * stream_.nDeviceChannels[0],\r
- stream_.deviceFormat[0] );\r
-\r
- for ( i=0, j=0; i<nChannels; i++ ) {\r
- if ( handle->bufferInfos[i].isInput != ASIOTrue )\r
- memcpy( handle->bufferInfos[i].buffers[bufferIndex],\r
- &stream_.deviceBuffer[j++*bufferBytes], bufferBytes );\r
- }\r
-\r
- }\r
- else {\r
-\r
- if ( stream_.doByteSwap[0] )\r
- byteSwapBuffer( stream_.userBuffer[0],\r
- stream_.bufferSize * stream_.nUserChannels[0],\r
- stream_.userFormat );\r
-\r
- for ( i=0, j=0; i<nChannels; i++ ) {\r
- if ( handle->bufferInfos[i].isInput != ASIOTrue )\r
- memcpy( handle->bufferInfos[i].buffers[bufferIndex],\r
- &stream_.userBuffer[0][bufferBytes*j++], bufferBytes );\r
- }\r
-\r
- }\r
- }\r
-\r
- // Don't bother draining input\r
- if ( handle->drainCounter ) {\r
- handle->drainCounter++;\r
- goto unlock;\r
- }\r
-\r
- if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {\r
-\r
- bufferBytes = stream_.bufferSize * formatBytes(stream_.deviceFormat[1]);\r
-\r
- if (stream_.doConvertBuffer[1]) {\r
-\r
- // Always interleave ASIO input data.\r
- for ( i=0, j=0; i<nChannels; i++ ) {\r
- if ( handle->bufferInfos[i].isInput == ASIOTrue )\r
- memcpy( &stream_.deviceBuffer[j++*bufferBytes],\r
- handle->bufferInfos[i].buffers[bufferIndex],\r
- bufferBytes );\r
- }\r
-\r
- if ( stream_.doByteSwap[1] )\r
- byteSwapBuffer( stream_.deviceBuffer,\r
- stream_.bufferSize * stream_.nDeviceChannels[1],\r
- stream_.deviceFormat[1] );\r
- convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );\r
-\r
- }\r
- else {\r
- for ( i=0, j=0; i<nChannels; i++ ) {\r
- if ( handle->bufferInfos[i].isInput == ASIOTrue ) {\r
- memcpy( &stream_.userBuffer[1][bufferBytes*j++],\r
- handle->bufferInfos[i].buffers[bufferIndex],\r
- bufferBytes );\r
- }\r
- }\r
-\r
- if ( stream_.doByteSwap[1] )\r
- byteSwapBuffer( stream_.userBuffer[1],\r
- stream_.bufferSize * stream_.nUserChannels[1],\r
- stream_.userFormat );\r
- }\r
- }\r
-\r
- unlock:\r
- // The following call was suggested by Malte Clasen. While the API\r
- // documentation indicates it should not be required, some device\r
- // drivers apparently do not function correctly without it.\r
- ASIOOutputReady();\r
-\r
- RtApi::tickStreamTime();\r
- return SUCCESS;\r
-}\r
-\r
-static void sampleRateChanged( ASIOSampleRate sRate )\r
-{\r
- // The ASIO documentation says that this usually only happens during\r
- // external sync. Audio processing is not stopped by the driver,\r
- // actual sample rate might not have even changed, maybe only the\r
- // sample rate status of an AES/EBU or S/PDIF digital input at the\r
- // audio device.\r
-\r
- RtApi *object = (RtApi *) asioCallbackInfo->object;\r
- try {\r
- object->stopStream();\r
- }\r
- catch ( RtAudioError &exception ) {\r
- std::cerr << "\nRtApiAsio: sampleRateChanged() error (" << exception.getMessage() << ")!\n" << std::endl;\r
- return;\r
- }\r
-\r
- std::cerr << "\nRtApiAsio: driver reports sample rate changed to " << sRate << " ... stream stopped!!!\n" << std::endl;\r
-}\r
-\r
-static long asioMessages( long selector, long value, void* /*message*/, double* /*opt*/ )\r
-{\r
- long ret = 0;\r
-\r
- switch( selector ) {\r
- case kAsioSelectorSupported:\r
- if ( value == kAsioResetRequest\r
- || value == kAsioEngineVersion\r
- || value == kAsioResyncRequest\r
- || value == kAsioLatenciesChanged\r
- // The following three were added for ASIO 2.0, you don't\r
- // necessarily have to support them.\r
- || value == kAsioSupportsTimeInfo\r
- || value == kAsioSupportsTimeCode\r
- || value == kAsioSupportsInputMonitor)\r
- ret = 1L;\r
- break;\r
- case kAsioResetRequest:\r
- // Defer the task and perform the reset of the driver during the\r
- // next "safe" situation. You cannot reset the driver right now,\r
- // as this code is called from the driver. Reset the driver is\r
- // done by completely destruct is. I.e. ASIOStop(),\r
- // ASIODisposeBuffers(), Destruction Afterwards you initialize the\r
- // driver again.\r
- std::cerr << "\nRtApiAsio: driver reset requested!!!" << std::endl;\r
- ret = 1L;\r
- break;\r
- case kAsioResyncRequest:\r
- // This informs the application that the driver encountered some\r
- // non-fatal data loss. It is used for synchronization purposes\r
- // of different media. Added mainly to work around the Win16Mutex\r
- // problems in Windows 95/98 with the Windows Multimedia system,\r
- // which could lose data because the Mutex was held too long by\r
- // another thread. However a driver can issue it in other\r
- // situations, too.\r
- // std::cerr << "\nRtApiAsio: driver resync requested!!!" << std::endl;\r
- asioXRun = true;\r
- ret = 1L;\r
- break;\r
- case kAsioLatenciesChanged:\r
- // This will inform the host application that the drivers were\r
- // latencies changed. Beware, it this does not mean that the\r
- // buffer sizes have changed! You might need to update internal\r
- // delay data.\r
- std::cerr << "\nRtApiAsio: driver latency may have changed!!!" << std::endl;\r
- ret = 1L;\r
- break;\r
- case kAsioEngineVersion:\r
- // Return the supported ASIO version of the host application. If\r
- // a host application does not implement this selector, ASIO 1.0\r
- // is assumed by the driver.\r
- ret = 2L;\r
- break;\r
- case kAsioSupportsTimeInfo:\r
- // Informs the driver whether the\r
- // asioCallbacks.bufferSwitchTimeInfo() callback is supported.\r
- // For compatibility with ASIO 1.0 drivers the host application\r
- // should always support the "old" bufferSwitch method, too.\r
- ret = 0;\r
- break;\r
- case kAsioSupportsTimeCode:\r
- // Informs the driver whether application is interested in time\r
- // code info. If an application does not need to know about time\r
- // code, the driver has less work to do.\r
- ret = 0;\r
- break;\r
- }\r
- return ret;\r
-}\r
-\r
-static const char* getAsioErrorString( ASIOError result )\r
-{\r
- struct Messages\r
- {\r
- ASIOError value;\r
- const char*message;\r
- };\r
-\r
- static const Messages m[] =\r
- {\r
- { ASE_NotPresent, "Hardware input or output is not present or available." },\r
- { ASE_HWMalfunction, "Hardware is malfunctioning." },\r
- { ASE_InvalidParameter, "Invalid input parameter." },\r
- { ASE_InvalidMode, "Invalid mode." },\r
- { ASE_SPNotAdvancing, "Sample position not advancing." },\r
- { ASE_NoClock, "Sample clock or rate cannot be determined or is not present." },\r
- { ASE_NoMemory, "Not enough memory to complete the request." }\r
- };\r
-\r
- for ( unsigned int i = 0; i < sizeof(m)/sizeof(m[0]); ++i )\r
- if ( m[i].value == result ) return m[i].message;\r
-\r
- return "Unknown error.";\r
-}\r
-\r
-//******************** End of __WINDOWS_ASIO__ *********************//\r
-#endif\r
-\r
-\r
-#if defined(__WINDOWS_WASAPI__) // Windows WASAPI API\r
-\r
-// Authored by Marcus Tomlinson <themarcustomlinson@gmail.com>, April 2014\r
-// - Introduces support for the Windows WASAPI API\r
-// - Aims to deliver bit streams to and from hardware at the lowest possible latency, via the absolute minimum buffer sizes required\r
-// - Provides flexible stream configuration to an otherwise strict and inflexible WASAPI interface\r
-// - Includes automatic internal conversion of sample rate and buffer size between hardware and the user\r
-\r
-#ifndef INITGUID\r
- #define INITGUID\r
-#endif\r
-#include <audioclient.h>\r
-#include <avrt.h>\r
-#include <mmdeviceapi.h>\r
-#include <functiondiscoverykeys_devpkey.h>\r
-\r
-//=============================================================================\r
-\r
-#define SAFE_RELEASE( objectPtr )\\r
-if ( objectPtr )\\r
-{\\r
- objectPtr->Release();\\r
- objectPtr = NULL;\\r
-}\r
-\r
-typedef HANDLE ( __stdcall *TAvSetMmThreadCharacteristicsPtr )( LPCWSTR TaskName, LPDWORD TaskIndex );\r
-\r
-//-----------------------------------------------------------------------------\r
-\r
-// WASAPI dictates stream sample rate, format, channel count, and in some cases, buffer size.\r
-// Therefore we must perform all necessary conversions to user buffers in order to satisfy these\r
-// requirements. WasapiBuffer ring buffers are used between HwIn->UserIn and UserOut->HwOut to\r
-// provide intermediate storage for read / write synchronization.\r
-class WasapiBuffer\r
-{\r
-public:\r
- WasapiBuffer()\r
- : buffer_( NULL ),\r
- bufferSize_( 0 ),\r
- inIndex_( 0 ),\r
- outIndex_( 0 ) {}\r
-\r
- ~WasapiBuffer() {\r
- free( buffer_ );\r
- }\r
-\r
- // sets the length of the internal ring buffer\r
- void setBufferSize( unsigned int bufferSize, unsigned int formatBytes ) {\r
- free( buffer_ );\r
-\r
- buffer_ = ( char* ) calloc( bufferSize, formatBytes );\r
-\r
- bufferSize_ = bufferSize;\r
- inIndex_ = 0;\r
- outIndex_ = 0;\r
- }\r
-\r
- // attempt to push a buffer into the ring buffer at the current "in" index\r
- bool pushBuffer( char* buffer, unsigned int bufferSize, RtAudioFormat format )\r
- {\r
- if ( !buffer || // incoming buffer is NULL\r
- bufferSize == 0 || // incoming buffer has no data\r
- bufferSize > bufferSize_ ) // incoming buffer too large\r
- {\r
- return false;\r
- }\r
-\r
- unsigned int relOutIndex = outIndex_;\r
- unsigned int inIndexEnd = inIndex_ + bufferSize;\r
- if ( relOutIndex < inIndex_ && inIndexEnd >= bufferSize_ ) {\r
- relOutIndex += bufferSize_;\r
- }\r
-\r
- // "in" index can end on the "out" index but cannot begin at it\r
- if ( inIndex_ <= relOutIndex && inIndexEnd > relOutIndex ) {\r
- return false; // not enough space between "in" index and "out" index\r
- }\r
-\r
- // copy buffer from external to internal\r
- int fromZeroSize = inIndex_ + bufferSize - bufferSize_;\r
- fromZeroSize = fromZeroSize < 0 ? 0 : fromZeroSize;\r
- int fromInSize = bufferSize - fromZeroSize;\r
-\r
- switch( format )\r
- {\r
- case RTAUDIO_SINT8:\r
- memcpy( &( ( char* ) buffer_ )[inIndex_], buffer, fromInSize * sizeof( char ) );\r
- memcpy( buffer_, &( ( char* ) buffer )[fromInSize], fromZeroSize * sizeof( char ) );\r
- break;\r
- case RTAUDIO_SINT16:\r
- memcpy( &( ( short* ) buffer_ )[inIndex_], buffer, fromInSize * sizeof( short ) );\r
- memcpy( buffer_, &( ( short* ) buffer )[fromInSize], fromZeroSize * sizeof( short ) );\r
- break;\r
- case RTAUDIO_SINT24:\r
- memcpy( &( ( S24* ) buffer_ )[inIndex_], buffer, fromInSize * sizeof( S24 ) );\r
- memcpy( buffer_, &( ( S24* ) buffer )[fromInSize], fromZeroSize * sizeof( S24 ) );\r
- break;\r
- case RTAUDIO_SINT32:\r
- memcpy( &( ( int* ) buffer_ )[inIndex_], buffer, fromInSize * sizeof( int ) );\r
- memcpy( buffer_, &( ( int* ) buffer )[fromInSize], fromZeroSize * sizeof( int ) );\r
- break;\r
- case RTAUDIO_FLOAT32:\r
- memcpy( &( ( float* ) buffer_ )[inIndex_], buffer, fromInSize * sizeof( float ) );\r
- memcpy( buffer_, &( ( float* ) buffer )[fromInSize], fromZeroSize * sizeof( float ) );\r
- break;\r
- case RTAUDIO_FLOAT64:\r
- memcpy( &( ( double* ) buffer_ )[inIndex_], buffer, fromInSize * sizeof( double ) );\r
- memcpy( buffer_, &( ( double* ) buffer )[fromInSize], fromZeroSize * sizeof( double ) );\r
- break;\r
- }\r
-\r
- // update "in" index\r
- inIndex_ += bufferSize;\r
- inIndex_ %= bufferSize_;\r
-\r
- return true;\r
- }\r
-\r
- // attempt to pull a buffer from the ring buffer from the current "out" index\r
- bool pullBuffer( char* buffer, unsigned int bufferSize, RtAudioFormat format )\r
- {\r
- if ( !buffer || // incoming buffer is NULL\r
- bufferSize == 0 || // incoming buffer has no data\r
- bufferSize > bufferSize_ ) // incoming buffer too large\r
- {\r
- return false;\r
- }\r
-\r
- unsigned int relInIndex = inIndex_;\r
- unsigned int outIndexEnd = outIndex_ + bufferSize;\r
- if ( relInIndex < outIndex_ && outIndexEnd >= bufferSize_ ) {\r
- relInIndex += bufferSize_;\r
- }\r
-\r
- // "out" index can begin at and end on the "in" index\r
- if ( outIndex_ < relInIndex && outIndexEnd > relInIndex ) {\r
- return false; // not enough space between "out" index and "in" index\r
- }\r
-\r
- // copy buffer from internal to external\r
- int fromZeroSize = outIndex_ + bufferSize - bufferSize_;\r
- fromZeroSize = fromZeroSize < 0 ? 0 : fromZeroSize;\r
- int fromOutSize = bufferSize - fromZeroSize;\r
-\r
- switch( format )\r
- {\r
- case RTAUDIO_SINT8:\r
- memcpy( buffer, &( ( char* ) buffer_ )[outIndex_], fromOutSize * sizeof( char ) );\r
- memcpy( &( ( char* ) buffer )[fromOutSize], buffer_, fromZeroSize * sizeof( char ) );\r
- break;\r
- case RTAUDIO_SINT16:\r
- memcpy( buffer, &( ( short* ) buffer_ )[outIndex_], fromOutSize * sizeof( short ) );\r
- memcpy( &( ( short* ) buffer )[fromOutSize], buffer_, fromZeroSize * sizeof( short ) );\r
- break;\r
- case RTAUDIO_SINT24:\r
- memcpy( buffer, &( ( S24* ) buffer_ )[outIndex_], fromOutSize * sizeof( S24 ) );\r
- memcpy( &( ( S24* ) buffer )[fromOutSize], buffer_, fromZeroSize * sizeof( S24 ) );\r
- break;\r
- case RTAUDIO_SINT32:\r
- memcpy( buffer, &( ( int* ) buffer_ )[outIndex_], fromOutSize * sizeof( int ) );\r
- memcpy( &( ( int* ) buffer )[fromOutSize], buffer_, fromZeroSize * sizeof( int ) );\r
- break;\r
- case RTAUDIO_FLOAT32:\r
- memcpy( buffer, &( ( float* ) buffer_ )[outIndex_], fromOutSize * sizeof( float ) );\r
- memcpy( &( ( float* ) buffer )[fromOutSize], buffer_, fromZeroSize * sizeof( float ) );\r
- break;\r
- case RTAUDIO_FLOAT64:\r
- memcpy( buffer, &( ( double* ) buffer_ )[outIndex_], fromOutSize * sizeof( double ) );\r
- memcpy( &( ( double* ) buffer )[fromOutSize], buffer_, fromZeroSize * sizeof( double ) );\r
- break;\r
- }\r
-\r
- // update "out" index\r
- outIndex_ += bufferSize;\r
- outIndex_ %= bufferSize_;\r
-\r
- return true;\r
- }\r
-\r
-private:\r
- char* buffer_;\r
- unsigned int bufferSize_;\r
- unsigned int inIndex_;\r
- unsigned int outIndex_;\r
-};\r
-\r
-//-----------------------------------------------------------------------------\r
-\r
-// In order to satisfy WASAPI's buffer requirements, we need a means of converting sample rate\r
-// between HW and the user. The convertBufferWasapi function is used to perform this conversion\r
-// between HwIn->UserIn and UserOut->HwOut during the stream callback loop.\r
-// This sample rate converter favors speed over quality, and works best with conversions between\r
-// one rate and its multiple.\r
-void convertBufferWasapi( char* outBuffer,\r
- const char* inBuffer,\r
- const unsigned int& channelCount,\r
- const unsigned int& inSampleRate,\r
- const unsigned int& outSampleRate,\r
- const unsigned int& inSampleCount,\r
- unsigned int& outSampleCount,\r
- const RtAudioFormat& format )\r
-{\r
- // calculate the new outSampleCount and relative sampleStep\r
- float sampleRatio = ( float ) outSampleRate / inSampleRate;\r
- float sampleStep = 1.0f / sampleRatio;\r
- float inSampleFraction = 0.0f;\r
-\r
- outSampleCount = ( unsigned int ) roundf( inSampleCount * sampleRatio );\r
-\r
- // frame-by-frame, copy each relative input sample into it's corresponding output sample\r
- for ( unsigned int outSample = 0; outSample < outSampleCount; outSample++ )\r
- {\r
- unsigned int inSample = ( unsigned int ) inSampleFraction;\r
-\r
- switch ( format )\r
- {\r
- case RTAUDIO_SINT8:\r
- memcpy( &( ( char* ) outBuffer )[ outSample * channelCount ], &( ( char* ) inBuffer )[ inSample * channelCount ], channelCount * sizeof( char ) );\r
- break;\r
- case RTAUDIO_SINT16:\r
- memcpy( &( ( short* ) outBuffer )[ outSample * channelCount ], &( ( short* ) inBuffer )[ inSample * channelCount ], channelCount * sizeof( short ) );\r
- break;\r
- case RTAUDIO_SINT24:\r
- memcpy( &( ( S24* ) outBuffer )[ outSample * channelCount ], &( ( S24* ) inBuffer )[ inSample * channelCount ], channelCount * sizeof( S24 ) );\r
- break;\r
- case RTAUDIO_SINT32:\r
- memcpy( &( ( int* ) outBuffer )[ outSample * channelCount ], &( ( int* ) inBuffer )[ inSample * channelCount ], channelCount * sizeof( int ) );\r
- break;\r
- case RTAUDIO_FLOAT32:\r
- memcpy( &( ( float* ) outBuffer )[ outSample * channelCount ], &( ( float* ) inBuffer )[ inSample * channelCount ], channelCount * sizeof( float ) );\r
- break;\r
- case RTAUDIO_FLOAT64:\r
- memcpy( &( ( double* ) outBuffer )[ outSample * channelCount ], &( ( double* ) inBuffer )[ inSample * channelCount ], channelCount * sizeof( double ) );\r
- break;\r
- }\r
-\r
- // jump to next in sample\r
- inSampleFraction += sampleStep;\r
- }\r
-}\r
-\r
-//-----------------------------------------------------------------------------\r
-\r
-// A structure to hold various information related to the WASAPI implementation.\r
-struct WasapiHandle\r
-{\r
- IAudioClient* captureAudioClient;\r
- IAudioClient* renderAudioClient;\r
- IAudioCaptureClient* captureClient;\r
- IAudioRenderClient* renderClient;\r
- HANDLE captureEvent;\r
- HANDLE renderEvent;\r
-\r
- WasapiHandle()\r
- : captureAudioClient( NULL ),\r
- renderAudioClient( NULL ),\r
- captureClient( NULL ),\r
- renderClient( NULL ),\r
- captureEvent( NULL ),\r
- renderEvent( NULL ) {}\r
-};\r
-\r
-//=============================================================================\r
-\r
-RtApiWasapi::RtApiWasapi()\r
- : coInitialized_( false ), deviceEnumerator_( NULL )\r
-{\r
- // WASAPI can run either apartment or multi-threaded\r
- HRESULT hr = CoInitialize( NULL );\r
- if ( !FAILED( hr ) )\r
- coInitialized_ = true;\r
-\r
- // Instantiate device enumerator\r
- hr = CoCreateInstance( __uuidof( MMDeviceEnumerator ), NULL,\r
- CLSCTX_ALL, __uuidof( IMMDeviceEnumerator ),\r
- ( void** ) &deviceEnumerator_ );\r
-\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::RtApiWasapi: Unable to instantiate device enumerator";\r
- error( RtAudioError::DRIVER_ERROR );\r
- }\r
-}\r
-\r
-//-----------------------------------------------------------------------------\r
-\r
-RtApiWasapi::~RtApiWasapi()\r
-{\r
- if ( stream_.state != STREAM_CLOSED )\r
- closeStream();\r
-\r
- SAFE_RELEASE( deviceEnumerator_ );\r
-\r
- // If this object previously called CoInitialize()\r
- if ( coInitialized_ )\r
- CoUninitialize();\r
-}\r
-\r
-//=============================================================================\r
-\r
-unsigned int RtApiWasapi::getDeviceCount( void )\r
-{\r
- unsigned int captureDeviceCount = 0;\r
- unsigned int renderDeviceCount = 0;\r
-\r
- IMMDeviceCollection* captureDevices = NULL;\r
- IMMDeviceCollection* renderDevices = NULL;\r
-\r
- // Count capture devices\r
- errorText_.clear();\r
- HRESULT hr = deviceEnumerator_->EnumAudioEndpoints( eCapture, DEVICE_STATE_ACTIVE, &captureDevices );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::getDeviceCount: Unable to retrieve capture device collection.";\r
- goto Exit;\r
- }\r
-\r
- hr = captureDevices->GetCount( &captureDeviceCount );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::getDeviceCount: Unable to retrieve capture device count.";\r
- goto Exit;\r
- }\r
-\r
- // Count render devices\r
- hr = deviceEnumerator_->EnumAudioEndpoints( eRender, DEVICE_STATE_ACTIVE, &renderDevices );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::getDeviceCount: Unable to retrieve render device collection.";\r
- goto Exit;\r
- }\r
-\r
- hr = renderDevices->GetCount( &renderDeviceCount );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::getDeviceCount: Unable to retrieve render device count.";\r
- goto Exit;\r
- }\r
-\r
-Exit:\r
- // release all references\r
- SAFE_RELEASE( captureDevices );\r
- SAFE_RELEASE( renderDevices );\r
-\r
- if ( errorText_.empty() )\r
- return captureDeviceCount + renderDeviceCount;\r
-\r
- error( RtAudioError::DRIVER_ERROR );\r
- return 0;\r
-}\r
-\r
-//-----------------------------------------------------------------------------\r
-\r
-RtAudio::DeviceInfo RtApiWasapi::getDeviceInfo( unsigned int device )\r
-{\r
- RtAudio::DeviceInfo info;\r
- unsigned int captureDeviceCount = 0;\r
- unsigned int renderDeviceCount = 0;\r
- std::string defaultDeviceName;\r
- bool isCaptureDevice = false;\r
-\r
- PROPVARIANT deviceNameProp;\r
- PROPVARIANT defaultDeviceNameProp;\r
-\r
- IMMDeviceCollection* captureDevices = NULL;\r
- IMMDeviceCollection* renderDevices = NULL;\r
- IMMDevice* devicePtr = NULL;\r
- IMMDevice* defaultDevicePtr = NULL;\r
- IAudioClient* audioClient = NULL;\r
- IPropertyStore* devicePropStore = NULL;\r
- IPropertyStore* defaultDevicePropStore = NULL;\r
-\r
- WAVEFORMATEX* deviceFormat = NULL;\r
- WAVEFORMATEX* closestMatchFormat = NULL;\r
-\r
- // probed\r
- info.probed = false;\r
-\r
- // Count capture devices\r
- errorText_.clear();\r
- RtAudioError::Type errorType = RtAudioError::DRIVER_ERROR;\r
- HRESULT hr = deviceEnumerator_->EnumAudioEndpoints( eCapture, DEVICE_STATE_ACTIVE, &captureDevices );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve capture device collection.";\r
- goto Exit;\r
- }\r
-\r
- hr = captureDevices->GetCount( &captureDeviceCount );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve capture device count.";\r
- goto Exit;\r
- }\r
-\r
- // Count render devices\r
- hr = deviceEnumerator_->EnumAudioEndpoints( eRender, DEVICE_STATE_ACTIVE, &renderDevices );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve render device collection.";\r
- goto Exit;\r
- }\r
-\r
- hr = renderDevices->GetCount( &renderDeviceCount );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve render device count.";\r
- goto Exit;\r
- }\r
-\r
- // validate device index\r
- if ( device >= captureDeviceCount + renderDeviceCount ) {\r
- errorText_ = "RtApiWasapi::getDeviceInfo: Invalid device index.";\r
- errorType = RtAudioError::INVALID_USE;\r
- goto Exit;\r
- }\r
-\r
- // determine whether index falls within capture or render devices\r
- if ( device >= renderDeviceCount ) {\r
- hr = captureDevices->Item( device - renderDeviceCount, &devicePtr );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve capture device handle.";\r
- goto Exit;\r
- }\r
- isCaptureDevice = true;\r
- }\r
- else {\r
- hr = renderDevices->Item( device, &devicePtr );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve render device handle.";\r
- goto Exit;\r
- }\r
- isCaptureDevice = false;\r
- }\r
-\r
- // get default device name\r
- if ( isCaptureDevice ) {\r
- hr = deviceEnumerator_->GetDefaultAudioEndpoint( eCapture, eConsole, &defaultDevicePtr );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve default capture device handle.";\r
- goto Exit;\r
- }\r
- }\r
- else {\r
- hr = deviceEnumerator_->GetDefaultAudioEndpoint( eRender, eConsole, &defaultDevicePtr );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve default render device handle.";\r
- goto Exit;\r
- }\r
- }\r
-\r
- hr = defaultDevicePtr->OpenPropertyStore( STGM_READ, &defaultDevicePropStore );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::getDeviceInfo: Unable to open default device property store.";\r
- goto Exit;\r
- }\r
- PropVariantInit( &defaultDeviceNameProp );\r
-\r
- hr = defaultDevicePropStore->GetValue( PKEY_Device_FriendlyName, &defaultDeviceNameProp );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve default device property: PKEY_Device_FriendlyName.";\r
- goto Exit;\r
- }\r
-\r
- defaultDeviceName = convertCharPointerToStdString(defaultDeviceNameProp.pwszVal);\r
-\r
- // name\r
- hr = devicePtr->OpenPropertyStore( STGM_READ, &devicePropStore );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::getDeviceInfo: Unable to open device property store.";\r
- goto Exit;\r
- }\r
-\r
- PropVariantInit( &deviceNameProp );\r
-\r
- hr = devicePropStore->GetValue( PKEY_Device_FriendlyName, &deviceNameProp );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve device property: PKEY_Device_FriendlyName.";\r
- goto Exit;\r
- }\r
-\r
- info.name =convertCharPointerToStdString(deviceNameProp.pwszVal);\r
-\r
- // is default\r
- if ( isCaptureDevice ) {\r
- info.isDefaultInput = info.name == defaultDeviceName;\r
- info.isDefaultOutput = false;\r
- }\r
- else {\r
- info.isDefaultInput = false;\r
- info.isDefaultOutput = info.name == defaultDeviceName;\r
- }\r
-\r
- // channel count\r
- hr = devicePtr->Activate( __uuidof( IAudioClient ), CLSCTX_ALL, NULL, ( void** ) &audioClient );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve device audio client.";\r
- goto Exit;\r
- }\r
-\r
- hr = audioClient->GetMixFormat( &deviceFormat );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve device mix format.";\r
- goto Exit;\r
- }\r
-\r
- if ( isCaptureDevice ) {\r
- info.inputChannels = deviceFormat->nChannels;\r
- info.outputChannels = 0;\r
- info.duplexChannels = 0;\r
- }\r
- else {\r
- info.inputChannels = 0;\r
- info.outputChannels = deviceFormat->nChannels;\r
- info.duplexChannels = 0;\r
- }\r
-\r
- // sample rates\r
- info.sampleRates.clear();\r
-\r
- // allow support for all sample rates as we have a built-in sample rate converter\r
- for ( unsigned int i = 0; i < MAX_SAMPLE_RATES; i++ ) {\r
- info.sampleRates.push_back( SAMPLE_RATES[i] );\r
- }\r
- info.preferredSampleRate = deviceFormat->nSamplesPerSec;\r
-\r
- // native format\r
- info.nativeFormats = 0;\r
-\r
- if ( deviceFormat->wFormatTag == WAVE_FORMAT_IEEE_FLOAT ||\r
- ( deviceFormat->wFormatTag == WAVE_FORMAT_EXTENSIBLE &&\r
- ( ( WAVEFORMATEXTENSIBLE* ) deviceFormat )->SubFormat == KSDATAFORMAT_SUBTYPE_IEEE_FLOAT ) )\r
- {\r
- if ( deviceFormat->wBitsPerSample == 32 ) {\r
- info.nativeFormats |= RTAUDIO_FLOAT32;\r
- }\r
- else if ( deviceFormat->wBitsPerSample == 64 ) {\r
- info.nativeFormats |= RTAUDIO_FLOAT64;\r
- }\r
- }\r
- else if ( deviceFormat->wFormatTag == WAVE_FORMAT_PCM ||\r
- ( deviceFormat->wFormatTag == WAVE_FORMAT_EXTENSIBLE &&\r
- ( ( WAVEFORMATEXTENSIBLE* ) deviceFormat )->SubFormat == KSDATAFORMAT_SUBTYPE_PCM ) )\r
- {\r
- if ( deviceFormat->wBitsPerSample == 8 ) {\r
- info.nativeFormats |= RTAUDIO_SINT8;\r
- }\r
- else if ( deviceFormat->wBitsPerSample == 16 ) {\r
- info.nativeFormats |= RTAUDIO_SINT16;\r
- }\r
- else if ( deviceFormat->wBitsPerSample == 24 ) {\r
- info.nativeFormats |= RTAUDIO_SINT24;\r
- }\r
- else if ( deviceFormat->wBitsPerSample == 32 ) {\r
- info.nativeFormats |= RTAUDIO_SINT32;\r
- }\r
- }\r
-\r
- // probed\r
- info.probed = true;\r
-\r
-Exit:\r
- // release all references\r
- PropVariantClear( &deviceNameProp );\r
- PropVariantClear( &defaultDeviceNameProp );\r
-\r
- SAFE_RELEASE( captureDevices );\r
- SAFE_RELEASE( renderDevices );\r
- SAFE_RELEASE( devicePtr );\r
- SAFE_RELEASE( defaultDevicePtr );\r
- SAFE_RELEASE( audioClient );\r
- SAFE_RELEASE( devicePropStore );\r
- SAFE_RELEASE( defaultDevicePropStore );\r
-\r
- CoTaskMemFree( deviceFormat );\r
- CoTaskMemFree( closestMatchFormat );\r
-\r
- if ( !errorText_.empty() )\r
- error( errorType );\r
- return info;\r
-}\r
-\r
-//-----------------------------------------------------------------------------\r
-\r
-unsigned int RtApiWasapi::getDefaultOutputDevice( void )\r
-{\r
- for ( unsigned int i = 0; i < getDeviceCount(); i++ ) {\r
- if ( getDeviceInfo( i ).isDefaultOutput ) {\r
- return i;\r
- }\r
- }\r
-\r
- return 0;\r
-}\r
-\r
-//-----------------------------------------------------------------------------\r
-\r
-unsigned int RtApiWasapi::getDefaultInputDevice( void )\r
-{\r
- for ( unsigned int i = 0; i < getDeviceCount(); i++ ) {\r
- if ( getDeviceInfo( i ).isDefaultInput ) {\r
- return i;\r
- }\r
- }\r
-\r
- return 0;\r
-}\r
-\r
-//-----------------------------------------------------------------------------\r
-\r
-void RtApiWasapi::closeStream( void )\r
-{\r
- if ( stream_.state == STREAM_CLOSED ) {\r
- errorText_ = "RtApiWasapi::closeStream: No open stream to close.";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- if ( stream_.state != STREAM_STOPPED )\r
- stopStream();\r
-\r
- // clean up stream memory\r
- SAFE_RELEASE( ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient )\r
- SAFE_RELEASE( ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient )\r
-\r
- SAFE_RELEASE( ( ( WasapiHandle* ) stream_.apiHandle )->captureClient )\r
- SAFE_RELEASE( ( ( WasapiHandle* ) stream_.apiHandle )->renderClient )\r
-\r
- if ( ( ( WasapiHandle* ) stream_.apiHandle )->captureEvent )\r
- CloseHandle( ( ( WasapiHandle* ) stream_.apiHandle )->captureEvent );\r
-\r
- if ( ( ( WasapiHandle* ) stream_.apiHandle )->renderEvent )\r
- CloseHandle( ( ( WasapiHandle* ) stream_.apiHandle )->renderEvent );\r
-\r
- delete ( WasapiHandle* ) stream_.apiHandle;\r
- stream_.apiHandle = NULL;\r
-\r
- for ( int i = 0; i < 2; i++ ) {\r
- if ( stream_.userBuffer[i] ) {\r
- free( stream_.userBuffer[i] );\r
- stream_.userBuffer[i] = 0;\r
- }\r
- }\r
-\r
- if ( stream_.deviceBuffer ) {\r
- free( stream_.deviceBuffer );\r
- stream_.deviceBuffer = 0;\r
- }\r
-\r
- // update stream state\r
- stream_.state = STREAM_CLOSED;\r
-}\r
-\r
-//-----------------------------------------------------------------------------\r
-\r
-void RtApiWasapi::startStream( void )\r
-{\r
- verifyStream();\r
-\r
- if ( stream_.state == STREAM_RUNNING ) {\r
- errorText_ = "RtApiWasapi::startStream: The stream is already running.";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- // update stream state\r
- stream_.state = STREAM_RUNNING;\r
-\r
- // create WASAPI stream thread\r
- stream_.callbackInfo.thread = ( ThreadHandle ) CreateThread( NULL, 0, runWasapiThread, this, CREATE_SUSPENDED, NULL );\r
-\r
- if ( !stream_.callbackInfo.thread ) {\r
- errorText_ = "RtApiWasapi::startStream: Unable to instantiate callback thread.";\r
- error( RtAudioError::THREAD_ERROR );\r
- }\r
- else {\r
- SetThreadPriority( ( void* ) stream_.callbackInfo.thread, stream_.callbackInfo.priority );\r
- ResumeThread( ( void* ) stream_.callbackInfo.thread );\r
- }\r
-}\r
-\r
-//-----------------------------------------------------------------------------\r
-\r
-void RtApiWasapi::stopStream( void )\r
-{\r
- verifyStream();\r
-\r
- if ( stream_.state == STREAM_STOPPED ) {\r
- errorText_ = "RtApiWasapi::stopStream: The stream is already stopped.";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- // inform stream thread by setting stream state to STREAM_STOPPING\r
- stream_.state = STREAM_STOPPING;\r
-\r
- // wait until stream thread is stopped\r
- while( stream_.state != STREAM_STOPPED ) {\r
- Sleep( 1 );\r
- }\r
-\r
- // Wait for the last buffer to play before stopping.\r
- Sleep( 1000 * stream_.bufferSize / stream_.sampleRate );\r
-\r
- // stop capture client if applicable\r
- if ( ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient ) {\r
- HRESULT hr = ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient->Stop();\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::stopStream: Unable to stop capture stream.";\r
- error( RtAudioError::DRIVER_ERROR );\r
- return;\r
- }\r
- }\r
-\r
- // stop render client if applicable\r
- if ( ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient ) {\r
- HRESULT hr = ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient->Stop();\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::stopStream: Unable to stop render stream.";\r
- error( RtAudioError::DRIVER_ERROR );\r
- return;\r
- }\r
- }\r
-\r
- // close thread handle\r
- if ( stream_.callbackInfo.thread && !CloseHandle( ( void* ) stream_.callbackInfo.thread ) ) {\r
- errorText_ = "RtApiWasapi::stopStream: Unable to close callback thread.";\r
- error( RtAudioError::THREAD_ERROR );\r
- return;\r
- }\r
-\r
- stream_.callbackInfo.thread = (ThreadHandle) NULL;\r
-}\r
-\r
-//-----------------------------------------------------------------------------\r
-\r
-void RtApiWasapi::abortStream( void )\r
-{\r
- verifyStream();\r
-\r
- if ( stream_.state == STREAM_STOPPED ) {\r
- errorText_ = "RtApiWasapi::abortStream: The stream is already stopped.";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- // inform stream thread by setting stream state to STREAM_STOPPING\r
- stream_.state = STREAM_STOPPING;\r
-\r
- // wait until stream thread is stopped\r
- while ( stream_.state != STREAM_STOPPED ) {\r
- Sleep( 1 );\r
- }\r
-\r
- // stop capture client if applicable\r
- if ( ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient ) {\r
- HRESULT hr = ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient->Stop();\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::abortStream: Unable to stop capture stream.";\r
- error( RtAudioError::DRIVER_ERROR );\r
- return;\r
- }\r
- }\r
-\r
- // stop render client if applicable\r
- if ( ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient ) {\r
- HRESULT hr = ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient->Stop();\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::abortStream: Unable to stop render stream.";\r
- error( RtAudioError::DRIVER_ERROR );\r
- return;\r
- }\r
- }\r
-\r
- // close thread handle\r
- if ( stream_.callbackInfo.thread && !CloseHandle( ( void* ) stream_.callbackInfo.thread ) ) {\r
- errorText_ = "RtApiWasapi::abortStream: Unable to close callback thread.";\r
- error( RtAudioError::THREAD_ERROR );\r
- return;\r
- }\r
-\r
- stream_.callbackInfo.thread = (ThreadHandle) NULL;\r
-}\r
-\r
-//-----------------------------------------------------------------------------\r
-\r
-bool RtApiWasapi::probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,\r
- unsigned int firstChannel, unsigned int sampleRate,\r
- RtAudioFormat format, unsigned int* bufferSize,\r
- RtAudio::StreamOptions* options )\r
-{\r
- bool methodResult = FAILURE;\r
- unsigned int captureDeviceCount = 0;\r
- unsigned int renderDeviceCount = 0;\r
-\r
- IMMDeviceCollection* captureDevices = NULL;\r
- IMMDeviceCollection* renderDevices = NULL;\r
- IMMDevice* devicePtr = NULL;\r
- WAVEFORMATEX* deviceFormat = NULL;\r
- unsigned int bufferBytes;\r
- stream_.state = STREAM_STOPPED;\r
-\r
- // create API Handle if not already created\r
- if ( !stream_.apiHandle )\r
- stream_.apiHandle = ( void* ) new WasapiHandle();\r
-\r
- // Count capture devices\r
- errorText_.clear();\r
- RtAudioError::Type errorType = RtAudioError::DRIVER_ERROR;\r
- HRESULT hr = deviceEnumerator_->EnumAudioEndpoints( eCapture, DEVICE_STATE_ACTIVE, &captureDevices );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve capture device collection.";\r
- goto Exit;\r
- }\r
-\r
- hr = captureDevices->GetCount( &captureDeviceCount );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve capture device count.";\r
- goto Exit;\r
- }\r
-\r
- // Count render devices\r
- hr = deviceEnumerator_->EnumAudioEndpoints( eRender, DEVICE_STATE_ACTIVE, &renderDevices );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve render device collection.";\r
- goto Exit;\r
- }\r
-\r
- hr = renderDevices->GetCount( &renderDeviceCount );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve render device count.";\r
- goto Exit;\r
- }\r
-\r
- // validate device index\r
- if ( device >= captureDeviceCount + renderDeviceCount ) {\r
- errorType = RtAudioError::INVALID_USE;\r
- errorText_ = "RtApiWasapi::probeDeviceOpen: Invalid device index.";\r
- goto Exit;\r
- }\r
-\r
- // determine whether index falls within capture or render devices\r
- if ( device >= renderDeviceCount ) {\r
- if ( mode != INPUT ) {\r
- errorType = RtAudioError::INVALID_USE;\r
- errorText_ = "RtApiWasapi::probeDeviceOpen: Capture device selected as output device.";\r
- goto Exit;\r
- }\r
-\r
- // retrieve captureAudioClient from devicePtr\r
- IAudioClient*& captureAudioClient = ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient;\r
-\r
- hr = captureDevices->Item( device - renderDeviceCount, &devicePtr );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve capture device handle.";\r
- goto Exit;\r
- }\r
-\r
- hr = devicePtr->Activate( __uuidof( IAudioClient ), CLSCTX_ALL,\r
- NULL, ( void** ) &captureAudioClient );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve device audio client.";\r
- goto Exit;\r
- }\r
-\r
- hr = captureAudioClient->GetMixFormat( &deviceFormat );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve device mix format.";\r
- goto Exit;\r
- }\r
-\r
- stream_.nDeviceChannels[mode] = deviceFormat->nChannels;\r
- captureAudioClient->GetStreamLatency( ( long long* ) &stream_.latency[mode] );\r
- }\r
- else {\r
- if ( mode != OUTPUT ) {\r
- errorType = RtAudioError::INVALID_USE;\r
- errorText_ = "RtApiWasapi::probeDeviceOpen: Render device selected as input device.";\r
- goto Exit;\r
- }\r
-\r
- // retrieve renderAudioClient from devicePtr\r
- IAudioClient*& renderAudioClient = ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient;\r
-\r
- hr = renderDevices->Item( device, &devicePtr );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve render device handle.";\r
- goto Exit;\r
- }\r
-\r
- hr = devicePtr->Activate( __uuidof( IAudioClient ), CLSCTX_ALL,\r
- NULL, ( void** ) &renderAudioClient );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve device audio client.";\r
- goto Exit;\r
- }\r
-\r
- hr = renderAudioClient->GetMixFormat( &deviceFormat );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve device mix format.";\r
- goto Exit;\r
- }\r
-\r
- stream_.nDeviceChannels[mode] = deviceFormat->nChannels;\r
- renderAudioClient->GetStreamLatency( ( long long* ) &stream_.latency[mode] );\r
- }\r
-\r
- // fill stream data\r
- if ( ( stream_.mode == OUTPUT && mode == INPUT ) ||\r
- ( stream_.mode == INPUT && mode == OUTPUT ) ) {\r
- stream_.mode = DUPLEX;\r
- }\r
- else {\r
- stream_.mode = mode;\r
- }\r
-\r
- stream_.device[mode] = device;\r
- stream_.doByteSwap[mode] = false;\r
- stream_.sampleRate = sampleRate;\r
- stream_.bufferSize = *bufferSize;\r
- stream_.nBuffers = 1;\r
- stream_.nUserChannels[mode] = channels;\r
- stream_.channelOffset[mode] = firstChannel;\r
- stream_.userFormat = format;\r
- stream_.deviceFormat[mode] = getDeviceInfo( device ).nativeFormats;\r
-\r
- if ( options && options->flags & RTAUDIO_NONINTERLEAVED )\r
- stream_.userInterleaved = false;\r
- else\r
- stream_.userInterleaved = true;\r
- stream_.deviceInterleaved[mode] = true;\r
-\r
- // Set flags for buffer conversion.\r
- stream_.doConvertBuffer[mode] = false;\r
- if ( stream_.userFormat != stream_.deviceFormat[mode] ||\r
- stream_.nUserChannels != stream_.nDeviceChannels )\r
- stream_.doConvertBuffer[mode] = true;\r
- else if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&\r
- stream_.nUserChannels[mode] > 1 )\r
- stream_.doConvertBuffer[mode] = true;\r
-\r
- if ( stream_.doConvertBuffer[mode] )\r
- setConvertInfo( mode, 0 );\r
-\r
- // Allocate necessary internal buffers\r
- bufferBytes = stream_.nUserChannels[mode] * stream_.bufferSize * formatBytes( stream_.userFormat );\r
-\r
- stream_.userBuffer[mode] = ( char* ) calloc( bufferBytes, 1 );\r
- if ( !stream_.userBuffer[mode] ) {\r
- errorType = RtAudioError::MEMORY_ERROR;\r
- errorText_ = "RtApiWasapi::probeDeviceOpen: Error allocating user buffer memory.";\r
- goto Exit;\r
- }\r
-\r
- if ( options && options->flags & RTAUDIO_SCHEDULE_REALTIME )\r
- stream_.callbackInfo.priority = 15;\r
- else\r
- stream_.callbackInfo.priority = 0;\r
-\r
- ///! TODO: RTAUDIO_MINIMIZE_LATENCY // Provide stream buffers directly to callback\r
- ///! TODO: RTAUDIO_HOG_DEVICE // Exclusive mode\r
-\r
- methodResult = SUCCESS;\r
-\r
-Exit:\r
- //clean up\r
- SAFE_RELEASE( captureDevices );\r
- SAFE_RELEASE( renderDevices );\r
- SAFE_RELEASE( devicePtr );\r
- CoTaskMemFree( deviceFormat );\r
-\r
- // if method failed, close the stream\r
- if ( methodResult == FAILURE )\r
- closeStream();\r
-\r
- if ( !errorText_.empty() )\r
- error( errorType );\r
- return methodResult;\r
-}\r
-\r
-//=============================================================================\r
-\r
-DWORD WINAPI RtApiWasapi::runWasapiThread( void* wasapiPtr )\r
-{\r
- if ( wasapiPtr )\r
- ( ( RtApiWasapi* ) wasapiPtr )->wasapiThread();\r
-\r
- return 0;\r
-}\r
-\r
-DWORD WINAPI RtApiWasapi::stopWasapiThread( void* wasapiPtr )\r
-{\r
- if ( wasapiPtr )\r
- ( ( RtApiWasapi* ) wasapiPtr )->stopStream();\r
-\r
- return 0;\r
-}\r
-\r
-DWORD WINAPI RtApiWasapi::abortWasapiThread( void* wasapiPtr )\r
-{\r
- if ( wasapiPtr )\r
- ( ( RtApiWasapi* ) wasapiPtr )->abortStream();\r
-\r
- return 0;\r
-}\r
-\r
-//-----------------------------------------------------------------------------\r
-\r
-void RtApiWasapi::wasapiThread()\r
-{\r
- // as this is a new thread, we must CoInitialize it\r
- CoInitialize( NULL );\r
-\r
- HRESULT hr;\r
-\r
- IAudioClient* captureAudioClient = ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient;\r
- IAudioClient* renderAudioClient = ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient;\r
- IAudioCaptureClient* captureClient = ( ( WasapiHandle* ) stream_.apiHandle )->captureClient;\r
- IAudioRenderClient* renderClient = ( ( WasapiHandle* ) stream_.apiHandle )->renderClient;\r
- HANDLE captureEvent = ( ( WasapiHandle* ) stream_.apiHandle )->captureEvent;\r
- HANDLE renderEvent = ( ( WasapiHandle* ) stream_.apiHandle )->renderEvent;\r
-\r
- WAVEFORMATEX* captureFormat = NULL;\r
- WAVEFORMATEX* renderFormat = NULL;\r
- float captureSrRatio = 0.0f;\r
- float renderSrRatio = 0.0f;\r
- WasapiBuffer captureBuffer;\r
- WasapiBuffer renderBuffer;\r
-\r
- // declare local stream variables\r
- RtAudioCallback callback = ( RtAudioCallback ) stream_.callbackInfo.callback;\r
- BYTE* streamBuffer = NULL;\r
- unsigned long captureFlags = 0;\r
- unsigned int bufferFrameCount = 0;\r
- unsigned int numFramesPadding = 0;\r
- unsigned int convBufferSize = 0;\r
- bool callbackPushed = false;\r
- bool callbackPulled = false;\r
- bool callbackStopped = false;\r
- int callbackResult = 0;\r
-\r
- // convBuffer is used to store converted buffers between WASAPI and the user\r
- char* convBuffer = NULL;\r
- unsigned int convBuffSize = 0;\r
- unsigned int deviceBuffSize = 0;\r
-\r
- errorText_.clear();\r
- RtAudioError::Type errorType = RtAudioError::DRIVER_ERROR;\r
-\r
- // Attempt to assign "Pro Audio" characteristic to thread\r
- HMODULE AvrtDll = LoadLibrary( (LPCTSTR) "AVRT.dll" );\r
- if ( AvrtDll ) {\r
- DWORD taskIndex = 0;\r
- TAvSetMmThreadCharacteristicsPtr AvSetMmThreadCharacteristicsPtr = ( TAvSetMmThreadCharacteristicsPtr ) GetProcAddress( AvrtDll, "AvSetMmThreadCharacteristicsW" );\r
- AvSetMmThreadCharacteristicsPtr( L"Pro Audio", &taskIndex );\r
- FreeLibrary( AvrtDll );\r
- }\r
-\r
- // start capture stream if applicable\r
- if ( captureAudioClient ) {\r
- hr = captureAudioClient->GetMixFormat( &captureFormat );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to retrieve device mix format.";\r
- goto Exit;\r
- }\r
-\r
- captureSrRatio = ( ( float ) captureFormat->nSamplesPerSec / stream_.sampleRate );\r
-\r
- // initialize capture stream according to desire buffer size\r
- float desiredBufferSize = stream_.bufferSize * captureSrRatio;\r
- REFERENCE_TIME desiredBufferPeriod = ( REFERENCE_TIME ) ( ( float ) desiredBufferSize * 10000000 / captureFormat->nSamplesPerSec );\r
-\r
- if ( !captureClient ) {\r
- hr = captureAudioClient->Initialize( AUDCLNT_SHAREMODE_SHARED,\r
- AUDCLNT_STREAMFLAGS_EVENTCALLBACK,\r
- desiredBufferPeriod,\r
- desiredBufferPeriod,\r
- captureFormat,\r
- NULL );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to initialize capture audio client.";\r
- goto Exit;\r
- }\r
-\r
- hr = captureAudioClient->GetService( __uuidof( IAudioCaptureClient ),\r
- ( void** ) &captureClient );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to retrieve capture client handle.";\r
- goto Exit;\r
- }\r
-\r
- // configure captureEvent to trigger on every available capture buffer\r
- captureEvent = CreateEvent( NULL, FALSE, FALSE, NULL );\r
- if ( !captureEvent ) {\r
- errorType = RtAudioError::SYSTEM_ERROR;\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to create capture event.";\r
- goto Exit;\r
- }\r
-\r
- hr = captureAudioClient->SetEventHandle( captureEvent );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to set capture event handle.";\r
- goto Exit;\r
- }\r
-\r
- ( ( WasapiHandle* ) stream_.apiHandle )->captureClient = captureClient;\r
- ( ( WasapiHandle* ) stream_.apiHandle )->captureEvent = captureEvent;\r
- }\r
-\r
- unsigned int inBufferSize = 0;\r
- hr = captureAudioClient->GetBufferSize( &inBufferSize );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to get capture buffer size.";\r
- goto Exit;\r
- }\r
-\r
- // scale outBufferSize according to stream->user sample rate ratio\r
- unsigned int outBufferSize = ( unsigned int ) ( stream_.bufferSize * captureSrRatio ) * stream_.nDeviceChannels[INPUT];\r
- inBufferSize *= stream_.nDeviceChannels[INPUT];\r
-\r
- // set captureBuffer size\r
- captureBuffer.setBufferSize( inBufferSize + outBufferSize, formatBytes( stream_.deviceFormat[INPUT] ) );\r
-\r
- // reset the capture stream\r
- hr = captureAudioClient->Reset();\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to reset capture stream.";\r
- goto Exit;\r
- }\r
-\r
- // start the capture stream\r
- hr = captureAudioClient->Start();\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to start capture stream.";\r
- goto Exit;\r
- }\r
- }\r
-\r
- // start render stream if applicable\r
- if ( renderAudioClient ) {\r
- hr = renderAudioClient->GetMixFormat( &renderFormat );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to retrieve device mix format.";\r
- goto Exit;\r
- }\r
-\r
- renderSrRatio = ( ( float ) renderFormat->nSamplesPerSec / stream_.sampleRate );\r
-\r
- // initialize render stream according to desire buffer size\r
- float desiredBufferSize = stream_.bufferSize * renderSrRatio;\r
- REFERENCE_TIME desiredBufferPeriod = ( REFERENCE_TIME ) ( ( float ) desiredBufferSize * 10000000 / renderFormat->nSamplesPerSec );\r
-\r
- if ( !renderClient ) {\r
- hr = renderAudioClient->Initialize( AUDCLNT_SHAREMODE_SHARED,\r
- AUDCLNT_STREAMFLAGS_EVENTCALLBACK,\r
- desiredBufferPeriod,\r
- desiredBufferPeriod,\r
- renderFormat,\r
- NULL );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to initialize render audio client.";\r
- goto Exit;\r
- }\r
-\r
- hr = renderAudioClient->GetService( __uuidof( IAudioRenderClient ),\r
- ( void** ) &renderClient );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to retrieve render client handle.";\r
- goto Exit;\r
- }\r
-\r
- // configure renderEvent to trigger on every available render buffer\r
- renderEvent = CreateEvent( NULL, FALSE, FALSE, NULL );\r
- if ( !renderEvent ) {\r
- errorType = RtAudioError::SYSTEM_ERROR;\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to create render event.";\r
- goto Exit;\r
- }\r
-\r
- hr = renderAudioClient->SetEventHandle( renderEvent );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to set render event handle.";\r
- goto Exit;\r
- }\r
-\r
- ( ( WasapiHandle* ) stream_.apiHandle )->renderClient = renderClient;\r
- ( ( WasapiHandle* ) stream_.apiHandle )->renderEvent = renderEvent;\r
- }\r
-\r
- unsigned int outBufferSize = 0;\r
- hr = renderAudioClient->GetBufferSize( &outBufferSize );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to get render buffer size.";\r
- goto Exit;\r
- }\r
-\r
- // scale inBufferSize according to user->stream sample rate ratio\r
- unsigned int inBufferSize = ( unsigned int ) ( stream_.bufferSize * renderSrRatio ) * stream_.nDeviceChannels[OUTPUT];\r
- outBufferSize *= stream_.nDeviceChannels[OUTPUT];\r
-\r
- // set renderBuffer size\r
- renderBuffer.setBufferSize( inBufferSize + outBufferSize, formatBytes( stream_.deviceFormat[OUTPUT] ) );\r
-\r
- // reset the render stream\r
- hr = renderAudioClient->Reset();\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to reset render stream.";\r
- goto Exit;\r
- }\r
-\r
- // start the render stream\r
- hr = renderAudioClient->Start();\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to start render stream.";\r
- goto Exit;\r
- }\r
- }\r
-\r
- if ( stream_.mode == INPUT ) {\r
- convBuffSize = ( size_t ) ( stream_.bufferSize * captureSrRatio ) * stream_.nDeviceChannels[INPUT] * formatBytes( stream_.deviceFormat[INPUT] );\r
- deviceBuffSize = stream_.bufferSize * stream_.nDeviceChannels[INPUT] * formatBytes( stream_.deviceFormat[INPUT] );\r
- }\r
- else if ( stream_.mode == OUTPUT ) {\r
- convBuffSize = ( size_t ) ( stream_.bufferSize * renderSrRatio ) * stream_.nDeviceChannels[OUTPUT] * formatBytes( stream_.deviceFormat[OUTPUT] );\r
- deviceBuffSize = stream_.bufferSize * stream_.nDeviceChannels[OUTPUT] * formatBytes( stream_.deviceFormat[OUTPUT] );\r
- }\r
- else if ( stream_.mode == DUPLEX ) {\r
- convBuffSize = std::max( ( size_t ) ( stream_.bufferSize * captureSrRatio ) * stream_.nDeviceChannels[INPUT] * formatBytes( stream_.deviceFormat[INPUT] ),\r
- ( size_t ) ( stream_.bufferSize * renderSrRatio ) * stream_.nDeviceChannels[OUTPUT] * formatBytes( stream_.deviceFormat[OUTPUT] ) );\r
- deviceBuffSize = std::max( stream_.bufferSize * stream_.nDeviceChannels[INPUT] * formatBytes( stream_.deviceFormat[INPUT] ),\r
- stream_.bufferSize * stream_.nDeviceChannels[OUTPUT] * formatBytes( stream_.deviceFormat[OUTPUT] ) );\r
- }\r
-\r
- convBuffer = ( char* ) malloc( convBuffSize );\r
- stream_.deviceBuffer = ( char* ) malloc( deviceBuffSize );\r
- if ( !convBuffer || !stream_.deviceBuffer ) {\r
- errorType = RtAudioError::MEMORY_ERROR;\r
- errorText_ = "RtApiWasapi::wasapiThread: Error allocating device buffer memory.";\r
- goto Exit;\r
- }\r
-\r
- // stream process loop\r
- while ( stream_.state != STREAM_STOPPING ) {\r
- if ( !callbackPulled ) {\r
- // Callback Input\r
- // ==============\r
- // 1. Pull callback buffer from inputBuffer\r
- // 2. If 1. was successful: Convert callback buffer to user sample rate and channel count\r
- // Convert callback buffer to user format\r
-\r
- if ( captureAudioClient ) {\r
- // Pull callback buffer from inputBuffer\r
- callbackPulled = captureBuffer.pullBuffer( convBuffer,\r
- ( unsigned int ) ( stream_.bufferSize * captureSrRatio ) * stream_.nDeviceChannels[INPUT],\r
- stream_.deviceFormat[INPUT] );\r
-\r
- if ( callbackPulled ) {\r
- // Convert callback buffer to user sample rate\r
- convertBufferWasapi( stream_.deviceBuffer,\r
- convBuffer,\r
- stream_.nDeviceChannels[INPUT],\r
- captureFormat->nSamplesPerSec,\r
- stream_.sampleRate,\r
- ( unsigned int ) ( stream_.bufferSize * captureSrRatio ),\r
- convBufferSize,\r
- stream_.deviceFormat[INPUT] );\r
-\r
- if ( stream_.doConvertBuffer[INPUT] ) {\r
- // Convert callback buffer to user format\r
- convertBuffer( stream_.userBuffer[INPUT],\r
- stream_.deviceBuffer,\r
- stream_.convertInfo[INPUT] );\r
- }\r
- else {\r
- // no further conversion, simple copy deviceBuffer to userBuffer\r
- memcpy( stream_.userBuffer[INPUT],\r
- stream_.deviceBuffer,\r
- stream_.bufferSize * stream_.nUserChannels[INPUT] * formatBytes( stream_.userFormat ) );\r
- }\r
- }\r
- }\r
- else {\r
- // if there is no capture stream, set callbackPulled flag\r
- callbackPulled = true;\r
- }\r
-\r
- // Execute Callback\r
- // ================\r
- // 1. Execute user callback method\r
- // 2. Handle return value from callback\r
-\r
- // if callback has not requested the stream to stop\r
- if ( callbackPulled && !callbackStopped ) {\r
- // Execute user callback method\r
- callbackResult = callback( stream_.userBuffer[OUTPUT],\r
- stream_.userBuffer[INPUT],\r
- stream_.bufferSize,\r
- getStreamTime(),\r
- captureFlags & AUDCLNT_BUFFERFLAGS_DATA_DISCONTINUITY ? RTAUDIO_INPUT_OVERFLOW : 0,\r
- stream_.callbackInfo.userData );\r
-\r
- // Handle return value from callback\r
- if ( callbackResult == 1 ) {\r
- // instantiate a thread to stop this thread\r
- HANDLE threadHandle = CreateThread( NULL, 0, stopWasapiThread, this, 0, NULL );\r
- if ( !threadHandle ) {\r
- errorType = RtAudioError::THREAD_ERROR;\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to instantiate stream stop thread.";\r
- goto Exit;\r
- }\r
- else if ( !CloseHandle( threadHandle ) ) {\r
- errorType = RtAudioError::THREAD_ERROR;\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to close stream stop thread handle.";\r
- goto Exit;\r
- }\r
-\r
- callbackStopped = true;\r
- }\r
- else if ( callbackResult == 2 ) {\r
- // instantiate a thread to stop this thread\r
- HANDLE threadHandle = CreateThread( NULL, 0, abortWasapiThread, this, 0, NULL );\r
- if ( !threadHandle ) {\r
- errorType = RtAudioError::THREAD_ERROR;\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to instantiate stream abort thread.";\r
- goto Exit;\r
- }\r
- else if ( !CloseHandle( threadHandle ) ) {\r
- errorType = RtAudioError::THREAD_ERROR;\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to close stream abort thread handle.";\r
- goto Exit;\r
- }\r
-\r
- callbackStopped = true;\r
- }\r
- }\r
- }\r
-\r
- // Callback Output\r
- // ===============\r
- // 1. Convert callback buffer to stream format\r
- // 2. Convert callback buffer to stream sample rate and channel count\r
- // 3. Push callback buffer into outputBuffer\r
-\r
- if ( renderAudioClient && callbackPulled ) {\r
- if ( stream_.doConvertBuffer[OUTPUT] ) {\r
- // Convert callback buffer to stream format\r
- convertBuffer( stream_.deviceBuffer,\r
- stream_.userBuffer[OUTPUT],\r
- stream_.convertInfo[OUTPUT] );\r
-\r
- }\r
-\r
- // Convert callback buffer to stream sample rate\r
- convertBufferWasapi( convBuffer,\r
- stream_.deviceBuffer,\r
- stream_.nDeviceChannels[OUTPUT],\r
- stream_.sampleRate,\r
- renderFormat->nSamplesPerSec,\r
- stream_.bufferSize,\r
- convBufferSize,\r
- stream_.deviceFormat[OUTPUT] );\r
-\r
- // Push callback buffer into outputBuffer\r
- callbackPushed = renderBuffer.pushBuffer( convBuffer,\r
- convBufferSize * stream_.nDeviceChannels[OUTPUT],\r
- stream_.deviceFormat[OUTPUT] );\r
- }\r
- else {\r
- // if there is no render stream, set callbackPushed flag\r
- callbackPushed = true;\r
- }\r
-\r
- // Stream Capture\r
- // ==============\r
- // 1. Get capture buffer from stream\r
- // 2. Push capture buffer into inputBuffer\r
- // 3. If 2. was successful: Release capture buffer\r
-\r
- if ( captureAudioClient ) {\r
- // if the callback input buffer was not pulled from captureBuffer, wait for next capture event\r
- if ( !callbackPulled ) {\r
- WaitForSingleObject( captureEvent, INFINITE );\r
- }\r
-\r
- // Get capture buffer from stream\r
- hr = captureClient->GetBuffer( &streamBuffer,\r
- &bufferFrameCount,\r
- &captureFlags, NULL, NULL );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to retrieve capture buffer.";\r
- goto Exit;\r
- }\r
-\r
- if ( bufferFrameCount != 0 ) {\r
- // Push capture buffer into inputBuffer\r
- if ( captureBuffer.pushBuffer( ( char* ) streamBuffer,\r
- bufferFrameCount * stream_.nDeviceChannels[INPUT],\r
- stream_.deviceFormat[INPUT] ) )\r
- {\r
- // Release capture buffer\r
- hr = captureClient->ReleaseBuffer( bufferFrameCount );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to release capture buffer.";\r
- goto Exit;\r
- }\r
- }\r
- else\r
- {\r
- // Inform WASAPI that capture was unsuccessful\r
- hr = captureClient->ReleaseBuffer( 0 );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to release capture buffer.";\r
- goto Exit;\r
- }\r
- }\r
- }\r
- else\r
- {\r
- // Inform WASAPI that capture was unsuccessful\r
- hr = captureClient->ReleaseBuffer( 0 );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to release capture buffer.";\r
- goto Exit;\r
- }\r
- }\r
- }\r
-\r
- // Stream Render\r
- // =============\r
- // 1. Get render buffer from stream\r
- // 2. Pull next buffer from outputBuffer\r
- // 3. If 2. was successful: Fill render buffer with next buffer\r
- // Release render buffer\r
-\r
- if ( renderAudioClient ) {\r
- // if the callback output buffer was not pushed to renderBuffer, wait for next render event\r
- if ( callbackPulled && !callbackPushed ) {\r
- WaitForSingleObject( renderEvent, INFINITE );\r
- }\r
-\r
- // Get render buffer from stream\r
- hr = renderAudioClient->GetBufferSize( &bufferFrameCount );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to retrieve render buffer size.";\r
- goto Exit;\r
- }\r
-\r
- hr = renderAudioClient->GetCurrentPadding( &numFramesPadding );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to retrieve render buffer padding.";\r
- goto Exit;\r
- }\r
-\r
- bufferFrameCount -= numFramesPadding;\r
-\r
- if ( bufferFrameCount != 0 ) {\r
- hr = renderClient->GetBuffer( bufferFrameCount, &streamBuffer );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to retrieve render buffer.";\r
- goto Exit;\r
- }\r
-\r
- // Pull next buffer from outputBuffer\r
- // Fill render buffer with next buffer\r
- if ( renderBuffer.pullBuffer( ( char* ) streamBuffer,\r
- bufferFrameCount * stream_.nDeviceChannels[OUTPUT],\r
- stream_.deviceFormat[OUTPUT] ) )\r
- {\r
- // Release render buffer\r
- hr = renderClient->ReleaseBuffer( bufferFrameCount, 0 );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to release render buffer.";\r
- goto Exit;\r
- }\r
- }\r
- else\r
- {\r
- // Inform WASAPI that render was unsuccessful\r
- hr = renderClient->ReleaseBuffer( 0, 0 );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to release render buffer.";\r
- goto Exit;\r
- }\r
- }\r
- }\r
- else\r
- {\r
- // Inform WASAPI that render was unsuccessful\r
- hr = renderClient->ReleaseBuffer( 0, 0 );\r
- if ( FAILED( hr ) ) {\r
- errorText_ = "RtApiWasapi::wasapiThread: Unable to release render buffer.";\r
- goto Exit;\r
- }\r
- }\r
- }\r
-\r
- // if the callback buffer was pushed renderBuffer reset callbackPulled flag\r
- if ( callbackPushed ) {\r
- callbackPulled = false;\r
- // tick stream time\r
- RtApi::tickStreamTime();\r
- }\r
-\r
- }\r
-\r
-Exit:\r
- // clean up\r
- CoTaskMemFree( captureFormat );\r
- CoTaskMemFree( renderFormat );\r
-\r
- free ( convBuffer );\r
-\r
- CoUninitialize();\r
-\r
- // update stream state\r
- stream_.state = STREAM_STOPPED;\r
-\r
- if ( errorText_.empty() )\r
- return;\r
- else\r
- error( errorType );\r
-}\r
-\r
-//******************** End of __WINDOWS_WASAPI__ *********************//\r
-#endif\r
-\r
-\r
-#if defined(__WINDOWS_DS__) // Windows DirectSound API\r
-\r
-// Modified by Robin Davies, October 2005\r
-// - Improvements to DirectX pointer chasing.\r
-// - Bug fix for non-power-of-two Asio granularity used by Edirol PCR-A30.\r
-// - Auto-call CoInitialize for DSOUND and ASIO platforms.\r
-// Various revisions for RtAudio 4.0 by Gary Scavone, April 2007\r
-// Changed device query structure for RtAudio 4.0.7, January 2010\r
-\r
-#include <dsound.h>\r
-#include <assert.h>\r
-#include <algorithm>\r
-\r
-#if defined(__MINGW32__)\r
- // missing from latest mingw winapi\r
-#define WAVE_FORMAT_96M08 0x00010000 /* 96 kHz, Mono, 8-bit */\r
-#define WAVE_FORMAT_96S08 0x00020000 /* 96 kHz, Stereo, 8-bit */\r
-#define WAVE_FORMAT_96M16 0x00040000 /* 96 kHz, Mono, 16-bit */\r
-#define WAVE_FORMAT_96S16 0x00080000 /* 96 kHz, Stereo, 16-bit */\r
-#endif\r
-\r
-#define MINIMUM_DEVICE_BUFFER_SIZE 32768\r
-\r
-#ifdef _MSC_VER // if Microsoft Visual C++\r
-#pragma comment( lib, "winmm.lib" ) // then, auto-link winmm.lib. Otherwise, it has to be added manually.\r
-#endif\r
-\r
-static inline DWORD dsPointerBetween( DWORD pointer, DWORD laterPointer, DWORD earlierPointer, DWORD bufferSize )\r
-{\r
- if ( pointer > bufferSize ) pointer -= bufferSize;\r
- if ( laterPointer < earlierPointer ) laterPointer += bufferSize;\r
- if ( pointer < earlierPointer ) pointer += bufferSize;\r
- return pointer >= earlierPointer && pointer < laterPointer;\r
-}\r
-\r
-// A structure to hold various information related to the DirectSound\r
-// API implementation.\r
-struct DsHandle {\r
- unsigned int drainCounter; // Tracks callback counts when draining\r
- bool internalDrain; // Indicates if stop is initiated from callback or not.\r
- void *id[2];\r
- void *buffer[2];\r
- bool xrun[2];\r
- UINT bufferPointer[2];\r
- DWORD dsBufferSize[2];\r
- DWORD dsPointerLeadTime[2]; // the number of bytes ahead of the safe pointer to lead by.\r
- HANDLE condition;\r
-\r
- DsHandle()\r
- :drainCounter(0), internalDrain(false) { id[0] = 0; id[1] = 0; buffer[0] = 0; buffer[1] = 0; xrun[0] = false; xrun[1] = false; bufferPointer[0] = 0; bufferPointer[1] = 0; }\r
-};\r
-\r
-// Declarations for utility functions, callbacks, and structures\r
-// specific to the DirectSound implementation.\r
-static BOOL CALLBACK deviceQueryCallback( LPGUID lpguid,\r
- LPCTSTR description,\r
- LPCTSTR module,\r
- LPVOID lpContext );\r
-\r
-static const char* getErrorString( int code );\r
-\r
-static unsigned __stdcall callbackHandler( void *ptr );\r
-\r
-struct DsDevice {\r
- LPGUID id[2];\r
- bool validId[2];\r
- bool found;\r
- std::string name;\r
-\r
- DsDevice()\r
- : found(false) { validId[0] = false; validId[1] = false; }\r
-};\r
-\r
-struct DsProbeData {\r
- bool isInput;\r
- std::vector<struct DsDevice>* dsDevices;\r
-};\r
-\r
-RtApiDs :: RtApiDs()\r
-{\r
- // Dsound will run both-threaded. If CoInitialize fails, then just\r
- // accept whatever the mainline chose for a threading model.\r
- coInitialized_ = false;\r
- HRESULT hr = CoInitialize( NULL );\r
- if ( !FAILED( hr ) ) coInitialized_ = true;\r
-}\r
-\r
-RtApiDs :: ~RtApiDs()\r
-{\r
- if ( coInitialized_ ) CoUninitialize(); // balanced call.\r
- if ( stream_.state != STREAM_CLOSED ) closeStream();\r
-}\r
-\r
-// The DirectSound default output is always the first device.\r
-unsigned int RtApiDs :: getDefaultOutputDevice( void )\r
-{\r
- return 0;\r
-}\r
-\r
-// The DirectSound default input is always the first input device,\r
-// which is the first capture device enumerated.\r
-unsigned int RtApiDs :: getDefaultInputDevice( void )\r
-{\r
- return 0;\r
-}\r
-\r
-unsigned int RtApiDs :: getDeviceCount( void )\r
-{\r
- // Set query flag for previously found devices to false, so that we\r
- // can check for any devices that have disappeared.\r
- for ( unsigned int i=0; i<dsDevices.size(); i++ )\r
- dsDevices[i].found = false;\r
-\r
- // Query DirectSound devices.\r
- struct DsProbeData probeInfo;\r
- probeInfo.isInput = false;\r
- probeInfo.dsDevices = &dsDevices;\r
- HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &probeInfo );\r
- if ( FAILED( result ) ) {\r
- errorStream_ << "RtApiDs::getDeviceCount: error (" << getErrorString( result ) << ") enumerating output devices!";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- }\r
-\r
- // Query DirectSoundCapture devices.\r
- probeInfo.isInput = true;\r
- result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &probeInfo );\r
- if ( FAILED( result ) ) {\r
- errorStream_ << "RtApiDs::getDeviceCount: error (" << getErrorString( result ) << ") enumerating input devices!";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- }\r
-\r
- // Clean out any devices that may have disappeared (code update submitted by Eli Zehngut).\r
- for ( unsigned int i=0; i<dsDevices.size(); ) {\r
- if ( dsDevices[i].found == false ) dsDevices.erase( dsDevices.begin() + i );\r
- else i++;\r
- }\r
-\r
- return static_cast<unsigned int>(dsDevices.size());\r
-}\r
-\r
-RtAudio::DeviceInfo RtApiDs :: getDeviceInfo( unsigned int device )\r
-{\r
- RtAudio::DeviceInfo info;\r
- info.probed = false;\r
-\r
- if ( dsDevices.size() == 0 ) {\r
- // Force a query of all devices\r
- getDeviceCount();\r
- if ( dsDevices.size() == 0 ) {\r
- errorText_ = "RtApiDs::getDeviceInfo: no devices found!";\r
- error( RtAudioError::INVALID_USE );\r
- return info;\r
- }\r
- }\r
-\r
- if ( device >= dsDevices.size() ) {\r
- errorText_ = "RtApiDs::getDeviceInfo: device ID is invalid!";\r
- error( RtAudioError::INVALID_USE );\r
- return info;\r
- }\r
-\r
- HRESULT result;\r
- if ( dsDevices[ device ].validId[0] == false ) goto probeInput;\r
-\r
- LPDIRECTSOUND output;\r
- DSCAPS outCaps;\r
- result = DirectSoundCreate( dsDevices[ device ].id[0], &output, NULL );\r
- if ( FAILED( result ) ) {\r
- errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") opening output device (" << dsDevices[ device ].name << ")!";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- goto probeInput;\r
- }\r
-\r
- outCaps.dwSize = sizeof( outCaps );\r
- result = output->GetCaps( &outCaps );\r
- if ( FAILED( result ) ) {\r
- output->Release();\r
- errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") getting capabilities!";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- goto probeInput;\r
- }\r
-\r
- // Get output channel information.\r
- info.outputChannels = ( outCaps.dwFlags & DSCAPS_PRIMARYSTEREO ) ? 2 : 1;\r
-\r
- // Get sample rate information.\r
- info.sampleRates.clear();\r
- for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {\r
- if ( SAMPLE_RATES[k] >= (unsigned int) outCaps.dwMinSecondarySampleRate &&\r
- SAMPLE_RATES[k] <= (unsigned int) outCaps.dwMaxSecondarySampleRate ) {\r
- info.sampleRates.push_back( SAMPLE_RATES[k] );\r
-\r
- if ( !info.preferredSampleRate || ( SAMPLE_RATES[k] <= 48000 && SAMPLE_RATES[k] > info.preferredSampleRate ) )\r
- info.preferredSampleRate = SAMPLE_RATES[k];\r
- }\r
- }\r
-\r
- // Get format information.\r
- if ( outCaps.dwFlags & DSCAPS_PRIMARY16BIT ) info.nativeFormats |= RTAUDIO_SINT16;\r
- if ( outCaps.dwFlags & DSCAPS_PRIMARY8BIT ) info.nativeFormats |= RTAUDIO_SINT8;\r
-\r
- output->Release();\r
-\r
- if ( getDefaultOutputDevice() == device )\r
- info.isDefaultOutput = true;\r
-\r
- if ( dsDevices[ device ].validId[1] == false ) {\r
- info.name = dsDevices[ device ].name;\r
- info.probed = true;\r
- return info;\r
- }\r
-\r
- probeInput:\r
-\r
- LPDIRECTSOUNDCAPTURE input;\r
- result = DirectSoundCaptureCreate( dsDevices[ device ].id[1], &input, NULL );\r
- if ( FAILED( result ) ) {\r
- errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") opening input device (" << dsDevices[ device ].name << ")!";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
-\r
- DSCCAPS inCaps;\r
- inCaps.dwSize = sizeof( inCaps );\r
- result = input->GetCaps( &inCaps );\r
- if ( FAILED( result ) ) {\r
- input->Release();\r
- errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") getting object capabilities (" << dsDevices[ device ].name << ")!";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
-\r
- // Get input channel information.\r
- info.inputChannels = inCaps.dwChannels;\r
-\r
- // Get sample rate and format information.\r
- std::vector<unsigned int> rates;\r
- if ( inCaps.dwChannels >= 2 ) {\r
- if ( inCaps.dwFormats & WAVE_FORMAT_1S16 ) info.nativeFormats |= RTAUDIO_SINT16;\r
- if ( inCaps.dwFormats & WAVE_FORMAT_2S16 ) info.nativeFormats |= RTAUDIO_SINT16;\r
- if ( inCaps.dwFormats & WAVE_FORMAT_4S16 ) info.nativeFormats |= RTAUDIO_SINT16;\r
- if ( inCaps.dwFormats & WAVE_FORMAT_96S16 ) info.nativeFormats |= RTAUDIO_SINT16;\r
- if ( inCaps.dwFormats & WAVE_FORMAT_1S08 ) info.nativeFormats |= RTAUDIO_SINT8;\r
- if ( inCaps.dwFormats & WAVE_FORMAT_2S08 ) info.nativeFormats |= RTAUDIO_SINT8;\r
- if ( inCaps.dwFormats & WAVE_FORMAT_4S08 ) info.nativeFormats |= RTAUDIO_SINT8;\r
- if ( inCaps.dwFormats & WAVE_FORMAT_96S08 ) info.nativeFormats |= RTAUDIO_SINT8;\r
-\r
- if ( info.nativeFormats & RTAUDIO_SINT16 ) {\r
- if ( inCaps.dwFormats & WAVE_FORMAT_1S16 ) rates.push_back( 11025 );\r
- if ( inCaps.dwFormats & WAVE_FORMAT_2S16 ) rates.push_back( 22050 );\r
- if ( inCaps.dwFormats & WAVE_FORMAT_4S16 ) rates.push_back( 44100 );\r
- if ( inCaps.dwFormats & WAVE_FORMAT_96S16 ) rates.push_back( 96000 );\r
- }\r
- else if ( info.nativeFormats & RTAUDIO_SINT8 ) {\r
- if ( inCaps.dwFormats & WAVE_FORMAT_1S08 ) rates.push_back( 11025 );\r
- if ( inCaps.dwFormats & WAVE_FORMAT_2S08 ) rates.push_back( 22050 );\r
- if ( inCaps.dwFormats & WAVE_FORMAT_4S08 ) rates.push_back( 44100 );\r
- if ( inCaps.dwFormats & WAVE_FORMAT_96S08 ) rates.push_back( 96000 );\r
- }\r
- }\r
- else if ( inCaps.dwChannels == 1 ) {\r
- if ( inCaps.dwFormats & WAVE_FORMAT_1M16 ) info.nativeFormats |= RTAUDIO_SINT16;\r
- if ( inCaps.dwFormats & WAVE_FORMAT_2M16 ) info.nativeFormats |= RTAUDIO_SINT16;\r
- if ( inCaps.dwFormats & WAVE_FORMAT_4M16 ) info.nativeFormats |= RTAUDIO_SINT16;\r
- if ( inCaps.dwFormats & WAVE_FORMAT_96M16 ) info.nativeFormats |= RTAUDIO_SINT16;\r
- if ( inCaps.dwFormats & WAVE_FORMAT_1M08 ) info.nativeFormats |= RTAUDIO_SINT8;\r
- if ( inCaps.dwFormats & WAVE_FORMAT_2M08 ) info.nativeFormats |= RTAUDIO_SINT8;\r
- if ( inCaps.dwFormats & WAVE_FORMAT_4M08 ) info.nativeFormats |= RTAUDIO_SINT8;\r
- if ( inCaps.dwFormats & WAVE_FORMAT_96M08 ) info.nativeFormats |= RTAUDIO_SINT8;\r
-\r
- if ( info.nativeFormats & RTAUDIO_SINT16 ) {\r
- if ( inCaps.dwFormats & WAVE_FORMAT_1M16 ) rates.push_back( 11025 );\r
- if ( inCaps.dwFormats & WAVE_FORMAT_2M16 ) rates.push_back( 22050 );\r
- if ( inCaps.dwFormats & WAVE_FORMAT_4M16 ) rates.push_back( 44100 );\r
- if ( inCaps.dwFormats & WAVE_FORMAT_96M16 ) rates.push_back( 96000 );\r
- }\r
- else if ( info.nativeFormats & RTAUDIO_SINT8 ) {\r
- if ( inCaps.dwFormats & WAVE_FORMAT_1M08 ) rates.push_back( 11025 );\r
- if ( inCaps.dwFormats & WAVE_FORMAT_2M08 ) rates.push_back( 22050 );\r
- if ( inCaps.dwFormats & WAVE_FORMAT_4M08 ) rates.push_back( 44100 );\r
- if ( inCaps.dwFormats & WAVE_FORMAT_96M08 ) rates.push_back( 96000 );\r
- }\r
- }\r
- else info.inputChannels = 0; // technically, this would be an error\r
-\r
- input->Release();\r
-\r
- if ( info.inputChannels == 0 ) return info;\r
-\r
- // Copy the supported rates to the info structure but avoid duplication.\r
- bool found;\r
- for ( unsigned int i=0; i<rates.size(); i++ ) {\r
- found = false;\r
- for ( unsigned int j=0; j<info.sampleRates.size(); j++ ) {\r
- if ( rates[i] == info.sampleRates[j] ) {\r
- found = true;\r
- break;\r
- }\r
- }\r
- if ( found == false ) info.sampleRates.push_back( rates[i] );\r
- }\r
- std::sort( info.sampleRates.begin(), info.sampleRates.end() );\r
-\r
- // If device opens for both playback and capture, we determine the channels.\r
- if ( info.outputChannels > 0 && info.inputChannels > 0 )\r
- info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;\r
-\r
- if ( device == 0 ) info.isDefaultInput = true;\r
-\r
- // Copy name and return.\r
- info.name = dsDevices[ device ].name;\r
- info.probed = true;\r
- return info;\r
-}\r
-\r
-bool RtApiDs :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,\r
- unsigned int firstChannel, unsigned int sampleRate,\r
- RtAudioFormat format, unsigned int *bufferSize,\r
- RtAudio::StreamOptions *options )\r
-{\r
- if ( channels + firstChannel > 2 ) {\r
- errorText_ = "RtApiDs::probeDeviceOpen: DirectSound does not support more than 2 channels per device.";\r
- return FAILURE;\r
- }\r
-\r
- size_t nDevices = dsDevices.size();\r
- if ( nDevices == 0 ) {\r
- // This should not happen because a check is made before this function is called.\r
- errorText_ = "RtApiDs::probeDeviceOpen: no devices found!";\r
- return FAILURE;\r
- }\r
-\r
- if ( device >= nDevices ) {\r
- // This should not happen because a check is made before this function is called.\r
- errorText_ = "RtApiDs::probeDeviceOpen: device ID is invalid!";\r
- return FAILURE;\r
- }\r
-\r
- if ( mode == OUTPUT ) {\r
- if ( dsDevices[ device ].validId[0] == false ) {\r
- errorStream_ << "RtApiDs::probeDeviceOpen: device (" << device << ") does not support output!";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
- }\r
- else { // mode == INPUT\r
- if ( dsDevices[ device ].validId[1] == false ) {\r
- errorStream_ << "RtApiDs::probeDeviceOpen: device (" << device << ") does not support input!";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
- }\r
-\r
- // According to a note in PortAudio, using GetDesktopWindow()\r
- // instead of GetForegroundWindow() is supposed to avoid problems\r
- // that occur when the application's window is not the foreground\r
- // window. Also, if the application window closes before the\r
- // DirectSound buffer, DirectSound can crash. In the past, I had\r
- // problems when using GetDesktopWindow() but it seems fine now\r
- // (January 2010). I'll leave it commented here.\r
- // HWND hWnd = GetForegroundWindow();\r
- HWND hWnd = GetDesktopWindow();\r
-\r
- // Check the numberOfBuffers parameter and limit the lowest value to\r
- // two. This is a judgement call and a value of two is probably too\r
- // low for capture, but it should work for playback.\r
- int nBuffers = 0;\r
- if ( options ) nBuffers = options->numberOfBuffers;\r
- if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) nBuffers = 2;\r
- if ( nBuffers < 2 ) nBuffers = 3;\r
-\r
- // Check the lower range of the user-specified buffer size and set\r
- // (arbitrarily) to a lower bound of 32.\r
- if ( *bufferSize < 32 ) *bufferSize = 32;\r
-\r
- // Create the wave format structure. The data format setting will\r
- // be determined later.\r
- WAVEFORMATEX waveFormat;\r
- ZeroMemory( &waveFormat, sizeof(WAVEFORMATEX) );\r
- waveFormat.wFormatTag = WAVE_FORMAT_PCM;\r
- waveFormat.nChannels = channels + firstChannel;\r
- waveFormat.nSamplesPerSec = (unsigned long) sampleRate;\r
-\r
- // Determine the device buffer size. By default, we'll use the value\r
- // defined above (32K), but we will grow it to make allowances for\r
- // very large software buffer sizes.\r
- DWORD dsBufferSize = MINIMUM_DEVICE_BUFFER_SIZE;\r
- DWORD dsPointerLeadTime = 0;\r
-\r
- void *ohandle = 0, *bhandle = 0;\r
- HRESULT result;\r
- if ( mode == OUTPUT ) {\r
-\r
- LPDIRECTSOUND output;\r
- result = DirectSoundCreate( dsDevices[ device ].id[0], &output, NULL );\r
- if ( FAILED( result ) ) {\r
- errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") opening output device (" << dsDevices[ device ].name << ")!";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- DSCAPS outCaps;\r
- outCaps.dwSize = sizeof( outCaps );\r
- result = output->GetCaps( &outCaps );\r
- if ( FAILED( result ) ) {\r
- output->Release();\r
- errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting capabilities (" << dsDevices[ device ].name << ")!";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // Check channel information.\r
- if ( channels + firstChannel == 2 && !( outCaps.dwFlags & DSCAPS_PRIMARYSTEREO ) ) {\r
- errorStream_ << "RtApiDs::getDeviceInfo: the output device (" << dsDevices[ device ].name << ") does not support stereo playback.";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // Check format information. Use 16-bit format unless not\r
- // supported or user requests 8-bit.\r
- if ( outCaps.dwFlags & DSCAPS_PRIMARY16BIT &&\r
- !( format == RTAUDIO_SINT8 && outCaps.dwFlags & DSCAPS_PRIMARY8BIT ) ) {\r
- waveFormat.wBitsPerSample = 16;\r
- stream_.deviceFormat[mode] = RTAUDIO_SINT16;\r
- }\r
- else {\r
- waveFormat.wBitsPerSample = 8;\r
- stream_.deviceFormat[mode] = RTAUDIO_SINT8;\r
- }\r
- stream_.userFormat = format;\r
-\r
- // Update wave format structure and buffer information.\r
- waveFormat.nBlockAlign = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;\r
- waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;\r
- dsPointerLeadTime = nBuffers * (*bufferSize) * (waveFormat.wBitsPerSample / 8) * channels;\r
-\r
- // If the user wants an even bigger buffer, increase the device buffer size accordingly.\r
- while ( dsPointerLeadTime * 2U > dsBufferSize )\r
- dsBufferSize *= 2;\r
-\r
- // Set cooperative level to DSSCL_EXCLUSIVE ... sound stops when window focus changes.\r
- // result = output->SetCooperativeLevel( hWnd, DSSCL_EXCLUSIVE );\r
- // Set cooperative level to DSSCL_PRIORITY ... sound remains when window focus changes.\r
- result = output->SetCooperativeLevel( hWnd, DSSCL_PRIORITY );\r
- if ( FAILED( result ) ) {\r
- output->Release();\r
- errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") setting cooperative level (" << dsDevices[ device ].name << ")!";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // Even though we will write to the secondary buffer, we need to\r
- // access the primary buffer to set the correct output format\r
- // (since the default is 8-bit, 22 kHz!). Setup the DS primary\r
- // buffer description.\r
- DSBUFFERDESC bufferDescription;\r
- ZeroMemory( &bufferDescription, sizeof( DSBUFFERDESC ) );\r
- bufferDescription.dwSize = sizeof( DSBUFFERDESC );\r
- bufferDescription.dwFlags = DSBCAPS_PRIMARYBUFFER;\r
-\r
- // Obtain the primary buffer\r
- LPDIRECTSOUNDBUFFER buffer;\r
- result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );\r
- if ( FAILED( result ) ) {\r
- output->Release();\r
- errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") accessing primary buffer (" << dsDevices[ device ].name << ")!";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // Set the primary DS buffer sound format.\r
- result = buffer->SetFormat( &waveFormat );\r
- if ( FAILED( result ) ) {\r
- output->Release();\r
- errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") setting primary buffer format (" << dsDevices[ device ].name << ")!";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // Setup the secondary DS buffer description.\r
- ZeroMemory( &bufferDescription, sizeof( DSBUFFERDESC ) );\r
- bufferDescription.dwSize = sizeof( DSBUFFERDESC );\r
- bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |\r
- DSBCAPS_GLOBALFOCUS |\r
- DSBCAPS_GETCURRENTPOSITION2 |\r
- DSBCAPS_LOCHARDWARE ); // Force hardware mixing\r
- bufferDescription.dwBufferBytes = dsBufferSize;\r
- bufferDescription.lpwfxFormat = &waveFormat;\r
-\r
- // Try to create the secondary DS buffer. If that doesn't work,\r
- // try to use software mixing. Otherwise, there's a problem.\r
- result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );\r
- if ( FAILED( result ) ) {\r
- bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |\r
- DSBCAPS_GLOBALFOCUS |\r
- DSBCAPS_GETCURRENTPOSITION2 |\r
- DSBCAPS_LOCSOFTWARE ); // Force software mixing\r
- result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );\r
- if ( FAILED( result ) ) {\r
- output->Release();\r
- errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") creating secondary buffer (" << dsDevices[ device ].name << ")!";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
- }\r
-\r
- // Get the buffer size ... might be different from what we specified.\r
- DSBCAPS dsbcaps;\r
- dsbcaps.dwSize = sizeof( DSBCAPS );\r
- result = buffer->GetCaps( &dsbcaps );\r
- if ( FAILED( result ) ) {\r
- output->Release();\r
- buffer->Release();\r
- errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting buffer settings (" << dsDevices[ device ].name << ")!";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- dsBufferSize = dsbcaps.dwBufferBytes;\r
-\r
- // Lock the DS buffer\r
- LPVOID audioPtr;\r
- DWORD dataLen;\r
- result = buffer->Lock( 0, dsBufferSize, &audioPtr, &dataLen, NULL, NULL, 0 );\r
- if ( FAILED( result ) ) {\r
- output->Release();\r
- buffer->Release();\r
- errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") locking buffer (" << dsDevices[ device ].name << ")!";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // Zero the DS buffer\r
- ZeroMemory( audioPtr, dataLen );\r
-\r
- // Unlock the DS buffer\r
- result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );\r
- if ( FAILED( result ) ) {\r
- output->Release();\r
- buffer->Release();\r
- errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") unlocking buffer (" << dsDevices[ device ].name << ")!";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- ohandle = (void *) output;\r
- bhandle = (void *) buffer;\r
- }\r
-\r
- if ( mode == INPUT ) {\r
-\r
- LPDIRECTSOUNDCAPTURE input;\r
- result = DirectSoundCaptureCreate( dsDevices[ device ].id[1], &input, NULL );\r
- if ( FAILED( result ) ) {\r
- errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") opening input device (" << dsDevices[ device ].name << ")!";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- DSCCAPS inCaps;\r
- inCaps.dwSize = sizeof( inCaps );\r
- result = input->GetCaps( &inCaps );\r
- if ( FAILED( result ) ) {\r
- input->Release();\r
- errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting input capabilities (" << dsDevices[ device ].name << ")!";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // Check channel information.\r
- if ( inCaps.dwChannels < channels + firstChannel ) {\r
- errorText_ = "RtApiDs::getDeviceInfo: the input device does not support requested input channels.";\r
- return FAILURE;\r
- }\r
-\r
- // Check format information. Use 16-bit format unless user\r
- // requests 8-bit.\r
- DWORD deviceFormats;\r
- if ( channels + firstChannel == 2 ) {\r
- deviceFormats = WAVE_FORMAT_1S08 | WAVE_FORMAT_2S08 | WAVE_FORMAT_4S08 | WAVE_FORMAT_96S08;\r
- if ( format == RTAUDIO_SINT8 && inCaps.dwFormats & deviceFormats ) {\r
- waveFormat.wBitsPerSample = 8;\r
- stream_.deviceFormat[mode] = RTAUDIO_SINT8;\r
- }\r
- else { // assume 16-bit is supported\r
- waveFormat.wBitsPerSample = 16;\r
- stream_.deviceFormat[mode] = RTAUDIO_SINT16;\r
- }\r
- }\r
- else { // channel == 1\r
- deviceFormats = WAVE_FORMAT_1M08 | WAVE_FORMAT_2M08 | WAVE_FORMAT_4M08 | WAVE_FORMAT_96M08;\r
- if ( format == RTAUDIO_SINT8 && inCaps.dwFormats & deviceFormats ) {\r
- waveFormat.wBitsPerSample = 8;\r
- stream_.deviceFormat[mode] = RTAUDIO_SINT8;\r
- }\r
- else { // assume 16-bit is supported\r
- waveFormat.wBitsPerSample = 16;\r
- stream_.deviceFormat[mode] = RTAUDIO_SINT16;\r
- }\r
- }\r
- stream_.userFormat = format;\r
-\r
- // Update wave format structure and buffer information.\r
- waveFormat.nBlockAlign = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;\r
- waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;\r
- dsPointerLeadTime = nBuffers * (*bufferSize) * (waveFormat.wBitsPerSample / 8) * channels;\r
-\r
- // If the user wants an even bigger buffer, increase the device buffer size accordingly.\r
- while ( dsPointerLeadTime * 2U > dsBufferSize )\r
- dsBufferSize *= 2;\r
-\r
- // Setup the secondary DS buffer description.\r
- DSCBUFFERDESC bufferDescription;\r
- ZeroMemory( &bufferDescription, sizeof( DSCBUFFERDESC ) );\r
- bufferDescription.dwSize = sizeof( DSCBUFFERDESC );\r
- bufferDescription.dwFlags = 0;\r
- bufferDescription.dwReserved = 0;\r
- bufferDescription.dwBufferBytes = dsBufferSize;\r
- bufferDescription.lpwfxFormat = &waveFormat;\r
-\r
- // Create the capture buffer.\r
- LPDIRECTSOUNDCAPTUREBUFFER buffer;\r
- result = input->CreateCaptureBuffer( &bufferDescription, &buffer, NULL );\r
- if ( FAILED( result ) ) {\r
- input->Release();\r
- errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") creating input buffer (" << dsDevices[ device ].name << ")!";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // Get the buffer size ... might be different from what we specified.\r
- DSCBCAPS dscbcaps;\r
- dscbcaps.dwSize = sizeof( DSCBCAPS );\r
- result = buffer->GetCaps( &dscbcaps );\r
- if ( FAILED( result ) ) {\r
- input->Release();\r
- buffer->Release();\r
- errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting buffer settings (" << dsDevices[ device ].name << ")!";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- dsBufferSize = dscbcaps.dwBufferBytes;\r
-\r
- // NOTE: We could have a problem here if this is a duplex stream\r
- // and the play and capture hardware buffer sizes are different\r
- // (I'm actually not sure if that is a problem or not).\r
- // Currently, we are not verifying that.\r
-\r
- // Lock the capture buffer\r
- LPVOID audioPtr;\r
- DWORD dataLen;\r
- result = buffer->Lock( 0, dsBufferSize, &audioPtr, &dataLen, NULL, NULL, 0 );\r
- if ( FAILED( result ) ) {\r
- input->Release();\r
- buffer->Release();\r
- errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") locking input buffer (" << dsDevices[ device ].name << ")!";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // Zero the buffer\r
- ZeroMemory( audioPtr, dataLen );\r
-\r
- // Unlock the buffer\r
- result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );\r
- if ( FAILED( result ) ) {\r
- input->Release();\r
- buffer->Release();\r
- errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") unlocking input buffer (" << dsDevices[ device ].name << ")!";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- ohandle = (void *) input;\r
- bhandle = (void *) buffer;\r
- }\r
-\r
- // Set various stream parameters\r
- DsHandle *handle = 0;\r
- stream_.nDeviceChannels[mode] = channels + firstChannel;\r
- stream_.nUserChannels[mode] = channels;\r
- stream_.bufferSize = *bufferSize;\r
- stream_.channelOffset[mode] = firstChannel;\r
- stream_.deviceInterleaved[mode] = true;\r
- if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;\r
- else stream_.userInterleaved = true;\r
-\r
- // Set flag for buffer conversion\r
- stream_.doConvertBuffer[mode] = false;\r
- if (stream_.nUserChannels[mode] != stream_.nDeviceChannels[mode])\r
- stream_.doConvertBuffer[mode] = true;\r
- if (stream_.userFormat != stream_.deviceFormat[mode])\r
- stream_.doConvertBuffer[mode] = true;\r
- if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&\r
- stream_.nUserChannels[mode] > 1 )\r
- stream_.doConvertBuffer[mode] = true;\r
-\r
- // Allocate necessary internal buffers\r
- long bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );\r
- stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );\r
- if ( stream_.userBuffer[mode] == NULL ) {\r
- errorText_ = "RtApiDs::probeDeviceOpen: error allocating user buffer memory.";\r
- goto error;\r
- }\r
-\r
- if ( stream_.doConvertBuffer[mode] ) {\r
-\r
- bool makeBuffer = true;\r
- bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );\r
- if ( mode == INPUT ) {\r
- if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {\r
- unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );\r
- if ( bufferBytes <= (long) bytesOut ) makeBuffer = false;\r
- }\r
- }\r
-\r
- if ( makeBuffer ) {\r
- bufferBytes *= *bufferSize;\r
- if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );\r
- stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );\r
- if ( stream_.deviceBuffer == NULL ) {\r
- errorText_ = "RtApiDs::probeDeviceOpen: error allocating device buffer memory.";\r
- goto error;\r
- }\r
- }\r
- }\r
-\r
- // Allocate our DsHandle structures for the stream.\r
- if ( stream_.apiHandle == 0 ) {\r
- try {\r
- handle = new DsHandle;\r
- }\r
- catch ( std::bad_alloc& ) {\r
- errorText_ = "RtApiDs::probeDeviceOpen: error allocating AsioHandle memory.";\r
- goto error;\r
- }\r
-\r
- // Create a manual-reset event.\r
- handle->condition = CreateEvent( NULL, // no security\r
- TRUE, // manual-reset\r
- FALSE, // non-signaled initially\r
- NULL ); // unnamed\r
- stream_.apiHandle = (void *) handle;\r
- }\r
- else\r
- handle = (DsHandle *) stream_.apiHandle;\r
- handle->id[mode] = ohandle;\r
- handle->buffer[mode] = bhandle;\r
- handle->dsBufferSize[mode] = dsBufferSize;\r
- handle->dsPointerLeadTime[mode] = dsPointerLeadTime;\r
-\r
- stream_.device[mode] = device;\r
- stream_.state = STREAM_STOPPED;\r
- if ( stream_.mode == OUTPUT && mode == INPUT )\r
- // We had already set up an output stream.\r
- stream_.mode = DUPLEX;\r
- else\r
- stream_.mode = mode;\r
- stream_.nBuffers = nBuffers;\r
- stream_.sampleRate = sampleRate;\r
-\r
- // Setup the buffer conversion information structure.\r
- if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );\r
-\r
- // Setup the callback thread.\r
- if ( stream_.callbackInfo.isRunning == false ) {\r
- unsigned threadId;\r
- stream_.callbackInfo.isRunning = true;\r
- stream_.callbackInfo.object = (void *) this;\r
- stream_.callbackInfo.thread = _beginthreadex( NULL, 0, &callbackHandler,\r
- &stream_.callbackInfo, 0, &threadId );\r
- if ( stream_.callbackInfo.thread == 0 ) {\r
- errorText_ = "RtApiDs::probeDeviceOpen: error creating callback thread!";\r
- goto error;\r
- }\r
-\r
- // Boost DS thread priority\r
- SetThreadPriority( (HANDLE) stream_.callbackInfo.thread, THREAD_PRIORITY_HIGHEST );\r
- }\r
- return SUCCESS;\r
-\r
- error:\r
- if ( handle ) {\r
- if ( handle->buffer[0] ) { // the object pointer can be NULL and valid\r
- LPDIRECTSOUND object = (LPDIRECTSOUND) handle->id[0];\r
- LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];\r
- if ( buffer ) buffer->Release();\r
- object->Release();\r
- }\r
- if ( handle->buffer[1] ) {\r
- LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handle->id[1];\r
- LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];\r
- if ( buffer ) buffer->Release();\r
- object->Release();\r
- }\r
- CloseHandle( handle->condition );\r
- delete handle;\r
- stream_.apiHandle = 0;\r
- }\r
-\r
- for ( int i=0; i<2; i++ ) {\r
- if ( stream_.userBuffer[i] ) {\r
- free( stream_.userBuffer[i] );\r
- stream_.userBuffer[i] = 0;\r
- }\r
- }\r
-\r
- if ( stream_.deviceBuffer ) {\r
- free( stream_.deviceBuffer );\r
- stream_.deviceBuffer = 0;\r
- }\r
-\r
- stream_.state = STREAM_CLOSED;\r
- return FAILURE;\r
-}\r
-\r
-void RtApiDs :: closeStream()\r
-{\r
- if ( stream_.state == STREAM_CLOSED ) {\r
- errorText_ = "RtApiDs::closeStream(): no open stream to close!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- // Stop the callback thread.\r
- stream_.callbackInfo.isRunning = false;\r
- WaitForSingleObject( (HANDLE) stream_.callbackInfo.thread, INFINITE );\r
- CloseHandle( (HANDLE) stream_.callbackInfo.thread );\r
-\r
- DsHandle *handle = (DsHandle *) stream_.apiHandle;\r
- if ( handle ) {\r
- if ( handle->buffer[0] ) { // the object pointer can be NULL and valid\r
- LPDIRECTSOUND object = (LPDIRECTSOUND) handle->id[0];\r
- LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];\r
- if ( buffer ) {\r
- buffer->Stop();\r
- buffer->Release();\r
- }\r
- object->Release();\r
- }\r
- if ( handle->buffer[1] ) {\r
- LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handle->id[1];\r
- LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];\r
- if ( buffer ) {\r
- buffer->Stop();\r
- buffer->Release();\r
- }\r
- object->Release();\r
- }\r
- CloseHandle( handle->condition );\r
- delete handle;\r
- stream_.apiHandle = 0;\r
- }\r
-\r
- for ( int i=0; i<2; i++ ) {\r
- if ( stream_.userBuffer[i] ) {\r
- free( stream_.userBuffer[i] );\r
- stream_.userBuffer[i] = 0;\r
- }\r
- }\r
-\r
- if ( stream_.deviceBuffer ) {\r
- free( stream_.deviceBuffer );\r
- stream_.deviceBuffer = 0;\r
- }\r
-\r
- stream_.mode = UNINITIALIZED;\r
- stream_.state = STREAM_CLOSED;\r
-}\r
-\r
-void RtApiDs :: startStream()\r
-{\r
- verifyStream();\r
- if ( stream_.state == STREAM_RUNNING ) {\r
- errorText_ = "RtApiDs::startStream(): the stream is already running!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- DsHandle *handle = (DsHandle *) stream_.apiHandle;\r
-\r
- // Increase scheduler frequency on lesser windows (a side-effect of\r
- // increasing timer accuracy). On greater windows (Win2K or later),\r
- // this is already in effect.\r
- timeBeginPeriod( 1 );\r
-\r
- buffersRolling = false;\r
- duplexPrerollBytes = 0;\r
-\r
- if ( stream_.mode == DUPLEX ) {\r
- // 0.5 seconds of silence in DUPLEX mode while the devices spin up and synchronize.\r
- duplexPrerollBytes = (int) ( 0.5 * stream_.sampleRate * formatBytes( stream_.deviceFormat[1] ) * stream_.nDeviceChannels[1] );\r
- }\r
-\r
- HRESULT result = 0;\r
- if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {\r
-\r
- LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];\r
- result = buffer->Play( 0, 0, DSBPLAY_LOOPING );\r
- if ( FAILED( result ) ) {\r
- errorStream_ << "RtApiDs::startStream: error (" << getErrorString( result ) << ") starting output buffer!";\r
- errorText_ = errorStream_.str();\r
- goto unlock;\r
- }\r
- }\r
-\r
- if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {\r
-\r
- LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];\r
- result = buffer->Start( DSCBSTART_LOOPING );\r
- if ( FAILED( result ) ) {\r
- errorStream_ << "RtApiDs::startStream: error (" << getErrorString( result ) << ") starting input buffer!";\r
- errorText_ = errorStream_.str();\r
- goto unlock;\r
- }\r
- }\r
-\r
- handle->drainCounter = 0;\r
- handle->internalDrain = false;\r
- ResetEvent( handle->condition );\r
- stream_.state = STREAM_RUNNING;\r
-\r
- unlock:\r
- if ( FAILED( result ) ) error( RtAudioError::SYSTEM_ERROR );\r
-}\r
-\r
-void RtApiDs :: stopStream()\r
-{\r
- verifyStream();\r
- if ( stream_.state == STREAM_STOPPED ) {\r
- errorText_ = "RtApiDs::stopStream(): the stream is already stopped!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- HRESULT result = 0;\r
- LPVOID audioPtr;\r
- DWORD dataLen;\r
- DsHandle *handle = (DsHandle *) stream_.apiHandle;\r
- if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {\r
- if ( handle->drainCounter == 0 ) {\r
- handle->drainCounter = 2;\r
- WaitForSingleObject( handle->condition, INFINITE ); // block until signaled\r
- }\r
-\r
- stream_.state = STREAM_STOPPED;\r
-\r
- MUTEX_LOCK( &stream_.mutex );\r
-\r
- // Stop the buffer and clear memory\r
- LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];\r
- result = buffer->Stop();\r
- if ( FAILED( result ) ) {\r
- errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") stopping output buffer!";\r
- errorText_ = errorStream_.str();\r
- goto unlock;\r
- }\r
-\r
- // Lock the buffer and clear it so that if we start to play again,\r
- // we won't have old data playing.\r
- result = buffer->Lock( 0, handle->dsBufferSize[0], &audioPtr, &dataLen, NULL, NULL, 0 );\r
- if ( FAILED( result ) ) {\r
- errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") locking output buffer!";\r
- errorText_ = errorStream_.str();\r
- goto unlock;\r
- }\r
-\r
- // Zero the DS buffer\r
- ZeroMemory( audioPtr, dataLen );\r
-\r
- // Unlock the DS buffer\r
- result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );\r
- if ( FAILED( result ) ) {\r
- errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") unlocking output buffer!";\r
- errorText_ = errorStream_.str();\r
- goto unlock;\r
- }\r
-\r
- // If we start playing again, we must begin at beginning of buffer.\r
- handle->bufferPointer[0] = 0;\r
- }\r
-\r
- if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {\r
- LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];\r
- audioPtr = NULL;\r
- dataLen = 0;\r
-\r
- stream_.state = STREAM_STOPPED;\r
-\r
- if ( stream_.mode != DUPLEX )\r
- MUTEX_LOCK( &stream_.mutex );\r
-\r
- result = buffer->Stop();\r
- if ( FAILED( result ) ) {\r
- errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") stopping input buffer!";\r
- errorText_ = errorStream_.str();\r
- goto unlock;\r
- }\r
-\r
- // Lock the buffer and clear it so that if we start to play again,\r
- // we won't have old data playing.\r
- result = buffer->Lock( 0, handle->dsBufferSize[1], &audioPtr, &dataLen, NULL, NULL, 0 );\r
- if ( FAILED( result ) ) {\r
- errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") locking input buffer!";\r
- errorText_ = errorStream_.str();\r
- goto unlock;\r
- }\r
-\r
- // Zero the DS buffer\r
- ZeroMemory( audioPtr, dataLen );\r
-\r
- // Unlock the DS buffer\r
- result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );\r
- if ( FAILED( result ) ) {\r
- errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") unlocking input buffer!";\r
- errorText_ = errorStream_.str();\r
- goto unlock;\r
- }\r
-\r
- // If we start recording again, we must begin at beginning of buffer.\r
- handle->bufferPointer[1] = 0;\r
- }\r
-\r
- unlock:\r
- timeEndPeriod( 1 ); // revert to normal scheduler frequency on lesser windows.\r
- MUTEX_UNLOCK( &stream_.mutex );\r
-\r
- if ( FAILED( result ) ) error( RtAudioError::SYSTEM_ERROR );\r
-}\r
-\r
-void RtApiDs :: abortStream()\r
-{\r
- verifyStream();\r
- if ( stream_.state == STREAM_STOPPED ) {\r
- errorText_ = "RtApiDs::abortStream(): the stream is already stopped!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- DsHandle *handle = (DsHandle *) stream_.apiHandle;\r
- handle->drainCounter = 2;\r
-\r
- stopStream();\r
-}\r
-\r
-void RtApiDs :: callbackEvent()\r
-{\r
- if ( stream_.state == STREAM_STOPPED || stream_.state == STREAM_STOPPING ) {\r
- Sleep( 50 ); // sleep 50 milliseconds\r
- return;\r
- }\r
-\r
- if ( stream_.state == STREAM_CLOSED ) {\r
- errorText_ = "RtApiDs::callbackEvent(): the stream is closed ... this shouldn't happen!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;\r
- DsHandle *handle = (DsHandle *) stream_.apiHandle;\r
-\r
- // Check if we were draining the stream and signal is finished.\r
- if ( handle->drainCounter > stream_.nBuffers + 2 ) {\r
-\r
- stream_.state = STREAM_STOPPING;\r
- if ( handle->internalDrain == false )\r
- SetEvent( handle->condition );\r
- else\r
- stopStream();\r
- return;\r
- }\r
-\r
- // Invoke user callback to get fresh output data UNLESS we are\r
- // draining stream.\r
- if ( handle->drainCounter == 0 ) {\r
- RtAudioCallback callback = (RtAudioCallback) info->callback;\r
- double streamTime = getStreamTime();\r
- RtAudioStreamStatus status = 0;\r
- if ( stream_.mode != INPUT && handle->xrun[0] == true ) {\r
- status |= RTAUDIO_OUTPUT_UNDERFLOW;\r
- handle->xrun[0] = false;\r
- }\r
- if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {\r
- status |= RTAUDIO_INPUT_OVERFLOW;\r
- handle->xrun[1] = false;\r
- }\r
- int cbReturnValue = callback( stream_.userBuffer[0], stream_.userBuffer[1],\r
- stream_.bufferSize, streamTime, status, info->userData );\r
- if ( cbReturnValue == 2 ) {\r
- stream_.state = STREAM_STOPPING;\r
- handle->drainCounter = 2;\r
- abortStream();\r
- return;\r
- }\r
- else if ( cbReturnValue == 1 ) {\r
- handle->drainCounter = 1;\r
- handle->internalDrain = true;\r
- }\r
- }\r
-\r
- HRESULT result;\r
- DWORD currentWritePointer, safeWritePointer;\r
- DWORD currentReadPointer, safeReadPointer;\r
- UINT nextWritePointer;\r
-\r
- LPVOID buffer1 = NULL;\r
- LPVOID buffer2 = NULL;\r
- DWORD bufferSize1 = 0;\r
- DWORD bufferSize2 = 0;\r
-\r
- char *buffer;\r
- long bufferBytes;\r
-\r
- MUTEX_LOCK( &stream_.mutex );\r
- if ( stream_.state == STREAM_STOPPED ) {\r
- MUTEX_UNLOCK( &stream_.mutex );\r
- return;\r
- }\r
-\r
- if ( buffersRolling == false ) {\r
- if ( stream_.mode == DUPLEX ) {\r
- //assert( handle->dsBufferSize[0] == handle->dsBufferSize[1] );\r
-\r
- // It takes a while for the devices to get rolling. As a result,\r
- // there's no guarantee that the capture and write device pointers\r
- // will move in lockstep. Wait here for both devices to start\r
- // rolling, and then set our buffer pointers accordingly.\r
- // e.g. Crystal Drivers: the capture buffer starts up 5700 to 9600\r
- // bytes later than the write buffer.\r
-\r
- // Stub: a serious risk of having a pre-emptive scheduling round\r
- // take place between the two GetCurrentPosition calls... but I'm\r
- // really not sure how to solve the problem. Temporarily boost to\r
- // Realtime priority, maybe; but I'm not sure what priority the\r
- // DirectSound service threads run at. We *should* be roughly\r
- // within a ms or so of correct.\r
-\r
- LPDIRECTSOUNDBUFFER dsWriteBuffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];\r
- LPDIRECTSOUNDCAPTUREBUFFER dsCaptureBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];\r
-\r
- DWORD startSafeWritePointer, startSafeReadPointer;\r
-\r
- result = dsWriteBuffer->GetCurrentPosition( NULL, &startSafeWritePointer );\r
- if ( FAILED( result ) ) {\r
- errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";\r
- errorText_ = errorStream_.str();\r
- MUTEX_UNLOCK( &stream_.mutex );\r
- error( RtAudioError::SYSTEM_ERROR );\r
- return;\r
- }\r
- result = dsCaptureBuffer->GetCurrentPosition( NULL, &startSafeReadPointer );\r
- if ( FAILED( result ) ) {\r
- errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";\r
- errorText_ = errorStream_.str();\r
- MUTEX_UNLOCK( &stream_.mutex );\r
- error( RtAudioError::SYSTEM_ERROR );\r
- return;\r
- }\r
- while ( true ) {\r
- result = dsWriteBuffer->GetCurrentPosition( NULL, &safeWritePointer );\r
- if ( FAILED( result ) ) {\r
- errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";\r
- errorText_ = errorStream_.str();\r
- MUTEX_UNLOCK( &stream_.mutex );\r
- error( RtAudioError::SYSTEM_ERROR );\r
- return;\r
- }\r
- result = dsCaptureBuffer->GetCurrentPosition( NULL, &safeReadPointer );\r
- if ( FAILED( result ) ) {\r
- errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";\r
- errorText_ = errorStream_.str();\r
- MUTEX_UNLOCK( &stream_.mutex );\r
- error( RtAudioError::SYSTEM_ERROR );\r
- return;\r
- }\r
- if ( safeWritePointer != startSafeWritePointer && safeReadPointer != startSafeReadPointer ) break;\r
- Sleep( 1 );\r
- }\r
-\r
- //assert( handle->dsBufferSize[0] == handle->dsBufferSize[1] );\r
-\r
- handle->bufferPointer[0] = safeWritePointer + handle->dsPointerLeadTime[0];\r
- if ( handle->bufferPointer[0] >= handle->dsBufferSize[0] ) handle->bufferPointer[0] -= handle->dsBufferSize[0];\r
- handle->bufferPointer[1] = safeReadPointer;\r
- }\r
- else if ( stream_.mode == OUTPUT ) {\r
-\r
- // Set the proper nextWritePosition after initial startup.\r
- LPDIRECTSOUNDBUFFER dsWriteBuffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];\r
-      result = dsWriteBuffer->GetCurrentPosition( &currentWritePointer, &safeWritePointer );\r
- if ( FAILED( result ) ) {\r
- errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";\r
- errorText_ = errorStream_.str();\r
- MUTEX_UNLOCK( &stream_.mutex );\r
- error( RtAudioError::SYSTEM_ERROR );\r
- return;\r
- }\r
- handle->bufferPointer[0] = safeWritePointer + handle->dsPointerLeadTime[0];\r
- if ( handle->bufferPointer[0] >= handle->dsBufferSize[0] ) handle->bufferPointer[0] -= handle->dsBufferSize[0];\r
- }\r
-\r
- buffersRolling = true;\r
- }\r
-\r
- if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {\r
-\r
- LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];\r
-\r
- if ( handle->drainCounter > 1 ) { // write zeros to the output stream\r
- bufferBytes = stream_.bufferSize * stream_.nUserChannels[0];\r
- bufferBytes *= formatBytes( stream_.userFormat );\r
- memset( stream_.userBuffer[0], 0, bufferBytes );\r
- }\r
-\r
- // Setup parameters and do buffer conversion if necessary.\r
- if ( stream_.doConvertBuffer[0] ) {\r
- buffer = stream_.deviceBuffer;\r
- convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );\r
- bufferBytes = stream_.bufferSize * stream_.nDeviceChannels[0];\r
- bufferBytes *= formatBytes( stream_.deviceFormat[0] );\r
- }\r
- else {\r
- buffer = stream_.userBuffer[0];\r
- bufferBytes = stream_.bufferSize * stream_.nUserChannels[0];\r
- bufferBytes *= formatBytes( stream_.userFormat );\r
- }\r
-\r
- // No byte swapping necessary in DirectSound implementation.\r
-\r
- // Ahhh ... windoze. 16-bit data is signed but 8-bit data is\r
- // unsigned. So, we need to convert our signed 8-bit data here to\r
- // unsigned.\r
- if ( stream_.deviceFormat[0] == RTAUDIO_SINT8 )\r
- for ( int i=0; i<bufferBytes; i++ ) buffer[i] = (unsigned char) ( buffer[i] + 128 );\r
-\r
- DWORD dsBufferSize = handle->dsBufferSize[0];\r
- nextWritePointer = handle->bufferPointer[0];\r
-\r
- DWORD endWrite, leadPointer;\r
- while ( true ) {\r
- // Find out where the read and "safe write" pointers are.\r
-      result = dsBuffer->GetCurrentPosition( &currentWritePointer, &safeWritePointer );\r
- if ( FAILED( result ) ) {\r
- errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";\r
- errorText_ = errorStream_.str();\r
- MUTEX_UNLOCK( &stream_.mutex );\r
- error( RtAudioError::SYSTEM_ERROR );\r
- return;\r
- }\r
-\r
- // We will copy our output buffer into the region between\r
- // safeWritePointer and leadPointer. If leadPointer is not\r
- // beyond the next endWrite position, wait until it is.\r
- leadPointer = safeWritePointer + handle->dsPointerLeadTime[0];\r
- //std::cout << "safeWritePointer = " << safeWritePointer << ", leadPointer = " << leadPointer << ", nextWritePointer = " << nextWritePointer << std::endl;\r
- if ( leadPointer > dsBufferSize ) leadPointer -= dsBufferSize;\r
- if ( leadPointer < nextWritePointer ) leadPointer += dsBufferSize; // unwrap offset\r
- endWrite = nextWritePointer + bufferBytes;\r
-\r
- // Check whether the entire write region is behind the play pointer.\r
- if ( leadPointer >= endWrite ) break;\r
-\r
- // If we are here, then we must wait until the leadPointer advances\r
- // beyond the end of our next write region. We use the\r
- // Sleep() function to suspend operation until that happens.\r
- double millis = ( endWrite - leadPointer ) * 1000.0;\r
- millis /= ( formatBytes( stream_.deviceFormat[0]) * stream_.nDeviceChannels[0] * stream_.sampleRate);\r
- if ( millis < 1.0 ) millis = 1.0;\r
- Sleep( (DWORD) millis );\r
- }\r
-\r
- if ( dsPointerBetween( nextWritePointer, safeWritePointer, currentWritePointer, dsBufferSize )\r
- || dsPointerBetween( endWrite, safeWritePointer, currentWritePointer, dsBufferSize ) ) {\r
- // We've strayed into the forbidden zone ... resync the read pointer.\r
- handle->xrun[0] = true;\r
- nextWritePointer = safeWritePointer + handle->dsPointerLeadTime[0] - bufferBytes;\r
- if ( nextWritePointer >= dsBufferSize ) nextWritePointer -= dsBufferSize;\r
- handle->bufferPointer[0] = nextWritePointer;\r
- endWrite = nextWritePointer + bufferBytes;\r
- }\r
-\r
- // Lock free space in the buffer\r
- result = dsBuffer->Lock( nextWritePointer, bufferBytes, &buffer1,\r
- &bufferSize1, &buffer2, &bufferSize2, 0 );\r
- if ( FAILED( result ) ) {\r
- errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") locking buffer during playback!";\r
- errorText_ = errorStream_.str();\r
- MUTEX_UNLOCK( &stream_.mutex );\r
- error( RtAudioError::SYSTEM_ERROR );\r
- return;\r
- }\r
-\r
- // Copy our buffer into the DS buffer\r
- CopyMemory( buffer1, buffer, bufferSize1 );\r
- if ( buffer2 != NULL ) CopyMemory( buffer2, buffer+bufferSize1, bufferSize2 );\r
-\r
- // Update our buffer offset and unlock sound buffer\r
- dsBuffer->Unlock( buffer1, bufferSize1, buffer2, bufferSize2 );\r
- if ( FAILED( result ) ) {\r
- errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") unlocking buffer during playback!";\r
- errorText_ = errorStream_.str();\r
- MUTEX_UNLOCK( &stream_.mutex );\r
- error( RtAudioError::SYSTEM_ERROR );\r
- return;\r
- }\r
- nextWritePointer = ( nextWritePointer + bufferSize1 + bufferSize2 ) % dsBufferSize;\r
- handle->bufferPointer[0] = nextWritePointer;\r
- }\r
-\r
- // Don't bother draining input\r
- if ( handle->drainCounter ) {\r
- handle->drainCounter++;\r
- goto unlock;\r
- }\r
-\r
- if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {\r
-\r
- // Setup parameters.\r
- if ( stream_.doConvertBuffer[1] ) {\r
- buffer = stream_.deviceBuffer;\r
- bufferBytes = stream_.bufferSize * stream_.nDeviceChannels[1];\r
- bufferBytes *= formatBytes( stream_.deviceFormat[1] );\r
- }\r
- else {\r
- buffer = stream_.userBuffer[1];\r
- bufferBytes = stream_.bufferSize * stream_.nUserChannels[1];\r
- bufferBytes *= formatBytes( stream_.userFormat );\r
- }\r
-\r
- LPDIRECTSOUNDCAPTUREBUFFER dsBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];\r
- long nextReadPointer = handle->bufferPointer[1];\r
- DWORD dsBufferSize = handle->dsBufferSize[1];\r
-\r
- // Find out where the write and "safe read" pointers are.\r
-    result = dsBuffer->GetCurrentPosition( &currentReadPointer, &safeReadPointer );\r
- if ( FAILED( result ) ) {\r
- errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";\r
- errorText_ = errorStream_.str();\r
- MUTEX_UNLOCK( &stream_.mutex );\r
- error( RtAudioError::SYSTEM_ERROR );\r
- return;\r
- }\r
-\r
- if ( safeReadPointer < (DWORD)nextReadPointer ) safeReadPointer += dsBufferSize; // unwrap offset\r
- DWORD endRead = nextReadPointer + bufferBytes;\r
-\r
- // Handling depends on whether we are INPUT or DUPLEX.\r
- // If we're in INPUT mode then waiting is a good thing. If we're in DUPLEX mode,\r
- // then a wait here will drag the write pointers into the forbidden zone.\r
- //\r
- // In DUPLEX mode, rather than wait, we will back off the read pointer until\r
- // it's in a safe position. This causes dropouts, but it seems to be the only\r
- // practical way to sync up the read and write pointers reliably, given the\r
- // the very complex relationship between phase and increment of the read and write\r
- // pointers.\r
- //\r
- // In order to minimize audible dropouts in DUPLEX mode, we will\r
- // provide a pre-roll period of 0.5 seconds in which we return\r
- // zeros from the read buffer while the pointers sync up.\r
-\r
- if ( stream_.mode == DUPLEX ) {\r
- if ( safeReadPointer < endRead ) {\r
- if ( duplexPrerollBytes <= 0 ) {\r
- // Pre-roll time over. Be more agressive.\r
- int adjustment = endRead-safeReadPointer;\r
-\r
- handle->xrun[1] = true;\r
- // Two cases:\r
- // - large adjustments: we've probably run out of CPU cycles, so just resync exactly,\r
- // and perform fine adjustments later.\r
- // - small adjustments: back off by twice as much.\r
- if ( adjustment >= 2*bufferBytes )\r
- nextReadPointer = safeReadPointer-2*bufferBytes;\r
- else\r
- nextReadPointer = safeReadPointer-bufferBytes-adjustment;\r
-\r
- if ( nextReadPointer < 0 ) nextReadPointer += dsBufferSize;\r
-\r
- }\r
- else {\r
- // In pre=roll time. Just do it.\r
- nextReadPointer = safeReadPointer - bufferBytes;\r
- while ( nextReadPointer < 0 ) nextReadPointer += dsBufferSize;\r
- }\r
- endRead = nextReadPointer + bufferBytes;\r
- }\r
- }\r
- else { // mode == INPUT\r
- while ( safeReadPointer < endRead && stream_.callbackInfo.isRunning ) {\r
- // See comments for playback.\r
- double millis = (endRead - safeReadPointer) * 1000.0;\r
- millis /= ( formatBytes(stream_.deviceFormat[1]) * stream_.nDeviceChannels[1] * stream_.sampleRate);\r
- if ( millis < 1.0 ) millis = 1.0;\r
- Sleep( (DWORD) millis );\r
-\r
- // Wake up and find out where we are now.\r
-        result = dsBuffer->GetCurrentPosition( &currentReadPointer, &safeReadPointer );\r
- if ( FAILED( result ) ) {\r
- errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";\r
- errorText_ = errorStream_.str();\r
- MUTEX_UNLOCK( &stream_.mutex );\r
- error( RtAudioError::SYSTEM_ERROR );\r
- return;\r
- }\r
-\r
- if ( safeReadPointer < (DWORD)nextReadPointer ) safeReadPointer += dsBufferSize; // unwrap offset\r
- }\r
- }\r
-\r
- // Lock free space in the buffer\r
- result = dsBuffer->Lock( nextReadPointer, bufferBytes, &buffer1,\r
- &bufferSize1, &buffer2, &bufferSize2, 0 );\r
- if ( FAILED( result ) ) {\r
- errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") locking capture buffer!";\r
- errorText_ = errorStream_.str();\r
- MUTEX_UNLOCK( &stream_.mutex );\r
- error( RtAudioError::SYSTEM_ERROR );\r
- return;\r
- }\r
-\r
- if ( duplexPrerollBytes <= 0 ) {\r
- // Copy our buffer into the DS buffer\r
- CopyMemory( buffer, buffer1, bufferSize1 );\r
- if ( buffer2 != NULL ) CopyMemory( buffer+bufferSize1, buffer2, bufferSize2 );\r
- }\r
- else {\r
- memset( buffer, 0, bufferSize1 );\r
- if ( buffer2 != NULL ) memset( buffer + bufferSize1, 0, bufferSize2 );\r
- duplexPrerollBytes -= bufferSize1 + bufferSize2;\r
- }\r
-\r
- // Update our buffer offset and unlock sound buffer\r
- nextReadPointer = ( nextReadPointer + bufferSize1 + bufferSize2 ) % dsBufferSize;\r
- dsBuffer->Unlock( buffer1, bufferSize1, buffer2, bufferSize2 );\r
- if ( FAILED( result ) ) {\r
- errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") unlocking capture buffer!";\r
- errorText_ = errorStream_.str();\r
- MUTEX_UNLOCK( &stream_.mutex );\r
- error( RtAudioError::SYSTEM_ERROR );\r
- return;\r
- }\r
- handle->bufferPointer[1] = nextReadPointer;\r
-\r
- // No byte swapping necessary in DirectSound implementation.\r
-\r
- // If necessary, convert 8-bit data from unsigned to signed.\r
- if ( stream_.deviceFormat[1] == RTAUDIO_SINT8 )\r
- for ( int j=0; j<bufferBytes; j++ ) buffer[j] = (signed char) ( buffer[j] - 128 );\r
-\r
- // Do buffer conversion if necessary.\r
- if ( stream_.doConvertBuffer[1] )\r
- convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );\r
- }\r
-\r
- unlock:\r
- MUTEX_UNLOCK( &stream_.mutex );\r
- RtApi::tickStreamTime();\r
-}\r
-\r
-// Definitions for utility functions and callbacks\r
-// specific to the DirectSound implementation.\r
-\r
-static unsigned __stdcall callbackHandler( void *ptr )\r
-{\r
- CallbackInfo *info = (CallbackInfo *) ptr;\r
- RtApiDs *object = (RtApiDs *) info->object;\r
- bool* isRunning = &info->isRunning;\r
-\r
- while ( *isRunning == true ) {\r
- object->callbackEvent();\r
- }\r
-\r
- _endthreadex( 0 );\r
- return 0;\r
-}\r
-\r
-static BOOL CALLBACK deviceQueryCallback( LPGUID lpguid,\r
- LPCTSTR description,\r
- LPCTSTR /*module*/,\r
- LPVOID lpContext )\r
-{\r
- struct DsProbeData& probeInfo = *(struct DsProbeData*) lpContext;\r
- std::vector<struct DsDevice>& dsDevices = *probeInfo.dsDevices;\r
-\r
- HRESULT hr;\r
- bool validDevice = false;\r
- if ( probeInfo.isInput == true ) {\r
- DSCCAPS caps;\r
- LPDIRECTSOUNDCAPTURE object;\r
-\r
- hr = DirectSoundCaptureCreate( lpguid, &object, NULL );\r
- if ( hr != DS_OK ) return TRUE;\r
-\r
- caps.dwSize = sizeof(caps);\r
- hr = object->GetCaps( &caps );\r
- if ( hr == DS_OK ) {\r
- if ( caps.dwChannels > 0 && caps.dwFormats > 0 )\r
- validDevice = true;\r
- }\r
- object->Release();\r
- }\r
- else {\r
- DSCAPS caps;\r
- LPDIRECTSOUND object;\r
- hr = DirectSoundCreate( lpguid, &object, NULL );\r
- if ( hr != DS_OK ) return TRUE;\r
-\r
- caps.dwSize = sizeof(caps);\r
- hr = object->GetCaps( &caps );\r
- if ( hr == DS_OK ) {\r
- if ( caps.dwFlags & DSCAPS_PRIMARYMONO || caps.dwFlags & DSCAPS_PRIMARYSTEREO )\r
- validDevice = true;\r
- }\r
- object->Release();\r
- }\r
-\r
- // If good device, then save its name and guid.\r
- std::string name = convertCharPointerToStdString( description );\r
- //if ( name == "Primary Sound Driver" || name == "Primary Sound Capture Driver" )\r
- if ( lpguid == NULL )\r
- name = "Default Device";\r
- if ( validDevice ) {\r
- for ( unsigned int i=0; i<dsDevices.size(); i++ ) {\r
- if ( dsDevices[i].name == name ) {\r
- dsDevices[i].found = true;\r
- if ( probeInfo.isInput ) {\r
- dsDevices[i].id[1] = lpguid;\r
- dsDevices[i].validId[1] = true;\r
- }\r
- else {\r
- dsDevices[i].id[0] = lpguid;\r
- dsDevices[i].validId[0] = true;\r
- }\r
- return TRUE;\r
- }\r
- }\r
-\r
- DsDevice device;\r
- device.name = name;\r
- device.found = true;\r
- if ( probeInfo.isInput ) {\r
- device.id[1] = lpguid;\r
- device.validId[1] = true;\r
- }\r
- else {\r
- device.id[0] = lpguid;\r
- device.validId[0] = true;\r
- }\r
- dsDevices.push_back( device );\r
- }\r
-\r
- return TRUE;\r
-}\r
-\r
-static const char* getErrorString( int code )\r
-{\r
- switch ( code ) {\r
-\r
- case DSERR_ALLOCATED:\r
- return "Already allocated";\r
-\r
- case DSERR_CONTROLUNAVAIL:\r
- return "Control unavailable";\r
-\r
- case DSERR_INVALIDPARAM:\r
- return "Invalid parameter";\r
-\r
- case DSERR_INVALIDCALL:\r
- return "Invalid call";\r
-\r
- case DSERR_GENERIC:\r
- return "Generic error";\r
-\r
- case DSERR_PRIOLEVELNEEDED:\r
- return "Priority level needed";\r
-\r
- case DSERR_OUTOFMEMORY:\r
- return "Out of memory";\r
-\r
- case DSERR_BADFORMAT:\r
- return "The sample rate or the channel format is not supported";\r
-\r
- case DSERR_UNSUPPORTED:\r
- return "Not supported";\r
-\r
- case DSERR_NODRIVER:\r
- return "No driver";\r
-\r
- case DSERR_ALREADYINITIALIZED:\r
- return "Already initialized";\r
-\r
- case DSERR_NOAGGREGATION:\r
- return "No aggregation";\r
-\r
- case DSERR_BUFFERLOST:\r
- return "Buffer lost";\r
-\r
- case DSERR_OTHERAPPHASPRIO:\r
- return "Another application already has priority";\r
-\r
- case DSERR_UNINITIALIZED:\r
- return "Uninitialized";\r
-\r
- default:\r
- return "DirectSound unknown error";\r
- }\r
-}\r
-//******************** End of __WINDOWS_DS__ *********************//\r
-#endif\r
-\r
-\r
-#if defined(__LINUX_ALSA__)\r
-\r
-#include <alsa/asoundlib.h>\r
-#include <unistd.h>\r
-\r
- // A structure to hold various information related to the ALSA API\r
- // implementation.\r
-struct AlsaHandle {\r
- snd_pcm_t *handles[2];\r
- bool synchronized;\r
- bool xrun[2];\r
- pthread_cond_t runnable_cv;\r
- bool runnable;\r
-\r
- AlsaHandle()\r
- :synchronized(false), runnable(false) { xrun[0] = false; xrun[1] = false; }\r
-};\r
-\r
-static void *alsaCallbackHandler( void * ptr );\r
-\r
-RtApiAlsa :: RtApiAlsa()\r
-{\r
- // Nothing to do here.\r
-}\r
-\r
-RtApiAlsa :: ~RtApiAlsa()\r
-{\r
- if ( stream_.state != STREAM_CLOSED ) closeStream();\r
-}\r
-\r
-unsigned int RtApiAlsa :: getDeviceCount( void )\r
-{\r
- unsigned nDevices = 0;\r
- int result, subdevice, card;\r
- char name[64];\r
- snd_ctl_t *handle;\r
-\r
- // Count cards and devices\r
- card = -1;\r
- snd_card_next( &card );\r
- while ( card >= 0 ) {\r
- sprintf( name, "hw:%d", card );\r
- result = snd_ctl_open( &handle, name, 0 );\r
- if ( result < 0 ) {\r
- errorStream_ << "RtApiAlsa::getDeviceCount: control open, card = " << card << ", " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- goto nextcard;\r
- }\r
- subdevice = -1;\r
- while( 1 ) {\r
- result = snd_ctl_pcm_next_device( handle, &subdevice );\r
- if ( result < 0 ) {\r
- errorStream_ << "RtApiAlsa::getDeviceCount: control next device, card = " << card << ", " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- break;\r
- }\r
- if ( subdevice < 0 )\r
- break;\r
- nDevices++;\r
- }\r
- nextcard:\r
- snd_ctl_close( handle );\r
- snd_card_next( &card );\r
- }\r
-\r
- result = snd_ctl_open( &handle, "default", 0 );\r
- if (result == 0) {\r
- nDevices++;\r
- snd_ctl_close( handle );\r
- }\r
-\r
- return nDevices;\r
-}\r
-\r
-RtAudio::DeviceInfo RtApiAlsa :: getDeviceInfo( unsigned int device )\r
-{\r
- RtAudio::DeviceInfo info;\r
- info.probed = false;\r
-\r
- unsigned nDevices = 0;\r
- int result, subdevice, card;\r
- char name[64];\r
- snd_ctl_t *chandle;\r
-\r
- // Count cards and devices\r
- card = -1;\r
- subdevice = -1;\r
- snd_card_next( &card );\r
- while ( card >= 0 ) {\r
- sprintf( name, "hw:%d", card );\r
- result = snd_ctl_open( &chandle, name, SND_CTL_NONBLOCK );\r
- if ( result < 0 ) {\r
- errorStream_ << "RtApiAlsa::getDeviceInfo: control open, card = " << card << ", " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- goto nextcard;\r
- }\r
- subdevice = -1;\r
- while( 1 ) {\r
- result = snd_ctl_pcm_next_device( chandle, &subdevice );\r
- if ( result < 0 ) {\r
- errorStream_ << "RtApiAlsa::getDeviceInfo: control next device, card = " << card << ", " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- break;\r
- }\r
- if ( subdevice < 0 ) break;\r
- if ( nDevices == device ) {\r
- sprintf( name, "hw:%d,%d", card, subdevice );\r
- goto foundDevice;\r
- }\r
- nDevices++;\r
- }\r
- nextcard:\r
- snd_ctl_close( chandle );\r
- snd_card_next( &card );\r
- }\r
-\r
- result = snd_ctl_open( &chandle, "default", SND_CTL_NONBLOCK );\r
- if ( result == 0 ) {\r
- if ( nDevices == device ) {\r
- strcpy( name, "default" );\r
- goto foundDevice;\r
- }\r
- nDevices++;\r
- }\r
-\r
- if ( nDevices == 0 ) {\r
- errorText_ = "RtApiAlsa::getDeviceInfo: no devices found!";\r
- error( RtAudioError::INVALID_USE );\r
- return info;\r
- }\r
-\r
- if ( device >= nDevices ) {\r
- errorText_ = "RtApiAlsa::getDeviceInfo: device ID is invalid!";\r
- error( RtAudioError::INVALID_USE );\r
- return info;\r
- }\r
-\r
- foundDevice:\r
-\r
- // If a stream is already open, we cannot probe the stream devices.\r
- // Thus, use the saved results.\r
- if ( stream_.state != STREAM_CLOSED &&\r
- ( stream_.device[0] == device || stream_.device[1] == device ) ) {\r
- snd_ctl_close( chandle );\r
- if ( device >= devices_.size() ) {\r
- errorText_ = "RtApiAlsa::getDeviceInfo: device ID was not present before stream was opened.";\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
- return devices_[ device ];\r
- }\r
-\r
- int openMode = SND_PCM_ASYNC;\r
- snd_pcm_stream_t stream;\r
- snd_pcm_info_t *pcminfo;\r
- snd_pcm_info_alloca( &pcminfo );\r
- snd_pcm_t *phandle;\r
- snd_pcm_hw_params_t *params;\r
-  snd_pcm_hw_params_alloca( &params );\r
-\r
- // First try for playback unless default device (which has subdev -1)\r
- stream = SND_PCM_STREAM_PLAYBACK;\r
- snd_pcm_info_set_stream( pcminfo, stream );\r
- if ( subdevice != -1 ) {\r
- snd_pcm_info_set_device( pcminfo, subdevice );\r
- snd_pcm_info_set_subdevice( pcminfo, 0 );\r
-\r
- result = snd_ctl_pcm_info( chandle, pcminfo );\r
- if ( result < 0 ) {\r
- // Device probably doesn't support playback.\r
- goto captureProbe;\r
- }\r
- }\r
-\r
- result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK );\r
- if ( result < 0 ) {\r
- errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- goto captureProbe;\r
- }\r
-\r
- // The device is open ... fill the parameter structure.\r
- result = snd_pcm_hw_params_any( phandle, params );\r
- if ( result < 0 ) {\r
- snd_pcm_close( phandle );\r
- errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- goto captureProbe;\r
- }\r
-\r
- // Get output channel information.\r
- unsigned int value;\r
- result = snd_pcm_hw_params_get_channels_max( params, &value );\r
- if ( result < 0 ) {\r
- snd_pcm_close( phandle );\r
- errorStream_ << "RtApiAlsa::getDeviceInfo: error getting device (" << name << ") output channels, " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- goto captureProbe;\r
- }\r
- info.outputChannels = value;\r
- snd_pcm_close( phandle );\r
-\r
- captureProbe:\r
- stream = SND_PCM_STREAM_CAPTURE;\r
- snd_pcm_info_set_stream( pcminfo, stream );\r
-\r
- // Now try for capture unless default device (with subdev = -1)\r
- if ( subdevice != -1 ) {\r
- result = snd_ctl_pcm_info( chandle, pcminfo );\r
- snd_ctl_close( chandle );\r
- if ( result < 0 ) {\r
- // Device probably doesn't support capture.\r
- if ( info.outputChannels == 0 ) return info;\r
- goto probeParameters;\r
- }\r
- }\r
- else\r
- snd_ctl_close( chandle );\r
-\r
- result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK);\r
- if ( result < 0 ) {\r
- errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- if ( info.outputChannels == 0 ) return info;\r
- goto probeParameters;\r
- }\r
-\r
- // The device is open ... fill the parameter structure.\r
- result = snd_pcm_hw_params_any( phandle, params );\r
- if ( result < 0 ) {\r
- snd_pcm_close( phandle );\r
- errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- if ( info.outputChannels == 0 ) return info;\r
- goto probeParameters;\r
- }\r
-\r
- result = snd_pcm_hw_params_get_channels_max( params, &value );\r
- if ( result < 0 ) {\r
- snd_pcm_close( phandle );\r
- errorStream_ << "RtApiAlsa::getDeviceInfo: error getting device (" << name << ") input channels, " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- if ( info.outputChannels == 0 ) return info;\r
- goto probeParameters;\r
- }\r
- info.inputChannels = value;\r
- snd_pcm_close( phandle );\r
-\r
- // If device opens for both playback and capture, we determine the channels.\r
- if ( info.outputChannels > 0 && info.inputChannels > 0 )\r
- info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;\r
-\r
- // ALSA doesn't provide default devices so we'll use the first available one.\r
- if ( device == 0 && info.outputChannels > 0 )\r
- info.isDefaultOutput = true;\r
- if ( device == 0 && info.inputChannels > 0 )\r
- info.isDefaultInput = true;\r
-\r
- probeParameters:\r
- // At this point, we just need to figure out the supported data\r
- // formats and sample rates. We'll proceed by opening the device in\r
- // the direction with the maximum number of channels, or playback if\r
- // they are equal. This might limit our sample rate options, but so\r
- // be it.\r
-\r
- if ( info.outputChannels >= info.inputChannels )\r
- stream = SND_PCM_STREAM_PLAYBACK;\r
- else\r
- stream = SND_PCM_STREAM_CAPTURE;\r
- snd_pcm_info_set_stream( pcminfo, stream );\r
-\r
- result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK);\r
- if ( result < 0 ) {\r
- errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
-\r
- // The device is open ... fill the parameter structure.\r
- result = snd_pcm_hw_params_any( phandle, params );\r
- if ( result < 0 ) {\r
- snd_pcm_close( phandle );\r
- errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
-\r
- // Test our discrete set of sample rate values.\r
- info.sampleRates.clear();\r
- for ( unsigned int i=0; i<MAX_SAMPLE_RATES; i++ ) {\r
- if ( snd_pcm_hw_params_test_rate( phandle, params, SAMPLE_RATES[i], 0 ) == 0 ) {\r
- info.sampleRates.push_back( SAMPLE_RATES[i] );\r
-\r
- if ( !info.preferredSampleRate || ( SAMPLE_RATES[i] <= 48000 && SAMPLE_RATES[i] > info.preferredSampleRate ) )\r
- info.preferredSampleRate = SAMPLE_RATES[i];\r
- }\r
- }\r
- if ( info.sampleRates.size() == 0 ) {\r
- snd_pcm_close( phandle );\r
- errorStream_ << "RtApiAlsa::getDeviceInfo: no supported sample rates found for device (" << name << ").";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
-\r
- // Probe the supported data formats ... we don't care about endian-ness just yet\r
- snd_pcm_format_t format;\r
- info.nativeFormats = 0;\r
- format = SND_PCM_FORMAT_S8;\r
- if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )\r
- info.nativeFormats |= RTAUDIO_SINT8;\r
- format = SND_PCM_FORMAT_S16;\r
- if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )\r
- info.nativeFormats |= RTAUDIO_SINT16;\r
- format = SND_PCM_FORMAT_S24;\r
- if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )\r
- info.nativeFormats |= RTAUDIO_SINT24;\r
- format = SND_PCM_FORMAT_S32;\r
- if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )\r
- info.nativeFormats |= RTAUDIO_SINT32;\r
- format = SND_PCM_FORMAT_FLOAT;\r
- if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )\r
- info.nativeFormats |= RTAUDIO_FLOAT32;\r
- format = SND_PCM_FORMAT_FLOAT64;\r
- if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )\r
- info.nativeFormats |= RTAUDIO_FLOAT64;\r
-\r
- // Check that we have at least one supported format\r
- if ( info.nativeFormats == 0 ) {\r
- snd_pcm_close( phandle );\r
- errorStream_ << "RtApiAlsa::getDeviceInfo: pcm device (" << name << ") data format not supported by RtAudio.";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
-\r
- // Get the device name\r
- char *cardname;\r
- result = snd_card_get_name( card, &cardname );\r
- if ( result >= 0 ) {\r
- sprintf( name, "hw:%s,%d", cardname, subdevice );\r
- free( cardname );\r
- }\r
- info.name = name;\r
-\r
- // That's all ... close the device and return\r
- snd_pcm_close( phandle );\r
- info.probed = true;\r
- return info;\r
-}\r
-\r
-void RtApiAlsa :: saveDeviceInfo( void )\r
-{\r
- devices_.clear();\r
-\r
- unsigned int nDevices = getDeviceCount();\r
- devices_.resize( nDevices );\r
- for ( unsigned int i=0; i<nDevices; i++ )\r
- devices_[i] = getDeviceInfo( i );\r
-}\r
-\r
-bool RtApiAlsa :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,\r
- unsigned int firstChannel, unsigned int sampleRate,\r
- RtAudioFormat format, unsigned int *bufferSize,\r
- RtAudio::StreamOptions *options )\r
-\r
-{\r
-#if defined(__RTAUDIO_DEBUG__)\r
- snd_output_t *out;\r
- snd_output_stdio_attach(&out, stderr, 0);\r
-#endif\r
-\r
- // I'm not using the "plug" interface ... too much inconsistent behavior.\r
-\r
- unsigned nDevices = 0;\r
- int result, subdevice, card;\r
- char name[64];\r
- snd_ctl_t *chandle;\r
-\r
- if ( options && options->flags & RTAUDIO_ALSA_USE_DEFAULT )\r
- snprintf(name, sizeof(name), "%s", "default");\r
- else {\r
- // Count cards and devices\r
- card = -1;\r
- snd_card_next( &card );\r
- while ( card >= 0 ) {\r
- sprintf( name, "hw:%d", card );\r
- result = snd_ctl_open( &chandle, name, SND_CTL_NONBLOCK );\r
- if ( result < 0 ) {\r
- errorStream_ << "RtApiAlsa::probeDeviceOpen: control open, card = " << card << ", " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
- subdevice = -1;\r
- while( 1 ) {\r
- result = snd_ctl_pcm_next_device( chandle, &subdevice );\r
- if ( result < 0 ) break;\r
- if ( subdevice < 0 ) break;\r
- if ( nDevices == device ) {\r
- sprintf( name, "hw:%d,%d", card, subdevice );\r
- snd_ctl_close( chandle );\r
- goto foundDevice;\r
- }\r
- nDevices++;\r
- }\r
- snd_ctl_close( chandle );\r
- snd_card_next( &card );\r
- }\r
-\r
- result = snd_ctl_open( &chandle, "default", SND_CTL_NONBLOCK );\r
- if ( result == 0 ) {\r
- if ( nDevices == device ) {\r
- strcpy( name, "default" );\r
- goto foundDevice;\r
- }\r
- nDevices++;\r
- }\r
-\r
- if ( nDevices == 0 ) {\r
- // This should not happen because a check is made before this function is called.\r
- errorText_ = "RtApiAlsa::probeDeviceOpen: no devices found!";\r
- return FAILURE;\r
- }\r
-\r
- if ( device >= nDevices ) {\r
- // This should not happen because a check is made before this function is called.\r
- errorText_ = "RtApiAlsa::probeDeviceOpen: device ID is invalid!";\r
- return FAILURE;\r
- }\r
- }\r
-\r
- foundDevice:\r
-\r
- // The getDeviceInfo() function will not work for a device that is\r
- // already open. Thus, we'll probe the system before opening a\r
- // stream and save the results for use by getDeviceInfo().\r
- if ( mode == OUTPUT || ( mode == INPUT && stream_.mode != OUTPUT ) ) // only do once\r
- this->saveDeviceInfo();\r
-\r
- snd_pcm_stream_t stream;\r
- if ( mode == OUTPUT )\r
- stream = SND_PCM_STREAM_PLAYBACK;\r
- else\r
- stream = SND_PCM_STREAM_CAPTURE;\r
-\r
- snd_pcm_t *phandle;\r
- int openMode = SND_PCM_ASYNC;\r
- result = snd_pcm_open( &phandle, name, stream, openMode );\r
- if ( result < 0 ) {\r
- if ( mode == OUTPUT )\r
- errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device (" << name << ") won't open for output.";\r
- else\r
- errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device (" << name << ") won't open for input.";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // Fill the parameter structure.\r
- snd_pcm_hw_params_t *hw_params;\r
- snd_pcm_hw_params_alloca( &hw_params );\r
- result = snd_pcm_hw_params_any( phandle, hw_params );\r
- if ( result < 0 ) {\r
- snd_pcm_close( phandle );\r
- errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting pcm device (" << name << ") parameters, " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
-#if defined(__RTAUDIO_DEBUG__)\r
- fprintf( stderr, "\nRtApiAlsa: dump hardware params just after device open:\n\n" );\r
- snd_pcm_hw_params_dump( hw_params, out );\r
-#endif\r
-\r
- // Set access ... check user preference.\r
- if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) {\r
- stream_.userInterleaved = false;\r
- result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED );\r
- if ( result < 0 ) {\r
- result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED );\r
- stream_.deviceInterleaved[mode] = true;\r
- }\r
- else\r
- stream_.deviceInterleaved[mode] = false;\r
- }\r
- else {\r
- stream_.userInterleaved = true;\r
- result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED );\r
- if ( result < 0 ) {\r
- result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED );\r
- stream_.deviceInterleaved[mode] = false;\r
- }\r
- else\r
- stream_.deviceInterleaved[mode] = true;\r
- }\r
-\r
- if ( result < 0 ) {\r
- snd_pcm_close( phandle );\r
- errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting pcm device (" << name << ") access, " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // Determine how to set the device format.\r
- stream_.userFormat = format;\r
- snd_pcm_format_t deviceFormat = SND_PCM_FORMAT_UNKNOWN;\r
-\r
- if ( format == RTAUDIO_SINT8 )\r
- deviceFormat = SND_PCM_FORMAT_S8;\r
- else if ( format == RTAUDIO_SINT16 )\r
- deviceFormat = SND_PCM_FORMAT_S16;\r
- else if ( format == RTAUDIO_SINT24 )\r
- deviceFormat = SND_PCM_FORMAT_S24;\r
- else if ( format == RTAUDIO_SINT32 )\r
- deviceFormat = SND_PCM_FORMAT_S32;\r
- else if ( format == RTAUDIO_FLOAT32 )\r
- deviceFormat = SND_PCM_FORMAT_FLOAT;\r
- else if ( format == RTAUDIO_FLOAT64 )\r
- deviceFormat = SND_PCM_FORMAT_FLOAT64;\r
-\r
- if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat) == 0) {\r
- stream_.deviceFormat[mode] = format;\r
- goto setFormat;\r
- }\r
-\r
- // The user requested format is not natively supported by the device.\r
- deviceFormat = SND_PCM_FORMAT_FLOAT64;\r
- if ( snd_pcm_hw_params_test_format( phandle, hw_params, deviceFormat ) == 0 ) {\r
- stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;\r
- goto setFormat;\r
- }\r
-\r
- deviceFormat = SND_PCM_FORMAT_FLOAT;\r
- if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {\r
- stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;\r
- goto setFormat;\r
- }\r
-\r
- deviceFormat = SND_PCM_FORMAT_S32;\r
- if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {\r
- stream_.deviceFormat[mode] = RTAUDIO_SINT32;\r
- goto setFormat;\r
- }\r
-\r
- deviceFormat = SND_PCM_FORMAT_S24;\r
- if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {\r
- stream_.deviceFormat[mode] = RTAUDIO_SINT24;\r
- goto setFormat;\r
- }\r
-\r
- deviceFormat = SND_PCM_FORMAT_S16;\r
- if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {\r
- stream_.deviceFormat[mode] = RTAUDIO_SINT16;\r
- goto setFormat;\r
- }\r
-\r
- deviceFormat = SND_PCM_FORMAT_S8;\r
- if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {\r
- stream_.deviceFormat[mode] = RTAUDIO_SINT8;\r
- goto setFormat;\r
- }\r
-\r
- // If we get here, no supported format was found.\r
- snd_pcm_close( phandle );\r
- errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device " << device << " data format not supported by RtAudio.";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
-\r
- setFormat:\r
- result = snd_pcm_hw_params_set_format( phandle, hw_params, deviceFormat );\r
- if ( result < 0 ) {\r
- snd_pcm_close( phandle );\r
- errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting pcm device (" << name << ") data format, " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // Determine whether byte-swaping is necessary.\r
- stream_.doByteSwap[mode] = false;\r
- if ( deviceFormat != SND_PCM_FORMAT_S8 ) {\r
- result = snd_pcm_format_cpu_endian( deviceFormat );\r
- if ( result == 0 )\r
- stream_.doByteSwap[mode] = true;\r
- else if (result < 0) {\r
- snd_pcm_close( phandle );\r
- errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting pcm device (" << name << ") endian-ness, " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
- }\r
-\r
- // Set the sample rate.\r
- result = snd_pcm_hw_params_set_rate_near( phandle, hw_params, (unsigned int*) &sampleRate, 0 );\r
- if ( result < 0 ) {\r
- snd_pcm_close( phandle );\r
- errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting sample rate on device (" << name << "), " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // Determine the number of channels for this device. We support a possible\r
- // minimum device channel number > than the value requested by the user.\r
- stream_.nUserChannels[mode] = channels;\r
- unsigned int value;\r
- result = snd_pcm_hw_params_get_channels_max( hw_params, &value );\r
- unsigned int deviceChannels = value;\r
- if ( result < 0 || deviceChannels < channels + firstChannel ) {\r
- snd_pcm_close( phandle );\r
- errorStream_ << "RtApiAlsa::probeDeviceOpen: requested channel parameters not supported by device (" << name << "), " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- result = snd_pcm_hw_params_get_channels_min( hw_params, &value );\r
- if ( result < 0 ) {\r
- snd_pcm_close( phandle );\r
- errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting minimum channels for device (" << name << "), " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
- deviceChannels = value;\r
- if ( deviceChannels < channels + firstChannel ) deviceChannels = channels + firstChannel;\r
- stream_.nDeviceChannels[mode] = deviceChannels;\r
-\r
- // Set the device channels.\r
- result = snd_pcm_hw_params_set_channels( phandle, hw_params, deviceChannels );\r
- if ( result < 0 ) {\r
- snd_pcm_close( phandle );\r
- errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting channels for device (" << name << "), " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // Set the buffer (or period) size.\r
- int dir = 0;\r
- snd_pcm_uframes_t periodSize = *bufferSize;\r
- result = snd_pcm_hw_params_set_period_size_near( phandle, hw_params, &periodSize, &dir );\r
- if ( result < 0 ) {\r
- snd_pcm_close( phandle );\r
- errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting period size for device (" << name << "), " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
- *bufferSize = periodSize;\r
-\r
- // Set the buffer number, which in ALSA is referred to as the "period".\r
- unsigned int periods = 0;\r
- if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) periods = 2;\r
- if ( options && options->numberOfBuffers > 0 ) periods = options->numberOfBuffers;\r
- if ( periods < 2 ) periods = 4; // a fairly safe default value\r
- result = snd_pcm_hw_params_set_periods_near( phandle, hw_params, &periods, &dir );\r
- if ( result < 0 ) {\r
- snd_pcm_close( phandle );\r
- errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting periods for device (" << name << "), " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // If attempting to setup a duplex stream, the bufferSize parameter\r
- // MUST be the same in both directions!\r
- if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {\r
- snd_pcm_close( phandle );\r
- errorStream_ << "RtApiAlsa::probeDeviceOpen: system error setting buffer size for duplex stream on device (" << name << ").";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- stream_.bufferSize = *bufferSize;\r
-\r
- // Install the hardware configuration\r
- result = snd_pcm_hw_params( phandle, hw_params );\r
- if ( result < 0 ) {\r
- snd_pcm_close( phandle );\r
- errorStream_ << "RtApiAlsa::probeDeviceOpen: error installing hardware configuration on device (" << name << "), " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
-#if defined(__RTAUDIO_DEBUG__)\r
- fprintf(stderr, "\nRtApiAlsa: dump hardware params after installation:\n\n");\r
- snd_pcm_hw_params_dump( hw_params, out );\r
-#endif\r
-\r
- // Set the software configuration to fill buffers with zeros and prevent device stopping on xruns.\r
- snd_pcm_sw_params_t *sw_params = NULL;\r
- snd_pcm_sw_params_alloca( &sw_params );\r
- snd_pcm_sw_params_current( phandle, sw_params );\r
- snd_pcm_sw_params_set_start_threshold( phandle, sw_params, *bufferSize );\r
- snd_pcm_sw_params_set_stop_threshold( phandle, sw_params, ULONG_MAX );\r
- snd_pcm_sw_params_set_silence_threshold( phandle, sw_params, 0 );\r
-\r
- // The following two settings were suggested by Theo Veenker\r
- //snd_pcm_sw_params_set_avail_min( phandle, sw_params, *bufferSize );\r
- //snd_pcm_sw_params_set_xfer_align( phandle, sw_params, 1 );\r
-\r
- // here are two options for a fix\r
- //snd_pcm_sw_params_set_silence_size( phandle, sw_params, ULONG_MAX );\r
- snd_pcm_uframes_t val;\r
- snd_pcm_sw_params_get_boundary( sw_params, &val );\r
- snd_pcm_sw_params_set_silence_size( phandle, sw_params, val );\r
-\r
- result = snd_pcm_sw_params( phandle, sw_params );\r
- if ( result < 0 ) {\r
- snd_pcm_close( phandle );\r
- errorStream_ << "RtApiAlsa::probeDeviceOpen: error installing software configuration on device (" << name << "), " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
-#if defined(__RTAUDIO_DEBUG__)\r
- fprintf(stderr, "\nRtApiAlsa: dump software params after installation:\n\n");\r
- snd_pcm_sw_params_dump( sw_params, out );\r
-#endif\r
-\r
- // Set flags for buffer conversion\r
- stream_.doConvertBuffer[mode] = false;\r
- if ( stream_.userFormat != stream_.deviceFormat[mode] )\r
- stream_.doConvertBuffer[mode] = true;\r
- if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )\r
- stream_.doConvertBuffer[mode] = true;\r
- if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&\r
- stream_.nUserChannels[mode] > 1 )\r
- stream_.doConvertBuffer[mode] = true;\r
-\r
- // Allocate the ApiHandle if necessary and then save.\r
- AlsaHandle *apiInfo = 0;\r
- if ( stream_.apiHandle == 0 ) {\r
- try {\r
- apiInfo = (AlsaHandle *) new AlsaHandle;\r
- }\r
- catch ( std::bad_alloc& ) {\r
- errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating AlsaHandle memory.";\r
- goto error;\r
- }\r
-\r
- if ( pthread_cond_init( &apiInfo->runnable_cv, NULL ) ) {\r
- errorText_ = "RtApiAlsa::probeDeviceOpen: error initializing pthread condition variable.";\r
- goto error;\r
- }\r
-\r
- stream_.apiHandle = (void *) apiInfo;\r
- apiInfo->handles[0] = 0;\r
- apiInfo->handles[1] = 0;\r
- }\r
- else {\r
- apiInfo = (AlsaHandle *) stream_.apiHandle;\r
- }\r
- apiInfo->handles[mode] = phandle;\r
- phandle = 0;\r
-\r
- // Allocate necessary internal buffers.\r
- unsigned long bufferBytes;\r
- bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );\r
- stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );\r
- if ( stream_.userBuffer[mode] == NULL ) {\r
- errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating user buffer memory.";\r
- goto error;\r
- }\r
-\r
- if ( stream_.doConvertBuffer[mode] ) {\r
-\r
- bool makeBuffer = true;\r
- bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );\r
- if ( mode == INPUT ) {\r
- if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {\r
- unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );\r
- if ( bufferBytes <= bytesOut ) makeBuffer = false;\r
- }\r
- }\r
-\r
- if ( makeBuffer ) {\r
- bufferBytes *= *bufferSize;\r
- if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );\r
- stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );\r
- if ( stream_.deviceBuffer == NULL ) {\r
- errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating device buffer memory.";\r
- goto error;\r
- }\r
- }\r
- }\r
-\r
- stream_.sampleRate = sampleRate;\r
- stream_.nBuffers = periods;\r
- stream_.device[mode] = device;\r
- stream_.state = STREAM_STOPPED;\r
-\r
- // Setup the buffer conversion information structure.\r
- if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );\r
-\r
- // Setup thread if necessary.\r
- if ( stream_.mode == OUTPUT && mode == INPUT ) {\r
- // We had already set up an output stream.\r
- stream_.mode = DUPLEX;\r
- // Link the streams if possible.\r
- apiInfo->synchronized = false;\r
- if ( snd_pcm_link( apiInfo->handles[0], apiInfo->handles[1] ) == 0 )\r
- apiInfo->synchronized = true;\r
- else {\r
- errorText_ = "RtApiAlsa::probeDeviceOpen: unable to synchronize input and output devices.";\r
- error( RtAudioError::WARNING );\r
- }\r
- }\r
- else {\r
- stream_.mode = mode;\r
-\r
- // Setup callback thread.\r
- stream_.callbackInfo.object = (void *) this;\r
-\r
- // Set the thread attributes for joinable and realtime scheduling\r
- // priority (optional). The higher priority will only take affect\r
- // if the program is run as root or suid. Note, under Linux\r
- // processes with CAP_SYS_NICE privilege, a user can change\r
- // scheduling policy and priority (thus need not be root). See\r
- // POSIX "capabilities".\r
- pthread_attr_t attr;\r
- pthread_attr_init( &attr );\r
- pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );\r
-\r
-#ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)\r
- if ( options && options->flags & RTAUDIO_SCHEDULE_REALTIME ) {\r
- // We previously attempted to increase the audio callback priority\r
- // to SCHED_RR here via the attributes. However, while no errors\r
- // were reported in doing so, it did not work. So, now this is\r
- // done in the alsaCallbackHandler function.\r
- stream_.callbackInfo.doRealtime = true;\r
- int priority = options->priority;\r
- int min = sched_get_priority_min( SCHED_RR );\r
- int max = sched_get_priority_max( SCHED_RR );\r
- if ( priority < min ) priority = min;\r
- else if ( priority > max ) priority = max;\r
- stream_.callbackInfo.priority = priority;\r
- }\r
-#endif\r
-\r
- stream_.callbackInfo.isRunning = true;\r
- result = pthread_create( &stream_.callbackInfo.thread, &attr, alsaCallbackHandler, &stream_.callbackInfo );\r
- pthread_attr_destroy( &attr );\r
- if ( result ) {\r
- stream_.callbackInfo.isRunning = false;\r
- errorText_ = "RtApiAlsa::error creating callback thread!";\r
- goto error;\r
- }\r
- }\r
-\r
- return SUCCESS;\r
-\r
- error:\r
- if ( apiInfo ) {\r
- pthread_cond_destroy( &apiInfo->runnable_cv );\r
- if ( apiInfo->handles[0] ) snd_pcm_close( apiInfo->handles[0] );\r
- if ( apiInfo->handles[1] ) snd_pcm_close( apiInfo->handles[1] );\r
- delete apiInfo;\r
- stream_.apiHandle = 0;\r
- }\r
-\r
- if ( phandle) snd_pcm_close( phandle );\r
-\r
- for ( int i=0; i<2; i++ ) {\r
- if ( stream_.userBuffer[i] ) {\r
- free( stream_.userBuffer[i] );\r
- stream_.userBuffer[i] = 0;\r
- }\r
- }\r
-\r
- if ( stream_.deviceBuffer ) {\r
- free( stream_.deviceBuffer );\r
- stream_.deviceBuffer = 0;\r
- }\r
-\r
- stream_.state = STREAM_CLOSED;\r
- return FAILURE;\r
-}\r
-\r
-void RtApiAlsa :: closeStream()\r
-{\r
- if ( stream_.state == STREAM_CLOSED ) {\r
- errorText_ = "RtApiAlsa::closeStream(): no open stream to close!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;\r
- stream_.callbackInfo.isRunning = false;\r
- MUTEX_LOCK( &stream_.mutex );\r
- if ( stream_.state == STREAM_STOPPED ) {\r
- apiInfo->runnable = true;\r
- pthread_cond_signal( &apiInfo->runnable_cv );\r
- }\r
- MUTEX_UNLOCK( &stream_.mutex );\r
- pthread_join( stream_.callbackInfo.thread, NULL );\r
-\r
- if ( stream_.state == STREAM_RUNNING ) {\r
- stream_.state = STREAM_STOPPED;\r
- if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )\r
- snd_pcm_drop( apiInfo->handles[0] );\r
- if ( stream_.mode == INPUT || stream_.mode == DUPLEX )\r
- snd_pcm_drop( apiInfo->handles[1] );\r
- }\r
-\r
- if ( apiInfo ) {\r
- pthread_cond_destroy( &apiInfo->runnable_cv );\r
- if ( apiInfo->handles[0] ) snd_pcm_close( apiInfo->handles[0] );\r
- if ( apiInfo->handles[1] ) snd_pcm_close( apiInfo->handles[1] );\r
- delete apiInfo;\r
- stream_.apiHandle = 0;\r
- }\r
-\r
- for ( int i=0; i<2; i++ ) {\r
- if ( stream_.userBuffer[i] ) {\r
- free( stream_.userBuffer[i] );\r
- stream_.userBuffer[i] = 0;\r
- }\r
- }\r
-\r
- if ( stream_.deviceBuffer ) {\r
- free( stream_.deviceBuffer );\r
- stream_.deviceBuffer = 0;\r
- }\r
-\r
- stream_.mode = UNINITIALIZED;\r
- stream_.state = STREAM_CLOSED;\r
-}\r
-\r
-void RtApiAlsa :: startStream()\r
-{\r
- // This method calls snd_pcm_prepare if the device isn't already in that state.\r
-\r
- verifyStream();\r
- if ( stream_.state == STREAM_RUNNING ) {\r
- errorText_ = "RtApiAlsa::startStream(): the stream is already running!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- MUTEX_LOCK( &stream_.mutex );\r
-\r
- int result = 0;\r
- snd_pcm_state_t state;\r
- AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;\r
- snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;\r
- if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {\r
- state = snd_pcm_state( handle[0] );\r
- if ( state != SND_PCM_STATE_PREPARED ) {\r
- result = snd_pcm_prepare( handle[0] );\r
- if ( result < 0 ) {\r
- errorStream_ << "RtApiAlsa::startStream: error preparing output pcm device, " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- goto unlock;\r
- }\r
- }\r
- }\r
-\r
- if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {\r
- result = snd_pcm_drop(handle[1]); // fix to remove stale data received since device has been open\r
- state = snd_pcm_state( handle[1] );\r
- if ( state != SND_PCM_STATE_PREPARED ) {\r
- result = snd_pcm_prepare( handle[1] );\r
- if ( result < 0 ) {\r
- errorStream_ << "RtApiAlsa::startStream: error preparing input pcm device, " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- goto unlock;\r
- }\r
- }\r
- }\r
-\r
- stream_.state = STREAM_RUNNING;\r
-\r
- unlock:\r
- apiInfo->runnable = true;\r
- pthread_cond_signal( &apiInfo->runnable_cv );\r
- MUTEX_UNLOCK( &stream_.mutex );\r
-\r
- if ( result >= 0 ) return;\r
- error( RtAudioError::SYSTEM_ERROR );\r
-}\r
-\r
-void RtApiAlsa :: stopStream()\r
-{\r
- verifyStream();\r
- if ( stream_.state == STREAM_STOPPED ) {\r
- errorText_ = "RtApiAlsa::stopStream(): the stream is already stopped!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- stream_.state = STREAM_STOPPED;\r
- MUTEX_LOCK( &stream_.mutex );\r
-\r
- int result = 0;\r
- AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;\r
- snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;\r
- if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {\r
- if ( apiInfo->synchronized )\r
- result = snd_pcm_drop( handle[0] );\r
- else\r
- result = snd_pcm_drain( handle[0] );\r
- if ( result < 0 ) {\r
- errorStream_ << "RtApiAlsa::stopStream: error draining output pcm device, " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- goto unlock;\r
- }\r
- }\r
-\r
- if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {\r
- result = snd_pcm_drop( handle[1] );\r
- if ( result < 0 ) {\r
- errorStream_ << "RtApiAlsa::stopStream: error stopping input pcm device, " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- goto unlock;\r
- }\r
- }\r
-\r
- unlock:\r
- apiInfo->runnable = false; // fixes high CPU usage when stopped\r
- MUTEX_UNLOCK( &stream_.mutex );\r
-\r
- if ( result >= 0 ) return;\r
- error( RtAudioError::SYSTEM_ERROR );\r
-}\r
-\r
-void RtApiAlsa :: abortStream()\r
-{\r
- verifyStream();\r
- if ( stream_.state == STREAM_STOPPED ) {\r
- errorText_ = "RtApiAlsa::abortStream(): the stream is already stopped!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- stream_.state = STREAM_STOPPED;\r
- MUTEX_LOCK( &stream_.mutex );\r
-\r
- int result = 0;\r
- AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;\r
- snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;\r
- if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {\r
- result = snd_pcm_drop( handle[0] );\r
- if ( result < 0 ) {\r
- errorStream_ << "RtApiAlsa::abortStream: error aborting output pcm device, " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- goto unlock;\r
- }\r
- }\r
-\r
- if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {\r
- result = snd_pcm_drop( handle[1] );\r
- if ( result < 0 ) {\r
- errorStream_ << "RtApiAlsa::abortStream: error aborting input pcm device, " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- goto unlock;\r
- }\r
- }\r
-\r
- unlock:\r
- apiInfo->runnable = false; // fixes high CPU usage when stopped\r
- MUTEX_UNLOCK( &stream_.mutex );\r
-\r
- if ( result >= 0 ) return;\r
- error( RtAudioError::SYSTEM_ERROR );\r
-}\r
-\r
-void RtApiAlsa :: callbackEvent()\r
-{\r
- AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;\r
- if ( stream_.state == STREAM_STOPPED ) {\r
- MUTEX_LOCK( &stream_.mutex );\r
- while ( !apiInfo->runnable )\r
- pthread_cond_wait( &apiInfo->runnable_cv, &stream_.mutex );\r
-\r
- if ( stream_.state != STREAM_RUNNING ) {\r
- MUTEX_UNLOCK( &stream_.mutex );\r
- return;\r
- }\r
- MUTEX_UNLOCK( &stream_.mutex );\r
- }\r
-\r
- if ( stream_.state == STREAM_CLOSED ) {\r
- errorText_ = "RtApiAlsa::callbackEvent(): the stream is closed ... this shouldn't happen!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- int doStopStream = 0;\r
- RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;\r
- double streamTime = getStreamTime();\r
- RtAudioStreamStatus status = 0;\r
- if ( stream_.mode != INPUT && apiInfo->xrun[0] == true ) {\r
- status |= RTAUDIO_OUTPUT_UNDERFLOW;\r
- apiInfo->xrun[0] = false;\r
- }\r
- if ( stream_.mode != OUTPUT && apiInfo->xrun[1] == true ) {\r
- status |= RTAUDIO_INPUT_OVERFLOW;\r
- apiInfo->xrun[1] = false;\r
- }\r
- doStopStream = callback( stream_.userBuffer[0], stream_.userBuffer[1],\r
- stream_.bufferSize, streamTime, status, stream_.callbackInfo.userData );\r
-\r
- if ( doStopStream == 2 ) {\r
- abortStream();\r
- return;\r
- }\r
-\r
- MUTEX_LOCK( &stream_.mutex );\r
-\r
- // The state might change while waiting on a mutex.\r
- if ( stream_.state == STREAM_STOPPED ) goto unlock;\r
-\r
- int result;\r
- char *buffer;\r
- int channels;\r
- snd_pcm_t **handle;\r
- snd_pcm_sframes_t frames;\r
- RtAudioFormat format;\r
- handle = (snd_pcm_t **) apiInfo->handles;\r
-\r
- if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {\r
-\r
- // Setup parameters.\r
- if ( stream_.doConvertBuffer[1] ) {\r
- buffer = stream_.deviceBuffer;\r
- channels = stream_.nDeviceChannels[1];\r
- format = stream_.deviceFormat[1];\r
- }\r
- else {\r
- buffer = stream_.userBuffer[1];\r
- channels = stream_.nUserChannels[1];\r
- format = stream_.userFormat;\r
- }\r
-\r
- // Read samples from device in interleaved/non-interleaved format.\r
- if ( stream_.deviceInterleaved[1] )\r
- result = snd_pcm_readi( handle[1], buffer, stream_.bufferSize );\r
- else {\r
- void *bufs[channels];\r
- size_t offset = stream_.bufferSize * formatBytes( format );\r
- for ( int i=0; i<channels; i++ )\r
- bufs[i] = (void *) (buffer + (i * offset));\r
- result = snd_pcm_readn( handle[1], bufs, stream_.bufferSize );\r
- }\r
-\r
- if ( result < (int) stream_.bufferSize ) {\r
- // Either an error or overrun occured.\r
- if ( result == -EPIPE ) {\r
- snd_pcm_state_t state = snd_pcm_state( handle[1] );\r
- if ( state == SND_PCM_STATE_XRUN ) {\r
- apiInfo->xrun[1] = true;\r
- result = snd_pcm_prepare( handle[1] );\r
- if ( result < 0 ) {\r
- errorStream_ << "RtApiAlsa::callbackEvent: error preparing device after overrun, " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- }\r
- }\r
- else {\r
- errorStream_ << "RtApiAlsa::callbackEvent: error, current state is " << snd_pcm_state_name( state ) << ", " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- }\r
- }\r
- else {\r
- errorStream_ << "RtApiAlsa::callbackEvent: audio read error, " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- }\r
- error( RtAudioError::WARNING );\r
- goto tryOutput;\r
- }\r
-\r
- // Do byte swapping if necessary.\r
- if ( stream_.doByteSwap[1] )\r
- byteSwapBuffer( buffer, stream_.bufferSize * channels, format );\r
-\r
- // Do buffer conversion if necessary.\r
- if ( stream_.doConvertBuffer[1] )\r
- convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );\r
-\r
- // Check stream latency\r
- result = snd_pcm_delay( handle[1], &frames );\r
- if ( result == 0 && frames > 0 ) stream_.latency[1] = frames;\r
- }\r
-\r
- tryOutput:\r
-\r
- if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {\r
-\r
- // Setup parameters and do buffer conversion if necessary.\r
- if ( stream_.doConvertBuffer[0] ) {\r
- buffer = stream_.deviceBuffer;\r
- convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );\r
- channels = stream_.nDeviceChannels[0];\r
- format = stream_.deviceFormat[0];\r
- }\r
- else {\r
- buffer = stream_.userBuffer[0];\r
- channels = stream_.nUserChannels[0];\r
- format = stream_.userFormat;\r
- }\r
-\r
- // Do byte swapping if necessary.\r
- if ( stream_.doByteSwap[0] )\r
- byteSwapBuffer(buffer, stream_.bufferSize * channels, format);\r
-\r
- // Write samples to device in interleaved/non-interleaved format.\r
- if ( stream_.deviceInterleaved[0] )\r
- result = snd_pcm_writei( handle[0], buffer, stream_.bufferSize );\r
- else {\r
- void *bufs[channels];\r
- size_t offset = stream_.bufferSize * formatBytes( format );\r
- for ( int i=0; i<channels; i++ )\r
- bufs[i] = (void *) (buffer + (i * offset));\r
- result = snd_pcm_writen( handle[0], bufs, stream_.bufferSize );\r
- }\r
-\r
- if ( result < (int) stream_.bufferSize ) {\r
- // Either an error or underrun occured.\r
- if ( result == -EPIPE ) {\r
- snd_pcm_state_t state = snd_pcm_state( handle[0] );\r
- if ( state == SND_PCM_STATE_XRUN ) {\r
- apiInfo->xrun[0] = true;\r
- result = snd_pcm_prepare( handle[0] );\r
- if ( result < 0 ) {\r
- errorStream_ << "RtApiAlsa::callbackEvent: error preparing device after underrun, " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- }\r
- else\r
- errorText_ = "RtApiAlsa::callbackEvent: audio write error, underrun.";\r
- }\r
- else {\r
- errorStream_ << "RtApiAlsa::callbackEvent: error, current state is " << snd_pcm_state_name( state ) << ", " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- }\r
- }\r
- else {\r
- errorStream_ << "RtApiAlsa::callbackEvent: audio write error, " << snd_strerror( result ) << ".";\r
- errorText_ = errorStream_.str();\r
- }\r
- error( RtAudioError::WARNING );\r
- goto unlock;\r
- }\r
-\r
- // Check stream latency\r
- result = snd_pcm_delay( handle[0], &frames );\r
- if ( result == 0 && frames > 0 ) stream_.latency[0] = frames;\r
- }\r
-\r
- unlock:\r
- MUTEX_UNLOCK( &stream_.mutex );\r
-\r
- RtApi::tickStreamTime();\r
- if ( doStopStream == 1 ) this->stopStream();\r
-}\r
-\r
-static void *alsaCallbackHandler( void *ptr )\r
-{\r
- CallbackInfo *info = (CallbackInfo *) ptr;\r
- RtApiAlsa *object = (RtApiAlsa *) info->object;\r
- bool *isRunning = &info->isRunning;\r
-\r
-#ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)\r
- if ( info->doRealtime ) {\r
- pthread_t tID = pthread_self(); // ID of this thread\r
- sched_param prio = { info->priority }; // scheduling priority of thread\r
- pthread_setschedparam( tID, SCHED_RR, &prio );\r
- }\r
-#endif\r
-\r
- while ( *isRunning == true ) {\r
- pthread_testcancel();\r
- object->callbackEvent();\r
- }\r
-\r
- pthread_exit( NULL );\r
-}\r
-\r
-//******************** End of __LINUX_ALSA__ *********************//\r
-#endif\r
-\r
-#if defined(__LINUX_PULSE__)\r
-\r
-// Code written by Peter Meerwald, pmeerw@pmeerw.net\r
-// and Tristan Matthews.\r
-\r
-#include <pulse/error.h>\r
-#include <pulse/simple.h>\r
-#include <cstdio>\r
-\r
-static const unsigned int SUPPORTED_SAMPLERATES[] = { 8000, 16000, 22050, 32000,\r
- 44100, 48000, 96000, 0};\r
-\r
-struct rtaudio_pa_format_mapping_t {\r
- RtAudioFormat rtaudio_format;\r
- pa_sample_format_t pa_format;\r
-};\r
-\r
-static const rtaudio_pa_format_mapping_t supported_sampleformats[] = {\r
- {RTAUDIO_SINT16, PA_SAMPLE_S16LE},\r
- {RTAUDIO_SINT32, PA_SAMPLE_S32LE},\r
- {RTAUDIO_FLOAT32, PA_SAMPLE_FLOAT32LE},\r
- {0, PA_SAMPLE_INVALID}};\r
-\r
-struct PulseAudioHandle {\r
- pa_simple *s_play;\r
- pa_simple *s_rec;\r
- pthread_t thread;\r
- pthread_cond_t runnable_cv;\r
- bool runnable;\r
- PulseAudioHandle() : s_play(0), s_rec(0), runnable(false) { }\r
-};\r
-\r
-RtApiPulse::~RtApiPulse()\r
-{\r
- if ( stream_.state != STREAM_CLOSED )\r
- closeStream();\r
-}\r
-\r
-unsigned int RtApiPulse::getDeviceCount( void )\r
-{\r
- return 1;\r
-}\r
-\r
-RtAudio::DeviceInfo RtApiPulse::getDeviceInfo( unsigned int /*device*/ )\r
-{\r
- RtAudio::DeviceInfo info;\r
- info.probed = true;\r
- info.name = "PulseAudio";\r
- info.outputChannels = 2;\r
- info.inputChannels = 2;\r
- info.duplexChannels = 2;\r
- info.isDefaultOutput = true;\r
- info.isDefaultInput = true;\r
-\r
- for ( const unsigned int *sr = SUPPORTED_SAMPLERATES; *sr; ++sr )\r
- info.sampleRates.push_back( *sr );\r
-\r
- info.preferredSampleRate = 48000;\r
- info.nativeFormats = RTAUDIO_SINT16 | RTAUDIO_SINT32 | RTAUDIO_FLOAT32;\r
-\r
- return info;\r
-}\r
-\r
-static void *pulseaudio_callback( void * user )\r
-{\r
- CallbackInfo *cbi = static_cast<CallbackInfo *>( user );\r
- RtApiPulse *context = static_cast<RtApiPulse *>( cbi->object );\r
- volatile bool *isRunning = &cbi->isRunning;\r
-\r
- while ( *isRunning ) {\r
- pthread_testcancel();\r
- context->callbackEvent();\r
- }\r
-\r
- pthread_exit( NULL );\r
-}\r
-\r
-void RtApiPulse::closeStream( void )\r
-{\r
- PulseAudioHandle *pah = static_cast<PulseAudioHandle *>( stream_.apiHandle );\r
-\r
- stream_.callbackInfo.isRunning = false;\r
- if ( pah ) {\r
- MUTEX_LOCK( &stream_.mutex );\r
- if ( stream_.state == STREAM_STOPPED ) {\r
- pah->runnable = true;\r
- pthread_cond_signal( &pah->runnable_cv );\r
- }\r
- MUTEX_UNLOCK( &stream_.mutex );\r
-\r
- pthread_join( pah->thread, 0 );\r
- if ( pah->s_play ) {\r
- pa_simple_flush( pah->s_play, NULL );\r
- pa_simple_free( pah->s_play );\r
- }\r
- if ( pah->s_rec )\r
- pa_simple_free( pah->s_rec );\r
-\r
- pthread_cond_destroy( &pah->runnable_cv );\r
- delete pah;\r
- stream_.apiHandle = 0;\r
- }\r
-\r
- if ( stream_.userBuffer[0] ) {\r
- free( stream_.userBuffer[0] );\r
- stream_.userBuffer[0] = 0;\r
- }\r
- if ( stream_.userBuffer[1] ) {\r
- free( stream_.userBuffer[1] );\r
- stream_.userBuffer[1] = 0;\r
- }\r
-\r
- stream_.state = STREAM_CLOSED;\r
- stream_.mode = UNINITIALIZED;\r
-}\r
-\r
-void RtApiPulse::callbackEvent( void )\r
-{\r
- PulseAudioHandle *pah = static_cast<PulseAudioHandle *>( stream_.apiHandle );\r
-\r
- if ( stream_.state == STREAM_STOPPED ) {\r
- MUTEX_LOCK( &stream_.mutex );\r
- while ( !pah->runnable )\r
- pthread_cond_wait( &pah->runnable_cv, &stream_.mutex );\r
-\r
- if ( stream_.state != STREAM_RUNNING ) {\r
- MUTEX_UNLOCK( &stream_.mutex );\r
- return;\r
- }\r
- MUTEX_UNLOCK( &stream_.mutex );\r
- }\r
-\r
- if ( stream_.state == STREAM_CLOSED ) {\r
- errorText_ = "RtApiPulse::callbackEvent(): the stream is closed ... "\r
- "this shouldn't happen!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;\r
- double streamTime = getStreamTime();\r
- RtAudioStreamStatus status = 0;\r
- int doStopStream = callback( stream_.userBuffer[OUTPUT], stream_.userBuffer[INPUT],\r
- stream_.bufferSize, streamTime, status,\r
- stream_.callbackInfo.userData );\r
-\r
- if ( doStopStream == 2 ) {\r
- abortStream();\r
- return;\r
- }\r
-\r
- MUTEX_LOCK( &stream_.mutex );\r
- void *pulse_in = stream_.doConvertBuffer[INPUT] ? stream_.deviceBuffer : stream_.userBuffer[INPUT];\r
- void *pulse_out = stream_.doConvertBuffer[OUTPUT] ? stream_.deviceBuffer : stream_.userBuffer[OUTPUT];\r
-\r
- if ( stream_.state != STREAM_RUNNING )\r
- goto unlock;\r
-\r
- int pa_error;\r
- size_t bytes;\r
- if (stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {\r
- if ( stream_.doConvertBuffer[OUTPUT] ) {\r
- convertBuffer( stream_.deviceBuffer,\r
- stream_.userBuffer[OUTPUT],\r
- stream_.convertInfo[OUTPUT] );\r
- bytes = stream_.nDeviceChannels[OUTPUT] * stream_.bufferSize *\r
- formatBytes( stream_.deviceFormat[OUTPUT] );\r
- } else\r
- bytes = stream_.nUserChannels[OUTPUT] * stream_.bufferSize *\r
- formatBytes( stream_.userFormat );\r
-\r
- if ( pa_simple_write( pah->s_play, pulse_out, bytes, &pa_error ) < 0 ) {\r
- errorStream_ << "RtApiPulse::callbackEvent: audio write error, " <<\r
- pa_strerror( pa_error ) << ".";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- }\r
- }\r
-\r
- if ( stream_.mode == INPUT || stream_.mode == DUPLEX) {\r
- if ( stream_.doConvertBuffer[INPUT] )\r
- bytes = stream_.nDeviceChannels[INPUT] * stream_.bufferSize *\r
- formatBytes( stream_.deviceFormat[INPUT] );\r
- else\r
- bytes = stream_.nUserChannels[INPUT] * stream_.bufferSize *\r
- formatBytes( stream_.userFormat );\r
-\r
- if ( pa_simple_read( pah->s_rec, pulse_in, bytes, &pa_error ) < 0 ) {\r
- errorStream_ << "RtApiPulse::callbackEvent: audio read error, " <<\r
- pa_strerror( pa_error ) << ".";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- }\r
- if ( stream_.doConvertBuffer[INPUT] ) {\r
- convertBuffer( stream_.userBuffer[INPUT],\r
- stream_.deviceBuffer,\r
- stream_.convertInfo[INPUT] );\r
- }\r
- }\r
-\r
- unlock:\r
- MUTEX_UNLOCK( &stream_.mutex );\r
- RtApi::tickStreamTime();\r
-\r
- if ( doStopStream == 1 )\r
- stopStream();\r
-}\r
-\r
-void RtApiPulse::startStream( void )\r
-{\r
- PulseAudioHandle *pah = static_cast<PulseAudioHandle *>( stream_.apiHandle );\r
-\r
- if ( stream_.state == STREAM_CLOSED ) {\r
- errorText_ = "RtApiPulse::startStream(): the stream is not open!";\r
- error( RtAudioError::INVALID_USE );\r
- return;\r
- }\r
- if ( stream_.state == STREAM_RUNNING ) {\r
- errorText_ = "RtApiPulse::startStream(): the stream is already running!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- MUTEX_LOCK( &stream_.mutex );\r
-\r
- stream_.state = STREAM_RUNNING;\r
-\r
- pah->runnable = true;\r
- pthread_cond_signal( &pah->runnable_cv );\r
- MUTEX_UNLOCK( &stream_.mutex );\r
-}\r
-\r
-void RtApiPulse::stopStream( void )\r
-{\r
- PulseAudioHandle *pah = static_cast<PulseAudioHandle *>( stream_.apiHandle );\r
-\r
- if ( stream_.state == STREAM_CLOSED ) {\r
- errorText_ = "RtApiPulse::stopStream(): the stream is not open!";\r
- error( RtAudioError::INVALID_USE );\r
- return;\r
- }\r
- if ( stream_.state == STREAM_STOPPED ) {\r
- errorText_ = "RtApiPulse::stopStream(): the stream is already stopped!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- stream_.state = STREAM_STOPPED;\r
- MUTEX_LOCK( &stream_.mutex );\r
-\r
- if ( pah && pah->s_play ) {\r
- int pa_error;\r
- if ( pa_simple_drain( pah->s_play, &pa_error ) < 0 ) {\r
- errorStream_ << "RtApiPulse::stopStream: error draining output device, " <<\r
- pa_strerror( pa_error ) << ".";\r
- errorText_ = errorStream_.str();\r
- MUTEX_UNLOCK( &stream_.mutex );\r
- error( RtAudioError::SYSTEM_ERROR );\r
- return;\r
- }\r
- }\r
-\r
- stream_.state = STREAM_STOPPED;\r
- MUTEX_UNLOCK( &stream_.mutex );\r
-}\r
-\r
-void RtApiPulse::abortStream( void )\r
-{\r
- PulseAudioHandle *pah = static_cast<PulseAudioHandle*>( stream_.apiHandle );\r
-\r
- if ( stream_.state == STREAM_CLOSED ) {\r
- errorText_ = "RtApiPulse::abortStream(): the stream is not open!";\r
- error( RtAudioError::INVALID_USE );\r
- return;\r
- }\r
- if ( stream_.state == STREAM_STOPPED ) {\r
- errorText_ = "RtApiPulse::abortStream(): the stream is already stopped!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- stream_.state = STREAM_STOPPED;\r
- MUTEX_LOCK( &stream_.mutex );\r
-\r
- if ( pah && pah->s_play ) {\r
- int pa_error;\r
- if ( pa_simple_flush( pah->s_play, &pa_error ) < 0 ) {\r
- errorStream_ << "RtApiPulse::abortStream: error flushing output device, " <<\r
- pa_strerror( pa_error ) << ".";\r
- errorText_ = errorStream_.str();\r
- MUTEX_UNLOCK( &stream_.mutex );\r
- error( RtAudioError::SYSTEM_ERROR );\r
- return;\r
- }\r
- }\r
-\r
- stream_.state = STREAM_STOPPED;\r
- MUTEX_UNLOCK( &stream_.mutex );\r
-}\r
-\r
-bool RtApiPulse::probeDeviceOpen( unsigned int device, StreamMode mode,\r
- unsigned int channels, unsigned int firstChannel,\r
- unsigned int sampleRate, RtAudioFormat format,\r
- unsigned int *bufferSize, RtAudio::StreamOptions *options )\r
-{\r
- PulseAudioHandle *pah = 0;\r
- unsigned long bufferBytes = 0;\r
- pa_sample_spec ss;\r
-\r
- if ( device != 0 ) return false;\r
- if ( mode != INPUT && mode != OUTPUT ) return false;\r
- if ( channels != 1 && channels != 2 ) {\r
- errorText_ = "RtApiPulse::probeDeviceOpen: unsupported number of channels.";\r
- return false;\r
- }\r
- ss.channels = channels;\r
-\r
- if ( firstChannel != 0 ) return false;\r
-\r
- bool sr_found = false;\r
- for ( const unsigned int *sr = SUPPORTED_SAMPLERATES; *sr; ++sr ) {\r
- if ( sampleRate == *sr ) {\r
- sr_found = true;\r
- stream_.sampleRate = sampleRate;\r
- ss.rate = sampleRate;\r
- break;\r
- }\r
- }\r
- if ( !sr_found ) {\r
- errorText_ = "RtApiPulse::probeDeviceOpen: unsupported sample rate.";\r
- return false;\r
- }\r
-\r
- bool sf_found = 0;\r
- for ( const rtaudio_pa_format_mapping_t *sf = supported_sampleformats;\r
- sf->rtaudio_format && sf->pa_format != PA_SAMPLE_INVALID; ++sf ) {\r
- if ( format == sf->rtaudio_format ) {\r
- sf_found = true;\r
- stream_.userFormat = sf->rtaudio_format;\r
- stream_.deviceFormat[mode] = stream_.userFormat;\r
- ss.format = sf->pa_format;\r
- break;\r
- }\r
- }\r
- if ( !sf_found ) { // Use internal data format conversion.\r
- stream_.userFormat = format;\r
- stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;\r
- ss.format = PA_SAMPLE_FLOAT32LE;\r
- }\r
-\r
- // Set other stream parameters.\r
- if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;\r
- else stream_.userInterleaved = true;\r
- stream_.deviceInterleaved[mode] = true;\r
- stream_.nBuffers = 1;\r
- stream_.doByteSwap[mode] = false;\r
- stream_.nUserChannels[mode] = channels;\r
- stream_.nDeviceChannels[mode] = channels + firstChannel;\r
- stream_.channelOffset[mode] = 0;\r
- std::string streamName = "RtAudio";\r
-\r
- // Set flags for buffer conversion.\r
- stream_.doConvertBuffer[mode] = false;\r
- if ( stream_.userFormat != stream_.deviceFormat[mode] )\r
- stream_.doConvertBuffer[mode] = true;\r
- if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )\r
- stream_.doConvertBuffer[mode] = true;\r
-\r
- // Allocate necessary internal buffers.\r
- bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );\r
- stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );\r
- if ( stream_.userBuffer[mode] == NULL ) {\r
- errorText_ = "RtApiPulse::probeDeviceOpen: error allocating user buffer memory.";\r
- goto error;\r
- }\r
- stream_.bufferSize = *bufferSize;\r
-\r
- if ( stream_.doConvertBuffer[mode] ) {\r
-\r
- bool makeBuffer = true;\r
- bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );\r
- if ( mode == INPUT ) {\r
- if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {\r
- unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );\r
- if ( bufferBytes <= bytesOut ) makeBuffer = false;\r
- }\r
- }\r
-\r
- if ( makeBuffer ) {\r
- bufferBytes *= *bufferSize;\r
- if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );\r
- stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );\r
- if ( stream_.deviceBuffer == NULL ) {\r
- errorText_ = "RtApiPulse::probeDeviceOpen: error allocating device buffer memory.";\r
- goto error;\r
- }\r
- }\r
- }\r
-\r
- stream_.device[mode] = device;\r
-\r
- // Setup the buffer conversion information structure.\r
- if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );\r
-\r
- if ( !stream_.apiHandle ) {\r
- PulseAudioHandle *pah = new PulseAudioHandle;\r
- if ( !pah ) {\r
- errorText_ = "RtApiPulse::probeDeviceOpen: error allocating memory for handle.";\r
- goto error;\r
- }\r
-\r
- stream_.apiHandle = pah;\r
- if ( pthread_cond_init( &pah->runnable_cv, NULL ) != 0 ) {\r
- errorText_ = "RtApiPulse::probeDeviceOpen: error creating condition variable.";\r
- goto error;\r
- }\r
- }\r
- pah = static_cast<PulseAudioHandle *>( stream_.apiHandle );\r
-\r
- int error;\r
- if ( options && !options->streamName.empty() ) streamName = options->streamName;\r
- switch ( mode ) {\r
- case INPUT:\r
- pa_buffer_attr buffer_attr;\r
- buffer_attr.fragsize = bufferBytes;\r
- buffer_attr.maxlength = -1;\r
-\r
- pah->s_rec = pa_simple_new( NULL, streamName.c_str(), PA_STREAM_RECORD, NULL, "Record", &ss, NULL, &buffer_attr, &error );\r
- if ( !pah->s_rec ) {\r
- errorText_ = "RtApiPulse::probeDeviceOpen: error connecting input to PulseAudio server.";\r
- goto error;\r
- }\r
- break;\r
- case OUTPUT:\r
- pah->s_play = pa_simple_new( NULL, streamName.c_str(), PA_STREAM_PLAYBACK, NULL, "Playback", &ss, NULL, NULL, &error );\r
- if ( !pah->s_play ) {\r
- errorText_ = "RtApiPulse::probeDeviceOpen: error connecting output to PulseAudio server.";\r
- goto error;\r
- }\r
- break;\r
- default:\r
- goto error;\r
- }\r
-\r
- if ( stream_.mode == UNINITIALIZED )\r
- stream_.mode = mode;\r
- else if ( stream_.mode == mode )\r
- goto error;\r
- else\r
- stream_.mode = DUPLEX;\r
-\r
- if ( !stream_.callbackInfo.isRunning ) {\r
- stream_.callbackInfo.object = this;\r
- stream_.callbackInfo.isRunning = true;\r
- if ( pthread_create( &pah->thread, NULL, pulseaudio_callback, (void *)&stream_.callbackInfo) != 0 ) {\r
- errorText_ = "RtApiPulse::probeDeviceOpen: error creating thread.";\r
- goto error;\r
- }\r
- }\r
-\r
- stream_.state = STREAM_STOPPED;\r
- return true;\r
-\r
- error:\r
- if ( pah && stream_.callbackInfo.isRunning ) {\r
- pthread_cond_destroy( &pah->runnable_cv );\r
- delete pah;\r
- stream_.apiHandle = 0;\r
- }\r
-\r
- for ( int i=0; i<2; i++ ) {\r
- if ( stream_.userBuffer[i] ) {\r
- free( stream_.userBuffer[i] );\r
- stream_.userBuffer[i] = 0;\r
- }\r
- }\r
-\r
- if ( stream_.deviceBuffer ) {\r
- free( stream_.deviceBuffer );\r
- stream_.deviceBuffer = 0;\r
- }\r
-\r
- return FAILURE;\r
-}\r
-\r
-//******************** End of __LINUX_PULSE__ *********************//\r
-#endif\r
-\r
-#if defined(__LINUX_OSS__)\r
-\r
-#include <unistd.h>\r
-#include <sys/ioctl.h>\r
-#include <unistd.h>\r
-#include <fcntl.h>\r
-#include <sys/soundcard.h>\r
-#include <errno.h>\r
-#include <math.h>\r
-\r
-static void *ossCallbackHandler(void * ptr);\r
-\r
-// A structure to hold various information related to the OSS API\r
-// implementation.\r
-struct OssHandle {\r
- int id[2]; // device ids\r
- bool xrun[2];\r
- bool triggered;\r
- pthread_cond_t runnable;\r
-\r
- OssHandle()\r
- :triggered(false) { id[0] = 0; id[1] = 0; xrun[0] = false; xrun[1] = false; }\r
-};\r
-\r
-RtApiOss :: RtApiOss()\r
-{\r
- // Nothing to do here.\r
-}\r
-\r
-RtApiOss :: ~RtApiOss()\r
-{\r
- if ( stream_.state != STREAM_CLOSED ) closeStream();\r
-}\r
-\r
-unsigned int RtApiOss :: getDeviceCount( void )\r
-{\r
- int mixerfd = open( "/dev/mixer", O_RDWR, 0 );\r
- if ( mixerfd == -1 ) {\r
- errorText_ = "RtApiOss::getDeviceCount: error opening '/dev/mixer'.";\r
- error( RtAudioError::WARNING );\r
- return 0;\r
- }\r
-\r
- oss_sysinfo sysinfo;\r
- if ( ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo ) == -1 ) {\r
- close( mixerfd );\r
- errorText_ = "RtApiOss::getDeviceCount: error getting sysinfo, OSS version >= 4.0 is required.";\r
- error( RtAudioError::WARNING );\r
- return 0;\r
- }\r
-\r
- close( mixerfd );\r
- return sysinfo.numaudios;\r
-}\r
-\r
-RtAudio::DeviceInfo RtApiOss :: getDeviceInfo( unsigned int device )\r
-{\r
- RtAudio::DeviceInfo info;\r
- info.probed = false;\r
-\r
- int mixerfd = open( "/dev/mixer", O_RDWR, 0 );\r
- if ( mixerfd == -1 ) {\r
- errorText_ = "RtApiOss::getDeviceInfo: error opening '/dev/mixer'.";\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
-\r
- oss_sysinfo sysinfo;\r
- int result = ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo );\r
- if ( result == -1 ) {\r
- close( mixerfd );\r
- errorText_ = "RtApiOss::getDeviceInfo: error getting sysinfo, OSS version >= 4.0 is required.";\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
-\r
- unsigned nDevices = sysinfo.numaudios;\r
- if ( nDevices == 0 ) {\r
- close( mixerfd );\r
- errorText_ = "RtApiOss::getDeviceInfo: no devices found!";\r
- error( RtAudioError::INVALID_USE );\r
- return info;\r
- }\r
-\r
- if ( device >= nDevices ) {\r
- close( mixerfd );\r
- errorText_ = "RtApiOss::getDeviceInfo: device ID is invalid!";\r
- error( RtAudioError::INVALID_USE );\r
- return info;\r
- }\r
-\r
- oss_audioinfo ainfo;\r
- ainfo.dev = device;\r
- result = ioctl( mixerfd, SNDCTL_AUDIOINFO, &ainfo );\r
- close( mixerfd );\r
- if ( result == -1 ) {\r
- errorStream_ << "RtApiOss::getDeviceInfo: error getting device (" << ainfo.name << ") info.";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
-\r
- // Probe channels\r
- if ( ainfo.caps & PCM_CAP_OUTPUT ) info.outputChannels = ainfo.max_channels;\r
- if ( ainfo.caps & PCM_CAP_INPUT ) info.inputChannels = ainfo.max_channels;\r
- if ( ainfo.caps & PCM_CAP_DUPLEX ) {\r
- if ( info.outputChannels > 0 && info.inputChannels > 0 && ainfo.caps & PCM_CAP_DUPLEX )\r
- info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;\r
- }\r
-\r
- // Probe data formats ... do for input\r
- unsigned long mask = ainfo.iformats;\r
- if ( mask & AFMT_S16_LE || mask & AFMT_S16_BE )\r
- info.nativeFormats |= RTAUDIO_SINT16;\r
- if ( mask & AFMT_S8 )\r
- info.nativeFormats |= RTAUDIO_SINT8;\r
- if ( mask & AFMT_S32_LE || mask & AFMT_S32_BE )\r
- info.nativeFormats |= RTAUDIO_SINT32;\r
- if ( mask & AFMT_FLOAT )\r
- info.nativeFormats |= RTAUDIO_FLOAT32;\r
- if ( mask & AFMT_S24_LE || mask & AFMT_S24_BE )\r
- info.nativeFormats |= RTAUDIO_SINT24;\r
-\r
- // Check that we have at least one supported format\r
- if ( info.nativeFormats == 0 ) {\r
- errorStream_ << "RtApiOss::getDeviceInfo: device (" << ainfo.name << ") data format not supported by RtAudio.";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- return info;\r
- }\r
-\r
- // Probe the supported sample rates.\r
- info.sampleRates.clear();\r
- if ( ainfo.nrates ) {\r
- for ( unsigned int i=0; i<ainfo.nrates; i++ ) {\r
- for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {\r
- if ( ainfo.rates[i] == SAMPLE_RATES[k] ) {\r
- info.sampleRates.push_back( SAMPLE_RATES[k] );\r
-\r
- if ( !info.preferredSampleRate || ( SAMPLE_RATES[k] <= 48000 && SAMPLE_RATES[k] > info.preferredSampleRate ) )\r
- info.preferredSampleRate = SAMPLE_RATES[k];\r
-\r
- break;\r
- }\r
- }\r
- }\r
- }\r
- else {\r
- // Check min and max rate values;\r
- for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {\r
- if ( ainfo.min_rate <= (int) SAMPLE_RATES[k] && ainfo.max_rate >= (int) SAMPLE_RATES[k] ) {\r
- info.sampleRates.push_back( SAMPLE_RATES[k] );\r
-\r
- if ( !info.preferredSampleRate || ( SAMPLE_RATES[k] <= 48000 && SAMPLE_RATES[k] > info.preferredSampleRate ) )\r
- info.preferredSampleRate = SAMPLE_RATES[k];\r
- }\r
- }\r
- }\r
-\r
- if ( info.sampleRates.size() == 0 ) {\r
- errorStream_ << "RtApiOss::getDeviceInfo: no supported sample rates found for device (" << ainfo.name << ").";\r
- errorText_ = errorStream_.str();\r
- error( RtAudioError::WARNING );\r
- }\r
- else {\r
- info.probed = true;\r
- info.name = ainfo.name;\r
- }\r
-\r
- return info;\r
-}\r
-\r
-\r
-bool RtApiOss :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,\r
- unsigned int firstChannel, unsigned int sampleRate,\r
- RtAudioFormat format, unsigned int *bufferSize,\r
- RtAudio::StreamOptions *options )\r
-{\r
- int mixerfd = open( "/dev/mixer", O_RDWR, 0 );\r
- if ( mixerfd == -1 ) {\r
- errorText_ = "RtApiOss::probeDeviceOpen: error opening '/dev/mixer'.";\r
- return FAILURE;\r
- }\r
-\r
- oss_sysinfo sysinfo;\r
- int result = ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo );\r
- if ( result == -1 ) {\r
- close( mixerfd );\r
- errorText_ = "RtApiOss::probeDeviceOpen: error getting sysinfo, OSS version >= 4.0 is required.";\r
- return FAILURE;\r
- }\r
-\r
- unsigned nDevices = sysinfo.numaudios;\r
- if ( nDevices == 0 ) {\r
- // This should not happen because a check is made before this function is called.\r
- close( mixerfd );\r
- errorText_ = "RtApiOss::probeDeviceOpen: no devices found!";\r
- return FAILURE;\r
- }\r
-\r
- if ( device >= nDevices ) {\r
- // This should not happen because a check is made before this function is called.\r
- close( mixerfd );\r
- errorText_ = "RtApiOss::probeDeviceOpen: device ID is invalid!";\r
- return FAILURE;\r
- }\r
-\r
- oss_audioinfo ainfo;\r
- ainfo.dev = device;\r
- result = ioctl( mixerfd, SNDCTL_AUDIOINFO, &ainfo );\r
- close( mixerfd );\r
- if ( result == -1 ) {\r
- errorStream_ << "RtApiOss::getDeviceInfo: error getting device (" << ainfo.name << ") info.";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // Check if device supports input or output\r
- if ( ( mode == OUTPUT && !( ainfo.caps & PCM_CAP_OUTPUT ) ) ||\r
- ( mode == INPUT && !( ainfo.caps & PCM_CAP_INPUT ) ) ) {\r
- if ( mode == OUTPUT )\r
- errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support output.";\r
- else\r
- errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support input.";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- int flags = 0;\r
- OssHandle *handle = (OssHandle *) stream_.apiHandle;\r
- if ( mode == OUTPUT )\r
- flags |= O_WRONLY;\r
- else { // mode == INPUT\r
- if (stream_.mode == OUTPUT && stream_.device[0] == device) {\r
- // We just set the same device for playback ... close and reopen for duplex (OSS only).\r
- close( handle->id[0] );\r
- handle->id[0] = 0;\r
- if ( !( ainfo.caps & PCM_CAP_DUPLEX ) ) {\r
- errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support duplex mode.";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
- // Check that the number previously set channels is the same.\r
- if ( stream_.nUserChannels[0] != channels ) {\r
- errorStream_ << "RtApiOss::probeDeviceOpen: input/output channels must be equal for OSS duplex device (" << ainfo.name << ").";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
- flags |= O_RDWR;\r
- }\r
- else\r
- flags |= O_RDONLY;\r
- }\r
-\r
- // Set exclusive access if specified.\r
- if ( options && options->flags & RTAUDIO_HOG_DEVICE ) flags |= O_EXCL;\r
-\r
- // Try to open the device.\r
- int fd;\r
- fd = open( ainfo.devnode, flags, 0 );\r
- if ( fd == -1 ) {\r
- if ( errno == EBUSY )\r
- errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") is busy.";\r
- else\r
- errorStream_ << "RtApiOss::probeDeviceOpen: error opening device (" << ainfo.name << ").";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // For duplex operation, specifically set this mode (this doesn't seem to work).\r
- /*\r
- if ( flags | O_RDWR ) {\r
- result = ioctl( fd, SNDCTL_DSP_SETDUPLEX, NULL );\r
- if ( result == -1) {\r
- errorStream_ << "RtApiOss::probeDeviceOpen: error setting duplex mode for device (" << ainfo.name << ").";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
- }\r
- */\r
-\r
- // Check the device channel support.\r
- stream_.nUserChannels[mode] = channels;\r
- if ( ainfo.max_channels < (int)(channels + firstChannel) ) {\r
- close( fd );\r
- errorStream_ << "RtApiOss::probeDeviceOpen: the device (" << ainfo.name << ") does not support requested channel parameters.";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // Set the number of channels.\r
- int deviceChannels = channels + firstChannel;\r
- result = ioctl( fd, SNDCTL_DSP_CHANNELS, &deviceChannels );\r
- if ( result == -1 || deviceChannels < (int)(channels + firstChannel) ) {\r
- close( fd );\r
- errorStream_ << "RtApiOss::probeDeviceOpen: error setting channel parameters on device (" << ainfo.name << ").";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
- stream_.nDeviceChannels[mode] = deviceChannels;\r
-\r
- // Get the data format mask\r
- int mask;\r
- result = ioctl( fd, SNDCTL_DSP_GETFMTS, &mask );\r
- if ( result == -1 ) {\r
- close( fd );\r
- errorStream_ << "RtApiOss::probeDeviceOpen: error getting device (" << ainfo.name << ") data formats.";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // Determine how to set the device format.\r
- stream_.userFormat = format;\r
- int deviceFormat = -1;\r
- stream_.doByteSwap[mode] = false;\r
- if ( format == RTAUDIO_SINT8 ) {\r
- if ( mask & AFMT_S8 ) {\r
- deviceFormat = AFMT_S8;\r
- stream_.deviceFormat[mode] = RTAUDIO_SINT8;\r
- }\r
- }\r
- else if ( format == RTAUDIO_SINT16 ) {\r
- if ( mask & AFMT_S16_NE ) {\r
- deviceFormat = AFMT_S16_NE;\r
- stream_.deviceFormat[mode] = RTAUDIO_SINT16;\r
- }\r
- else if ( mask & AFMT_S16_OE ) {\r
- deviceFormat = AFMT_S16_OE;\r
- stream_.deviceFormat[mode] = RTAUDIO_SINT16;\r
- stream_.doByteSwap[mode] = true;\r
- }\r
- }\r
- else if ( format == RTAUDIO_SINT24 ) {\r
- if ( mask & AFMT_S24_NE ) {\r
- deviceFormat = AFMT_S24_NE;\r
- stream_.deviceFormat[mode] = RTAUDIO_SINT24;\r
- }\r
- else if ( mask & AFMT_S24_OE ) {\r
- deviceFormat = AFMT_S24_OE;\r
- stream_.deviceFormat[mode] = RTAUDIO_SINT24;\r
- stream_.doByteSwap[mode] = true;\r
- }\r
- }\r
- else if ( format == RTAUDIO_SINT32 ) {\r
- if ( mask & AFMT_S32_NE ) {\r
- deviceFormat = AFMT_S32_NE;\r
- stream_.deviceFormat[mode] = RTAUDIO_SINT32;\r
- }\r
- else if ( mask & AFMT_S32_OE ) {\r
- deviceFormat = AFMT_S32_OE;\r
- stream_.deviceFormat[mode] = RTAUDIO_SINT32;\r
- stream_.doByteSwap[mode] = true;\r
- }\r
- }\r
-\r
- if ( deviceFormat == -1 ) {\r
- // The user requested format is not natively supported by the device.\r
- if ( mask & AFMT_S16_NE ) {\r
- deviceFormat = AFMT_S16_NE;\r
- stream_.deviceFormat[mode] = RTAUDIO_SINT16;\r
- }\r
- else if ( mask & AFMT_S32_NE ) {\r
- deviceFormat = AFMT_S32_NE;\r
- stream_.deviceFormat[mode] = RTAUDIO_SINT32;\r
- }\r
- else if ( mask & AFMT_S24_NE ) {\r
- deviceFormat = AFMT_S24_NE;\r
- stream_.deviceFormat[mode] = RTAUDIO_SINT24;\r
- }\r
- else if ( mask & AFMT_S16_OE ) {\r
- deviceFormat = AFMT_S16_OE;\r
- stream_.deviceFormat[mode] = RTAUDIO_SINT16;\r
- stream_.doByteSwap[mode] = true;\r
- }\r
- else if ( mask & AFMT_S32_OE ) {\r
- deviceFormat = AFMT_S32_OE;\r
- stream_.deviceFormat[mode] = RTAUDIO_SINT32;\r
- stream_.doByteSwap[mode] = true;\r
- }\r
- else if ( mask & AFMT_S24_OE ) {\r
- deviceFormat = AFMT_S24_OE;\r
- stream_.deviceFormat[mode] = RTAUDIO_SINT24;\r
- stream_.doByteSwap[mode] = true;\r
- }\r
- else if ( mask & AFMT_S8) {\r
- deviceFormat = AFMT_S8;\r
- stream_.deviceFormat[mode] = RTAUDIO_SINT8;\r
- }\r
- }\r
-\r
- if ( stream_.deviceFormat[mode] == 0 ) {\r
- // This really shouldn't happen ...\r
- close( fd );\r
- errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") data format not supported by RtAudio.";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // Set the data format.\r
- int temp = deviceFormat;\r
- result = ioctl( fd, SNDCTL_DSP_SETFMT, &deviceFormat );\r
- if ( result == -1 || deviceFormat != temp ) {\r
- close( fd );\r
- errorStream_ << "RtApiOss::probeDeviceOpen: error setting data format on device (" << ainfo.name << ").";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // Attempt to set the buffer size. According to OSS, the minimum\r
- // number of buffers is two. The supposed minimum buffer size is 16\r
- // bytes, so that will be our lower bound. The argument to this\r
- // call is in the form 0xMMMMSSSS (hex), where the buffer size (in\r
- // bytes) is given as 2^SSSS and the number of buffers as 2^MMMM.\r
- // We'll check the actual value used near the end of the setup\r
- // procedure.\r
- int ossBufferBytes = *bufferSize * formatBytes( stream_.deviceFormat[mode] ) * deviceChannels;\r
- if ( ossBufferBytes < 16 ) ossBufferBytes = 16;\r
- int buffers = 0;\r
- if ( options ) buffers = options->numberOfBuffers;\r
- if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) buffers = 2;\r
- if ( buffers < 2 ) buffers = 3;\r
- temp = ((int) buffers << 16) + (int)( log10( (double)ossBufferBytes ) / log10( 2.0 ) );\r
- result = ioctl( fd, SNDCTL_DSP_SETFRAGMENT, &temp );\r
- if ( result == -1 ) {\r
- close( fd );\r
- errorStream_ << "RtApiOss::probeDeviceOpen: error setting buffer size on device (" << ainfo.name << ").";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
- stream_.nBuffers = buffers;\r
-\r
- // Save buffer size (in sample frames).\r
- *bufferSize = ossBufferBytes / ( formatBytes(stream_.deviceFormat[mode]) * deviceChannels );\r
- stream_.bufferSize = *bufferSize;\r
-\r
- // Set the sample rate.\r
- int srate = sampleRate;\r
- result = ioctl( fd, SNDCTL_DSP_SPEED, &srate );\r
- if ( result == -1 ) {\r
- close( fd );\r
- errorStream_ << "RtApiOss::probeDeviceOpen: error setting sample rate (" << sampleRate << ") on device (" << ainfo.name << ").";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
-\r
- // Verify the sample rate setup worked.\r
- if ( abs( srate - sampleRate ) > 100 ) {\r
- close( fd );\r
- errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support sample rate (" << sampleRate << ").";\r
- errorText_ = errorStream_.str();\r
- return FAILURE;\r
- }\r
- stream_.sampleRate = sampleRate;\r
-\r
- if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] == device) {\r
- // We're doing duplex setup here.\r
- stream_.deviceFormat[0] = stream_.deviceFormat[1];\r
- stream_.nDeviceChannels[0] = deviceChannels;\r
- }\r
-\r
- // Set interleaving parameters.\r
- stream_.userInterleaved = true;\r
- stream_.deviceInterleaved[mode] = true;\r
- if ( options && options->flags & RTAUDIO_NONINTERLEAVED )\r
- stream_.userInterleaved = false;\r
-\r
- // Set flags for buffer conversion\r
- stream_.doConvertBuffer[mode] = false;\r
- if ( stream_.userFormat != stream_.deviceFormat[mode] )\r
- stream_.doConvertBuffer[mode] = true;\r
- if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )\r
- stream_.doConvertBuffer[mode] = true;\r
- if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&\r
- stream_.nUserChannels[mode] > 1 )\r
- stream_.doConvertBuffer[mode] = true;\r
-\r
- // Allocate the stream handles if necessary and then save.\r
- if ( stream_.apiHandle == 0 ) {\r
- try {\r
- handle = new OssHandle;\r
- }\r
- catch ( std::bad_alloc& ) {\r
- errorText_ = "RtApiOss::probeDeviceOpen: error allocating OssHandle memory.";\r
- goto error;\r
- }\r
-\r
- if ( pthread_cond_init( &handle->runnable, NULL ) ) {\r
- errorText_ = "RtApiOss::probeDeviceOpen: error initializing pthread condition variable.";\r
- goto error;\r
- }\r
-\r
- stream_.apiHandle = (void *) handle;\r
- }\r
- else {\r
- handle = (OssHandle *) stream_.apiHandle;\r
- }\r
- handle->id[mode] = fd;\r
-\r
- // Allocate necessary internal buffers.\r
- unsigned long bufferBytes;\r
- bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );\r
- stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );\r
- if ( stream_.userBuffer[mode] == NULL ) {\r
- errorText_ = "RtApiOss::probeDeviceOpen: error allocating user buffer memory.";\r
- goto error;\r
- }\r
-\r
- if ( stream_.doConvertBuffer[mode] ) {\r
-\r
- bool makeBuffer = true;\r
- bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );\r
- if ( mode == INPUT ) {\r
- if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {\r
- unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );\r
- if ( bufferBytes <= bytesOut ) makeBuffer = false;\r
- }\r
- }\r
-\r
- if ( makeBuffer ) {\r
- bufferBytes *= *bufferSize;\r
- if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );\r
- stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );\r
- if ( stream_.deviceBuffer == NULL ) {\r
- errorText_ = "RtApiOss::probeDeviceOpen: error allocating device buffer memory.";\r
- goto error;\r
- }\r
- }\r
- }\r
-\r
- stream_.device[mode] = device;\r
- stream_.state = STREAM_STOPPED;\r
-\r
- // Setup the buffer conversion information structure.\r
- if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );\r
-\r
- // Setup thread if necessary.\r
- if ( stream_.mode == OUTPUT && mode == INPUT ) {\r
- // We had already set up an output stream.\r
- stream_.mode = DUPLEX;\r
- if ( stream_.device[0] == device ) handle->id[0] = fd;\r
- }\r
- else {\r
- stream_.mode = mode;\r
-\r
- // Setup callback thread.\r
- stream_.callbackInfo.object = (void *) this;\r
-\r
- // Set the thread attributes for joinable and realtime scheduling\r
- // priority. The higher priority will only take affect if the\r
- // program is run as root or suid.\r
- pthread_attr_t attr;\r
- pthread_attr_init( &attr );\r
- pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );\r
-#ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)\r
- if ( options && options->flags & RTAUDIO_SCHEDULE_REALTIME ) {\r
- struct sched_param param;\r
- int priority = options->priority;\r
- int min = sched_get_priority_min( SCHED_RR );\r
- int max = sched_get_priority_max( SCHED_RR );\r
- if ( priority < min ) priority = min;\r
- else if ( priority > max ) priority = max;\r
- param.sched_priority = priority;\r
- pthread_attr_setschedparam( &attr, ¶m );\r
- pthread_attr_setschedpolicy( &attr, SCHED_RR );\r
- }\r
- else\r
- pthread_attr_setschedpolicy( &attr, SCHED_OTHER );\r
-#else\r
- pthread_attr_setschedpolicy( &attr, SCHED_OTHER );\r
-#endif\r
-\r
- stream_.callbackInfo.isRunning = true;\r
- result = pthread_create( &stream_.callbackInfo.thread, &attr, ossCallbackHandler, &stream_.callbackInfo );\r
- pthread_attr_destroy( &attr );\r
- if ( result ) {\r
- stream_.callbackInfo.isRunning = false;\r
- errorText_ = "RtApiOss::error creating callback thread!";\r
- goto error;\r
- }\r
- }\r
-\r
- return SUCCESS;\r
-\r
- error:\r
- if ( handle ) {\r
- pthread_cond_destroy( &handle->runnable );\r
- if ( handle->id[0] ) close( handle->id[0] );\r
- if ( handle->id[1] ) close( handle->id[1] );\r
- delete handle;\r
- stream_.apiHandle = 0;\r
- }\r
-\r
- for ( int i=0; i<2; i++ ) {\r
- if ( stream_.userBuffer[i] ) {\r
- free( stream_.userBuffer[i] );\r
- stream_.userBuffer[i] = 0;\r
- }\r
- }\r
-\r
- if ( stream_.deviceBuffer ) {\r
- free( stream_.deviceBuffer );\r
- stream_.deviceBuffer = 0;\r
- }\r
-\r
- return FAILURE;\r
-}\r
-\r
-void RtApiOss :: closeStream()\r
-{\r
- if ( stream_.state == STREAM_CLOSED ) {\r
- errorText_ = "RtApiOss::closeStream(): no open stream to close!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- OssHandle *handle = (OssHandle *) stream_.apiHandle;\r
- stream_.callbackInfo.isRunning = false;\r
- MUTEX_LOCK( &stream_.mutex );\r
- if ( stream_.state == STREAM_STOPPED )\r
- pthread_cond_signal( &handle->runnable );\r
- MUTEX_UNLOCK( &stream_.mutex );\r
- pthread_join( stream_.callbackInfo.thread, NULL );\r
-\r
- if ( stream_.state == STREAM_RUNNING ) {\r
- if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )\r
- ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );\r
- else\r
- ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );\r
- stream_.state = STREAM_STOPPED;\r
- }\r
-\r
- if ( handle ) {\r
- pthread_cond_destroy( &handle->runnable );\r
- if ( handle->id[0] ) close( handle->id[0] );\r
- if ( handle->id[1] ) close( handle->id[1] );\r
- delete handle;\r
- stream_.apiHandle = 0;\r
- }\r
-\r
- for ( int i=0; i<2; i++ ) {\r
- if ( stream_.userBuffer[i] ) {\r
- free( stream_.userBuffer[i] );\r
- stream_.userBuffer[i] = 0;\r
- }\r
- }\r
-\r
- if ( stream_.deviceBuffer ) {\r
- free( stream_.deviceBuffer );\r
- stream_.deviceBuffer = 0;\r
- }\r
-\r
- stream_.mode = UNINITIALIZED;\r
- stream_.state = STREAM_CLOSED;\r
-}\r
-\r
-void RtApiOss :: startStream()\r
-{\r
- verifyStream();\r
- if ( stream_.state == STREAM_RUNNING ) {\r
- errorText_ = "RtApiOss::startStream(): the stream is already running!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- MUTEX_LOCK( &stream_.mutex );\r
-\r
- stream_.state = STREAM_RUNNING;\r
-\r
- // No need to do anything else here ... OSS automatically starts\r
- // when fed samples.\r
-\r
- MUTEX_UNLOCK( &stream_.mutex );\r
-\r
- OssHandle *handle = (OssHandle *) stream_.apiHandle;\r
- pthread_cond_signal( &handle->runnable );\r
-}\r
-\r
-void RtApiOss :: stopStream()\r
-{\r
- verifyStream();\r
- if ( stream_.state == STREAM_STOPPED ) {\r
- errorText_ = "RtApiOss::stopStream(): the stream is already stopped!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- MUTEX_LOCK( &stream_.mutex );\r
-\r
- // The state might change while waiting on a mutex.\r
- if ( stream_.state == STREAM_STOPPED ) {\r
- MUTEX_UNLOCK( &stream_.mutex );\r
- return;\r
- }\r
-\r
- int result = 0;\r
- OssHandle *handle = (OssHandle *) stream_.apiHandle;\r
- if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {\r
-\r
- // Flush the output with zeros a few times.\r
- char *buffer;\r
- int samples;\r
- RtAudioFormat format;\r
-\r
- if ( stream_.doConvertBuffer[0] ) {\r
- buffer = stream_.deviceBuffer;\r
- samples = stream_.bufferSize * stream_.nDeviceChannels[0];\r
- format = stream_.deviceFormat[0];\r
- }\r
- else {\r
- buffer = stream_.userBuffer[0];\r
- samples = stream_.bufferSize * stream_.nUserChannels[0];\r
- format = stream_.userFormat;\r
- }\r
-\r
- memset( buffer, 0, samples * formatBytes(format) );\r
- for ( unsigned int i=0; i<stream_.nBuffers+1; i++ ) {\r
- result = write( handle->id[0], buffer, samples * formatBytes(format) );\r
- if ( result == -1 ) {\r
- errorText_ = "RtApiOss::stopStream: audio write error.";\r
- error( RtAudioError::WARNING );\r
- }\r
- }\r
-\r
- result = ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );\r
- if ( result == -1 ) {\r
- errorStream_ << "RtApiOss::stopStream: system error stopping callback procedure on device (" << stream_.device[0] << ").";\r
- errorText_ = errorStream_.str();\r
- goto unlock;\r
- }\r
- handle->triggered = false;\r
- }\r
-\r
- if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && handle->id[0] != handle->id[1] ) ) {\r
- result = ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );\r
- if ( result == -1 ) {\r
- errorStream_ << "RtApiOss::stopStream: system error stopping input callback procedure on device (" << stream_.device[0] << ").";\r
- errorText_ = errorStream_.str();\r
- goto unlock;\r
- }\r
- }\r
-\r
- unlock:\r
- stream_.state = STREAM_STOPPED;\r
- MUTEX_UNLOCK( &stream_.mutex );\r
-\r
- if ( result != -1 ) return;\r
- error( RtAudioError::SYSTEM_ERROR );\r
-}\r
-\r
-void RtApiOss :: abortStream()\r
-{\r
- verifyStream();\r
- if ( stream_.state == STREAM_STOPPED ) {\r
- errorText_ = "RtApiOss::abortStream(): the stream is already stopped!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- MUTEX_LOCK( &stream_.mutex );\r
-\r
- // The state might change while waiting on a mutex.\r
- if ( stream_.state == STREAM_STOPPED ) {\r
- MUTEX_UNLOCK( &stream_.mutex );\r
- return;\r
- }\r
-\r
- int result = 0;\r
- OssHandle *handle = (OssHandle *) stream_.apiHandle;\r
- if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {\r
- result = ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );\r
- if ( result == -1 ) {\r
- errorStream_ << "RtApiOss::abortStream: system error stopping callback procedure on device (" << stream_.device[0] << ").";\r
- errorText_ = errorStream_.str();\r
- goto unlock;\r
- }\r
- handle->triggered = false;\r
- }\r
-\r
- if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && handle->id[0] != handle->id[1] ) ) {\r
- result = ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );\r
- if ( result == -1 ) {\r
- errorStream_ << "RtApiOss::abortStream: system error stopping input callback procedure on device (" << stream_.device[0] << ").";\r
- errorText_ = errorStream_.str();\r
- goto unlock;\r
- }\r
- }\r
-\r
- unlock:\r
- stream_.state = STREAM_STOPPED;\r
- MUTEX_UNLOCK( &stream_.mutex );\r
-\r
- if ( result != -1 ) return;\r
- error( RtAudioError::SYSTEM_ERROR );\r
-}\r
-\r
-void RtApiOss :: callbackEvent()\r
-{\r
- OssHandle *handle = (OssHandle *) stream_.apiHandle;\r
- if ( stream_.state == STREAM_STOPPED ) {\r
- MUTEX_LOCK( &stream_.mutex );\r
- pthread_cond_wait( &handle->runnable, &stream_.mutex );\r
- if ( stream_.state != STREAM_RUNNING ) {\r
- MUTEX_UNLOCK( &stream_.mutex );\r
- return;\r
- }\r
- MUTEX_UNLOCK( &stream_.mutex );\r
- }\r
-\r
- if ( stream_.state == STREAM_CLOSED ) {\r
- errorText_ = "RtApiOss::callbackEvent(): the stream is closed ... this shouldn't happen!";\r
- error( RtAudioError::WARNING );\r
- return;\r
- }\r
-\r
- // Invoke user callback to get fresh output data.\r
- int doStopStream = 0;\r
- RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;\r
- double streamTime = getStreamTime();\r
- RtAudioStreamStatus status = 0;\r
- if ( stream_.mode != INPUT && handle->xrun[0] == true ) {\r
- status |= RTAUDIO_OUTPUT_UNDERFLOW;\r
- handle->xrun[0] = false;\r
- }\r
- if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {\r
- status |= RTAUDIO_INPUT_OVERFLOW;\r
- handle->xrun[1] = false;\r
- }\r
- doStopStream = callback( stream_.userBuffer[0], stream_.userBuffer[1],\r
- stream_.bufferSize, streamTime, status, stream_.callbackInfo.userData );\r
- if ( doStopStream == 2 ) {\r
- this->abortStream();\r
- return;\r
- }\r
-\r
- MUTEX_LOCK( &stream_.mutex );\r
-\r
- // The state might change while waiting on a mutex.\r
- if ( stream_.state == STREAM_STOPPED ) goto unlock;\r
-\r
- int result;\r
- char *buffer;\r
- int samples;\r
- RtAudioFormat format;\r
-\r
- if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {\r
-\r
- // Setup parameters and do buffer conversion if necessary.\r
- if ( stream_.doConvertBuffer[0] ) {\r
- buffer = stream_.deviceBuffer;\r
- convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );\r
- samples = stream_.bufferSize * stream_.nDeviceChannels[0];\r
- format = stream_.deviceFormat[0];\r
- }\r
- else {\r
- buffer = stream_.userBuffer[0];\r
- samples = stream_.bufferSize * stream_.nUserChannels[0];\r
- format = stream_.userFormat;\r
- }\r
-\r
- // Do byte swapping if necessary.\r
- if ( stream_.doByteSwap[0] )\r
- byteSwapBuffer( buffer, samples, format );\r
-\r
- if ( stream_.mode == DUPLEX && handle->triggered == false ) {\r
- int trig = 0;\r
- ioctl( handle->id[0], SNDCTL_DSP_SETTRIGGER, &trig );\r
- result = write( handle->id[0], buffer, samples * formatBytes(format) );\r
- trig = PCM_ENABLE_INPUT|PCM_ENABLE_OUTPUT;\r
- ioctl( handle->id[0], SNDCTL_DSP_SETTRIGGER, &trig );\r
- handle->triggered = true;\r
- }\r
- else\r
- // Write samples to device.\r
- result = write( handle->id[0], buffer, samples * formatBytes(format) );\r
-\r
- if ( result == -1 ) {\r
- // We'll assume this is an underrun, though there isn't a\r
- // specific means for determining that.\r
- handle->xrun[0] = true;\r
- errorText_ = "RtApiOss::callbackEvent: audio write error.";\r
- error( RtAudioError::WARNING );\r
- // Continue on to input section.\r
- }\r
- }\r
-\r
- if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {\r
-\r
- // Setup parameters.\r
- if ( stream_.doConvertBuffer[1] ) {\r
- buffer = stream_.deviceBuffer;\r
- samples = stream_.bufferSize * stream_.nDeviceChannels[1];\r
- format = stream_.deviceFormat[1];\r
- }\r
- else {\r
- buffer = stream_.userBuffer[1];\r
- samples = stream_.bufferSize * stream_.nUserChannels[1];\r
- format = stream_.userFormat;\r
- }\r
-\r
- // Read samples from device.\r
- result = read( handle->id[1], buffer, samples * formatBytes(format) );\r
-\r
- if ( result == -1 ) {\r
- // We'll assume this is an overrun, though there isn't a\r
- // specific means for determining that.\r
- handle->xrun[1] = true;\r
- errorText_ = "RtApiOss::callbackEvent: audio read error.";\r
- error( RtAudioError::WARNING );\r
- goto unlock;\r
- }\r
-\r
- // Do byte swapping if necessary.\r
- if ( stream_.doByteSwap[1] )\r
- byteSwapBuffer( buffer, samples, format );\r
-\r
- // Do buffer conversion if necessary.\r
- if ( stream_.doConvertBuffer[1] )\r
- convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );\r
- }\r
-\r
- unlock:\r
- MUTEX_UNLOCK( &stream_.mutex );\r
-\r
- RtApi::tickStreamTime();\r
- if ( doStopStream == 1 ) this->stopStream();\r
-}\r
-\r
-static void *ossCallbackHandler( void *ptr )\r
-{\r
- CallbackInfo *info = (CallbackInfo *) ptr;\r
- RtApiOss *object = (RtApiOss *) info->object;\r
- bool *isRunning = &info->isRunning;\r
-\r
- while ( *isRunning == true ) {\r
- pthread_testcancel();\r
- object->callbackEvent();\r
- }\r
-\r
- pthread_exit( NULL );\r
-}\r
-\r
-//******************** End of __LINUX_OSS__ *********************//\r
-#endif\r
-\r
-\r
-// *************************************************** //\r
-//\r
-// Protected common (OS-independent) RtAudio methods.\r
-//\r
-// *************************************************** //\r
-\r
-// This method can be modified to control the behavior of error\r
-// message printing.\r
-void RtApi :: error( RtAudioError::Type type )\r
-{\r
- errorStream_.str(""); // clear the ostringstream\r
-\r
- RtAudioErrorCallback errorCallback = (RtAudioErrorCallback) stream_.callbackInfo.errorCallback;\r
- if ( errorCallback ) {\r
- // abortStream() can generate new error messages. Ignore them. Just keep original one.\r
-\r
- if ( firstErrorOccurred_ )\r
- return;\r
-\r
- firstErrorOccurred_ = true;\r
- const std::string errorMessage = errorText_;\r
-\r
- if ( type != RtAudioError::WARNING && stream_.state != STREAM_STOPPED) {\r
- stream_.callbackInfo.isRunning = false; // exit from the thread\r
- abortStream();\r
- }\r
-\r
- errorCallback( type, errorMessage );\r
- firstErrorOccurred_ = false;\r
- return;\r
- }\r
-\r
- if ( type == RtAudioError::WARNING && showWarnings_ == true )\r
- std::cerr << '\n' << errorText_ << "\n\n";\r
- else if ( type != RtAudioError::WARNING )\r
- throw( RtAudioError( errorText_, type ) );\r
-}\r
-\r
-void RtApi :: verifyStream()\r
-{\r
- if ( stream_.state == STREAM_CLOSED ) {\r
- errorText_ = "RtApi:: a stream is not open!";\r
- error( RtAudioError::INVALID_USE );\r
- }\r
-}\r
-\r
-void RtApi :: clearStreamInfo()\r
-{\r
- stream_.mode = UNINITIALIZED;\r
- stream_.state = STREAM_CLOSED;\r
- stream_.sampleRate = 0;\r
- stream_.bufferSize = 0;\r
- stream_.nBuffers = 0;\r
- stream_.userFormat = 0;\r
- stream_.userInterleaved = true;\r
- stream_.streamTime = 0.0;\r
- stream_.apiHandle = 0;\r
- stream_.deviceBuffer = 0;\r
- stream_.callbackInfo.callback = 0;\r
- stream_.callbackInfo.userData = 0;\r
- stream_.callbackInfo.isRunning = false;\r
- stream_.callbackInfo.errorCallback = 0;\r
- for ( int i=0; i<2; i++ ) {\r
- stream_.device[i] = 11111;\r
- stream_.doConvertBuffer[i] = false;\r
- stream_.deviceInterleaved[i] = true;\r
- stream_.doByteSwap[i] = false;\r
- stream_.nUserChannels[i] = 0;\r
- stream_.nDeviceChannels[i] = 0;\r
- stream_.channelOffset[i] = 0;\r
- stream_.deviceFormat[i] = 0;\r
- stream_.latency[i] = 0;\r
- stream_.userBuffer[i] = 0;\r
- stream_.convertInfo[i].channels = 0;\r
- stream_.convertInfo[i].inJump = 0;\r
- stream_.convertInfo[i].outJump = 0;\r
- stream_.convertInfo[i].inFormat = 0;\r
- stream_.convertInfo[i].outFormat = 0;\r
- stream_.convertInfo[i].inOffset.clear();\r
- stream_.convertInfo[i].outOffset.clear();\r
- }\r
-}\r
-\r
-unsigned int RtApi :: formatBytes( RtAudioFormat format )\r
-{\r
- if ( format == RTAUDIO_SINT16 )\r
- return 2;\r
- else if ( format == RTAUDIO_SINT32 || format == RTAUDIO_FLOAT32 )\r
- return 4;\r
- else if ( format == RTAUDIO_FLOAT64 )\r
- return 8;\r
- else if ( format == RTAUDIO_SINT24 )\r
- return 3;\r
- else if ( format == RTAUDIO_SINT8 )\r
- return 1;\r
-\r
- errorText_ = "RtApi::formatBytes: undefined format.";\r
- error( RtAudioError::WARNING );\r
-\r
- return 0;\r
-}\r
-\r
-void RtApi :: setConvertInfo( StreamMode mode, unsigned int firstChannel )\r
-{\r
- if ( mode == INPUT ) { // convert device to user buffer\r
- stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1];\r
- stream_.convertInfo[mode].outJump = stream_.nUserChannels[1];\r
- stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1];\r
- stream_.convertInfo[mode].outFormat = stream_.userFormat;\r
- }\r
- else { // convert user to device buffer\r
- stream_.convertInfo[mode].inJump = stream_.nUserChannels[0];\r
- stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0];\r
- stream_.convertInfo[mode].inFormat = stream_.userFormat;\r
- stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0];\r
- }\r
-\r
- if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump )\r
- stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump;\r
- else\r
- stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump;\r
-\r
- // Set up the interleave/deinterleave offsets.\r
- if ( stream_.deviceInterleaved[mode] != stream_.userInterleaved ) {\r
- if ( ( mode == OUTPUT && stream_.deviceInterleaved[mode] ) ||\r
- ( mode == INPUT && stream_.userInterleaved ) ) {\r
- for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {\r
- stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );\r
- stream_.convertInfo[mode].outOffset.push_back( k );\r
- stream_.convertInfo[mode].inJump = 1;\r
- }\r
- }\r
- else {\r
- for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {\r
- stream_.convertInfo[mode].inOffset.push_back( k );\r
- stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );\r
- stream_.convertInfo[mode].outJump = 1;\r
- }\r
- }\r
- }\r
- else { // no (de)interleaving\r
- if ( stream_.userInterleaved ) {\r
- for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {\r
- stream_.convertInfo[mode].inOffset.push_back( k );\r
- stream_.convertInfo[mode].outOffset.push_back( k );\r
- }\r
- }\r
- else {\r
- for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {\r
- stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );\r
- stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );\r
- stream_.convertInfo[mode].inJump = 1;\r
- stream_.convertInfo[mode].outJump = 1;\r
- }\r
- }\r
- }\r
-\r
- // Add channel offset.\r
- if ( firstChannel > 0 ) {\r
- if ( stream_.deviceInterleaved[mode] ) {\r
- if ( mode == OUTPUT ) {\r
- for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )\r
- stream_.convertInfo[mode].outOffset[k] += firstChannel;\r
- }\r
- else {\r
- for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )\r
- stream_.convertInfo[mode].inOffset[k] += firstChannel;\r
- }\r
- }\r
- else {\r
- if ( mode == OUTPUT ) {\r
- for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )\r
- stream_.convertInfo[mode].outOffset[k] += ( firstChannel * stream_.bufferSize );\r
- }\r
- else {\r
- for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )\r
- stream_.convertInfo[mode].inOffset[k] += ( firstChannel * stream_.bufferSize );\r
- }\r
- }\r
- }\r
-}\r
-\r
-void RtApi :: convertBuffer( char *outBuffer, char *inBuffer, ConvertInfo &info )\r
-{\r
- // This function does format conversion, input/output channel compensation, and\r
- // data interleaving/deinterleaving. 24-bit integers are assumed to occupy\r
- // the lower three bytes of a 32-bit integer.\r
-\r
- // Clear our device buffer when in/out duplex device channels are different\r
- if ( outBuffer == stream_.deviceBuffer && stream_.mode == DUPLEX &&\r
- ( stream_.nDeviceChannels[0] < stream_.nDeviceChannels[1] ) )\r
- memset( outBuffer, 0, stream_.bufferSize * info.outJump * formatBytes( info.outFormat ) );\r
-\r
- int j;\r
- if (info.outFormat == RTAUDIO_FLOAT64) {\r
- Float64 scale;\r
- Float64 *out = (Float64 *)outBuffer;\r
-\r
- if (info.inFormat == RTAUDIO_SINT8) {\r
- signed char *in = (signed char *)inBuffer;\r
- scale = 1.0 / 127.5;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];\r
- out[info.outOffset[j]] += 0.5;\r
- out[info.outOffset[j]] *= scale;\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- else if (info.inFormat == RTAUDIO_SINT16) {\r
- Int16 *in = (Int16 *)inBuffer;\r
- scale = 1.0 / 32767.5;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];\r
- out[info.outOffset[j]] += 0.5;\r
- out[info.outOffset[j]] *= scale;\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- else if (info.inFormat == RTAUDIO_SINT24) {\r
- Int24 *in = (Int24 *)inBuffer;\r
- scale = 1.0 / 8388607.5;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (Float64) (in[info.inOffset[j]].asInt());\r
- out[info.outOffset[j]] += 0.5;\r
- out[info.outOffset[j]] *= scale;\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- else if (info.inFormat == RTAUDIO_SINT32) {\r
- Int32 *in = (Int32 *)inBuffer;\r
- scale = 1.0 / 2147483647.5;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];\r
- out[info.outOffset[j]] += 0.5;\r
- out[info.outOffset[j]] *= scale;\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- else if (info.inFormat == RTAUDIO_FLOAT32) {\r
- Float32 *in = (Float32 *)inBuffer;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- else if (info.inFormat == RTAUDIO_FLOAT64) {\r
- // Channel compensation and/or (de)interleaving only.\r
- Float64 *in = (Float64 *)inBuffer;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = in[info.inOffset[j]];\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- }\r
- else if (info.outFormat == RTAUDIO_FLOAT32) {\r
- Float32 scale;\r
- Float32 *out = (Float32 *)outBuffer;\r
-\r
- if (info.inFormat == RTAUDIO_SINT8) {\r
- signed char *in = (signed char *)inBuffer;\r
- scale = (Float32) ( 1.0 / 127.5 );\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];\r
- out[info.outOffset[j]] += 0.5;\r
- out[info.outOffset[j]] *= scale;\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- else if (info.inFormat == RTAUDIO_SINT16) {\r
- Int16 *in = (Int16 *)inBuffer;\r
- scale = (Float32) ( 1.0 / 32767.5 );\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];\r
- out[info.outOffset[j]] += 0.5;\r
- out[info.outOffset[j]] *= scale;\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- else if (info.inFormat == RTAUDIO_SINT24) {\r
- Int24 *in = (Int24 *)inBuffer;\r
- scale = (Float32) ( 1.0 / 8388607.5 );\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (Float32) (in[info.inOffset[j]].asInt());\r
- out[info.outOffset[j]] += 0.5;\r
- out[info.outOffset[j]] *= scale;\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- else if (info.inFormat == RTAUDIO_SINT32) {\r
- Int32 *in = (Int32 *)inBuffer;\r
- scale = (Float32) ( 1.0 / 2147483647.5 );\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];\r
- out[info.outOffset[j]] += 0.5;\r
- out[info.outOffset[j]] *= scale;\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- else if (info.inFormat == RTAUDIO_FLOAT32) {\r
- // Channel compensation and/or (de)interleaving only.\r
- Float32 *in = (Float32 *)inBuffer;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = in[info.inOffset[j]];\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- else if (info.inFormat == RTAUDIO_FLOAT64) {\r
- Float64 *in = (Float64 *)inBuffer;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- }\r
- else if (info.outFormat == RTAUDIO_SINT32) {\r
- Int32 *out = (Int32 *)outBuffer;\r
- if (info.inFormat == RTAUDIO_SINT8) {\r
- signed char *in = (signed char *)inBuffer;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];\r
- out[info.outOffset[j]] <<= 24;\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- else if (info.inFormat == RTAUDIO_SINT16) {\r
- Int16 *in = (Int16 *)inBuffer;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];\r
- out[info.outOffset[j]] <<= 16;\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- else if (info.inFormat == RTAUDIO_SINT24) {\r
- Int24 *in = (Int24 *)inBuffer;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (Int32) in[info.inOffset[j]].asInt();\r
- out[info.outOffset[j]] <<= 8;\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- else if (info.inFormat == RTAUDIO_SINT32) {\r
- // Channel compensation and/or (de)interleaving only.\r
- Int32 *in = (Int32 *)inBuffer;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = in[info.inOffset[j]];\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- else if (info.inFormat == RTAUDIO_FLOAT32) {\r
- Float32 *in = (Float32 *)inBuffer;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.5 - 0.5);\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- else if (info.inFormat == RTAUDIO_FLOAT64) {\r
- Float64 *in = (Float64 *)inBuffer;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.5 - 0.5);\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- }\r
- else if (info.outFormat == RTAUDIO_SINT24) {\r
- Int24 *out = (Int24 *)outBuffer;\r
- if (info.inFormat == RTAUDIO_SINT8) {\r
- signed char *in = (signed char *)inBuffer;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] << 16);\r
- //out[info.outOffset[j]] <<= 16;\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- else if (info.inFormat == RTAUDIO_SINT16) {\r
- Int16 *in = (Int16 *)inBuffer;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] << 8);\r
- //out[info.outOffset[j]] <<= 8;\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- else if (info.inFormat == RTAUDIO_SINT24) {\r
- // Channel compensation and/or (de)interleaving only.\r
- Int24 *in = (Int24 *)inBuffer;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = in[info.inOffset[j]];\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- else if (info.inFormat == RTAUDIO_SINT32) {\r
- Int32 *in = (Int32 *)inBuffer;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] >> 8);\r
- //out[info.outOffset[j]] >>= 8;\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- else if (info.inFormat == RTAUDIO_FLOAT32) {\r
- Float32 *in = (Float32 *)inBuffer;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 8388607.5 - 0.5);\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- else if (info.inFormat == RTAUDIO_FLOAT64) {\r
- Float64 *in = (Float64 *)inBuffer;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 8388607.5 - 0.5);\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- }\r
- else if (info.outFormat == RTAUDIO_SINT16) {\r
- Int16 *out = (Int16 *)outBuffer;\r
- if (info.inFormat == RTAUDIO_SINT8) {\r
- signed char *in = (signed char *)inBuffer;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (Int16) in[info.inOffset[j]];\r
- out[info.outOffset[j]] <<= 8;\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- else if (info.inFormat == RTAUDIO_SINT16) {\r
- // Channel compensation and/or (de)interleaving only.\r
- Int16 *in = (Int16 *)inBuffer;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = in[info.inOffset[j]];\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- else if (info.inFormat == RTAUDIO_SINT24) {\r
- Int24 *in = (Int24 *)inBuffer;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]].asInt() >> 8);\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- else if (info.inFormat == RTAUDIO_SINT32) {\r
- Int32 *in = (Int32 *)inBuffer;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (Int16) ((in[info.inOffset[j]] >> 16) & 0x0000ffff);\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- else if (info.inFormat == RTAUDIO_FLOAT32) {\r
- Float32 *in = (Float32 *)inBuffer;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]] * 32767.5 - 0.5);\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- else if (info.inFormat == RTAUDIO_FLOAT64) {\r
- Float64 *in = (Float64 *)inBuffer;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]] * 32767.5 - 0.5);\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- }\r
- else if (info.outFormat == RTAUDIO_SINT8) {\r
- signed char *out = (signed char *)outBuffer;\r
- if (info.inFormat == RTAUDIO_SINT8) {\r
- // Channel compensation and/or (de)interleaving only.\r
- signed char *in = (signed char *)inBuffer;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = in[info.inOffset[j]];\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- if (info.inFormat == RTAUDIO_SINT16) {\r
- Int16 *in = (Int16 *)inBuffer;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 8) & 0x00ff);\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- else if (info.inFormat == RTAUDIO_SINT24) {\r
- Int24 *in = (Int24 *)inBuffer;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]].asInt() >> 16);\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- else if (info.inFormat == RTAUDIO_SINT32) {\r
- Int32 *in = (Int32 *)inBuffer;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 24) & 0x000000ff);\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- else if (info.inFormat == RTAUDIO_FLOAT32) {\r
- Float32 *in = (Float32 *)inBuffer;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]] * 127.5 - 0.5);\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- else if (info.inFormat == RTAUDIO_FLOAT64) {\r
- Float64 *in = (Float64 *)inBuffer;\r
- for (unsigned int i=0; i<stream_.bufferSize; i++) {\r
- for (j=0; j<info.channels; j++) {\r
- out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]] * 127.5 - 0.5);\r
- }\r
- in += info.inJump;\r
- out += info.outJump;\r
- }\r
- }\r
- }\r
-}\r
-\r
-//static inline uint16_t bswap_16(uint16_t x) { return (x>>8) | (x<<8); }\r
-//static inline uint32_t bswap_32(uint32_t x) { return (bswap_16(x&0xffff)<<16) | (bswap_16(x>>16)); }\r
-//static inline uint64_t bswap_64(uint64_t x) { return (((unsigned long long)bswap_32(x&0xffffffffull))<<32) | (bswap_32(x>>32)); }\r
-\r
-void RtApi :: byteSwapBuffer( char *buffer, unsigned int samples, RtAudioFormat format )\r
-{\r
- char val;\r
- char *ptr;\r
-\r
- ptr = buffer;\r
- if ( format == RTAUDIO_SINT16 ) {\r
- for ( unsigned int i=0; i<samples; i++ ) {\r
- // Swap 1st and 2nd bytes.\r
- val = *(ptr);\r
- *(ptr) = *(ptr+1);\r
- *(ptr+1) = val;\r
-\r
- // Increment 2 bytes.\r
- ptr += 2;\r
- }\r
- }\r
- else if ( format == RTAUDIO_SINT32 ||\r
- format == RTAUDIO_FLOAT32 ) {\r
- for ( unsigned int i=0; i<samples; i++ ) {\r
- // Swap 1st and 4th bytes.\r
- val = *(ptr);\r
- *(ptr) = *(ptr+3);\r
- *(ptr+3) = val;\r
-\r
- // Swap 2nd and 3rd bytes.\r
- ptr += 1;\r
- val = *(ptr);\r
- *(ptr) = *(ptr+1);\r
- *(ptr+1) = val;\r
-\r
- // Increment 3 more bytes.\r
- ptr += 3;\r
- }\r
- }\r
- else if ( format == RTAUDIO_SINT24 ) {\r
- for ( unsigned int i=0; i<samples; i++ ) {\r
- // Swap 1st and 3rd bytes.\r
- val = *(ptr);\r
- *(ptr) = *(ptr+2);\r
- *(ptr+2) = val;\r
-\r
- // Increment 2 more bytes.\r
- ptr += 2;\r
- }\r
- }\r
- else if ( format == RTAUDIO_FLOAT64 ) {\r
- for ( unsigned int i=0; i<samples; i++ ) {\r
- // Swap 1st and 8th bytes\r
- val = *(ptr);\r
- *(ptr) = *(ptr+7);\r
- *(ptr+7) = val;\r
-\r
- // Swap 2nd and 7th bytes\r
- ptr += 1;\r
- val = *(ptr);\r
- *(ptr) = *(ptr+5);\r
- *(ptr+5) = val;\r
-\r
- // Swap 3rd and 6th bytes\r
- ptr += 1;\r
- val = *(ptr);\r
- *(ptr) = *(ptr+3);\r
- *(ptr+3) = val;\r
-\r
- // Swap 4th and 5th bytes\r
- ptr += 1;\r
- val = *(ptr);\r
- *(ptr) = *(ptr+1);\r
- *(ptr+1) = val;\r
-\r
- // Increment 5 more bytes.\r
- ptr += 5;\r
- }\r
- }\r
-}\r
-\r
- // Indentation settings for Vim and Emacs\r
- //\r
- // Local Variables:\r
- // c-basic-offset: 2\r
- // indent-tabs-mode: nil\r
- // End:\r
- //\r
- // vim: et sts=2 sw=2\r
+++ /dev/null
-/************************************************************************/
-/*! \class RtAudio
- \brief Realtime audio i/o C++ classes.
-
- RtAudio provides a common API (Application Programming Interface)
- for realtime audio input/output across Linux (native ALSA, Jack,
- and OSS), Macintosh OS X (CoreAudio and Jack), and Windows
- (DirectSound, ASIO and WASAPI) operating systems.
-
- RtAudio WWW site: http://www.music.mcgill.ca/~gary/rtaudio/
-
- RtAudio: realtime audio i/o C++ classes
- Copyright (c) 2001-2016 Gary P. Scavone
-
- Permission is hereby granted, free of charge, to any person
- obtaining a copy of this software and associated documentation files
- (the "Software"), to deal in the Software without restriction,
- including without limitation the rights to use, copy, modify, merge,
- publish, distribute, sublicense, and/or sell copies of the Software,
- and to permit persons to whom the Software is furnished to do so,
- subject to the following conditions:
-
- The above copyright notice and this permission notice shall be
- included in all copies or substantial portions of the Software.
-
- Any person wishing to distribute modifications to the Software is
- asked to send the modifications to the original developer so that
- they can be incorporated into the canonical version. This is,
- however, not a binding provision of this license.
-
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
- MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
- IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
- ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
- CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
- WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-/************************************************************************/
-
-/*!
- \file RtAudio.h
- */
-
-#ifndef __RTAUDIO_H
-#define __RTAUDIO_H
-
-#define RTAUDIO_VERSION "4.1.2"
-
-#include <string>
-#include <vector>
-#include <exception>
-#include <iostream>
-
-/*! \typedef typedef unsigned long RtAudioFormat;
- \brief RtAudio data format type.
-
- Support for signed integers and floats. Audio data fed to/from an
- RtAudio stream is assumed to ALWAYS be in host byte order. The
- internal routines will automatically take care of any necessary
- byte-swapping between the host format and the soundcard. Thus,
- endian-ness is not a concern in the following format definitions.
-
- - \e RTAUDIO_SINT8: 8-bit signed integer.
- - \e RTAUDIO_SINT16: 16-bit signed integer.
- - \e RTAUDIO_SINT24: 24-bit signed integer.
- - \e RTAUDIO_SINT32: 32-bit signed integer.
- - \e RTAUDIO_FLOAT32: Normalized between plus/minus 1.0.
- - \e RTAUDIO_FLOAT64: Normalized between plus/minus 1.0.
-*/
-typedef unsigned long RtAudioFormat;
-static const RtAudioFormat RTAUDIO_SINT8 = 0x1; // 8-bit signed integer.
-static const RtAudioFormat RTAUDIO_SINT16 = 0x2; // 16-bit signed integer.
-static const RtAudioFormat RTAUDIO_SINT24 = 0x4; // 24-bit signed integer.
-static const RtAudioFormat RTAUDIO_SINT32 = 0x8; // 32-bit signed integer.
-static const RtAudioFormat RTAUDIO_FLOAT32 = 0x10; // Normalized between plus/minus 1.0.
-static const RtAudioFormat RTAUDIO_FLOAT64 = 0x20; // Normalized between plus/minus 1.0.
-
-/*! \typedef typedef unsigned long RtAudioStreamFlags;
- \brief RtAudio stream option flags.
-
- The following flags can be OR'ed together to allow a client to
- make changes to the default stream behavior:
-
- - \e RTAUDIO_NONINTERLEAVED: Use non-interleaved buffers (default = interleaved).
- - \e RTAUDIO_MINIMIZE_LATENCY: Attempt to set stream parameters for lowest possible latency.
- - \e RTAUDIO_HOG_DEVICE: Attempt grab device for exclusive use.
- - \e RTAUDIO_ALSA_USE_DEFAULT: Use the "default" PCM device (ALSA only).
-
- By default, RtAudio streams pass and receive audio data from the
- client in an interleaved format. By passing the
- RTAUDIO_NONINTERLEAVED flag to the openStream() function, audio
- data will instead be presented in non-interleaved buffers. In
- this case, each buffer argument in the RtAudioCallback function
- will point to a single array of data, with \c nFrames samples for
- each channel concatenated back-to-back. For example, the first
- sample of data for the second channel would be located at index \c
- nFrames (assuming the \c buffer pointer was recast to the correct
- data type for the stream).
-
- Certain audio APIs offer a number of parameters that influence the
- I/O latency of a stream. By default, RtAudio will attempt to set
- these parameters internally for robust (glitch-free) performance
- (though some APIs, like Windows Direct Sound, make this difficult).
- By passing the RTAUDIO_MINIMIZE_LATENCY flag to the openStream()
- function, internal stream settings will be influenced in an attempt
- to minimize stream latency, though possibly at the expense of stream
- performance.
-
- If the RTAUDIO_HOG_DEVICE flag is set, RtAudio will attempt to
- open the input and/or output stream device(s) for exclusive use.
- Note that this is not possible with all supported audio APIs.
-
- If the RTAUDIO_SCHEDULE_REALTIME flag is set, RtAudio will attempt
- to select realtime scheduling (round-robin) for the callback thread.
-
- If the RTAUDIO_ALSA_USE_DEFAULT flag is set, RtAudio will attempt to
- open the "default" PCM device when using the ALSA API. Note that this
- will override any specified input or output device id.
-*/
-typedef unsigned int RtAudioStreamFlags;
-static const RtAudioStreamFlags RTAUDIO_NONINTERLEAVED = 0x1; // Use non-interleaved buffers (default = interleaved).
-static const RtAudioStreamFlags RTAUDIO_MINIMIZE_LATENCY = 0x2; // Attempt to set stream parameters for lowest possible latency.
-static const RtAudioStreamFlags RTAUDIO_HOG_DEVICE = 0x4; // Attempt grab device and prevent use by others.
-static const RtAudioStreamFlags RTAUDIO_SCHEDULE_REALTIME = 0x8; // Try to select realtime scheduling for callback thread.
-static const RtAudioStreamFlags RTAUDIO_ALSA_USE_DEFAULT = 0x10; // Use the "default" PCM device (ALSA only).
-
-/*! \typedef typedef unsigned long RtAudioStreamStatus;
- \brief RtAudio stream status (over- or underflow) flags.
-
- Notification of a stream over- or underflow is indicated by a
- non-zero stream \c status argument in the RtAudioCallback function.
- The stream status can be one of the following two options,
- depending on whether the stream is open for output and/or input:
-
- - \e RTAUDIO_INPUT_OVERFLOW: Input data was discarded because of an overflow condition at the driver.
- - \e RTAUDIO_OUTPUT_UNDERFLOW: The output buffer ran low, likely producing a break in the output sound.
-*/
-typedef unsigned int RtAudioStreamStatus;
-static const RtAudioStreamStatus RTAUDIO_INPUT_OVERFLOW = 0x1; // Input data was discarded because of an overflow condition at the driver.
-static const RtAudioStreamStatus RTAUDIO_OUTPUT_UNDERFLOW = 0x2; // The output buffer ran low, likely causing a gap in the output sound.
-
-//! RtAudio callback function prototype.
-/*!
- All RtAudio clients must create a function of type RtAudioCallback
- to read and/or write data from/to the audio stream. When the
- underlying audio system is ready for new input or output data, this
- function will be invoked.
-
- \param outputBuffer For output (or duplex) streams, the client
- should write \c nFrames of audio sample frames into this
- buffer. This argument should be recast to the datatype
- specified when the stream was opened. For input-only
- streams, this argument will be NULL.
-
- \param inputBuffer For input (or duplex) streams, this buffer will
- hold \c nFrames of input audio sample frames. This
- argument should be recast to the datatype specified when the
- stream was opened. For output-only streams, this argument
- will be NULL.
-
- \param nFrames The number of sample frames of input or output
- data in the buffers. The actual buffer size in bytes is
- dependent on the data type and number of channels in use.
-
- \param streamTime The number of seconds that have elapsed since the
- stream was started.
-
- \param status If non-zero, this argument indicates a data overflow
- or underflow condition for the stream. The particular
- condition can be determined by comparison with the
- RtAudioStreamStatus flags.
-
- \param userData A pointer to optional data provided by the client
- when opening the stream (default = NULL).
-
- To continue normal stream operation, the RtAudioCallback function
- should return a value of zero. To stop the stream and drain the
- output buffer, the function should return a value of one. To abort
- the stream immediately, the client should return a value of two.
- */
-typedef int (*RtAudioCallback)( void *outputBuffer, void *inputBuffer,
- unsigned int nFrames,
- double streamTime,
- RtAudioStreamStatus status,
- void *userData );
-
-/************************************************************************/
-/*! \class RtAudioError
- \brief Exception handling class for RtAudio.
-
- The RtAudioError class is quite simple but it does allow errors to be
- "caught" by RtAudioError::Type. See the RtAudio documentation to know
- which methods can throw an RtAudioError.
-*/
-/************************************************************************/
-
-class RtAudioError : public std::exception
-{
- public:
- //! Defined RtAudioError types.
- enum Type {
- WARNING, /*!< A non-critical error. */
- DEBUG_WARNING, /*!< A non-critical error which might be useful for debugging. */
- UNSPECIFIED, /*!< The default, unspecified error type. */
- NO_DEVICES_FOUND, /*!< No devices found on system. */
- INVALID_DEVICE, /*!< An invalid device ID was specified. */
- MEMORY_ERROR, /*!< An error occured during memory allocation. */
- INVALID_PARAMETER, /*!< An invalid parameter was specified to a function. */
- INVALID_USE, /*!< The function was called incorrectly. */
- DRIVER_ERROR, /*!< A system driver error occured. */
- SYSTEM_ERROR, /*!< A system error occured. */
- THREAD_ERROR /*!< A thread error occured. */
- };
-
- //! The constructor.
- RtAudioError( const std::string& message, Type type = RtAudioError::UNSPECIFIED ) throw() : message_(message), type_(type) {}
-
- //! The destructor.
- virtual ~RtAudioError( void ) throw() {}
-
- //! Prints thrown error message to stderr.
- virtual void printMessage( void ) const throw() { std::cerr << '\n' << message_ << "\n\n"; }
-
- //! Returns the thrown error message type.
- virtual const Type& getType(void) const throw() { return type_; }
-
- //! Returns the thrown error message string.
- virtual const std::string& getMessage(void) const throw() { return message_; }
-
- //! Returns the thrown error message as a c-style string.
- virtual const char* what( void ) const throw() { return message_.c_str(); }
-
- protected:
- std::string message_;
- Type type_;
-};
-
-//! RtAudio error callback function prototype.
-/*!
- \param type Type of error.
- \param errorText Error description.
- */
-typedef void (*RtAudioErrorCallback)( RtAudioError::Type type, const std::string &errorText );
-
-// **************************************************************** //
-//
-// RtAudio class declaration.
-//
-// RtAudio is a "controller" used to select an available audio i/o
-// interface. It presents a common API for the user to call but all
-// functionality is implemented by the class RtApi and its
-// subclasses. RtAudio creates an instance of an RtApi subclass
-// based on the user's API choice. If no choice is made, RtAudio
-// attempts to make a "logical" API selection.
-//
-// **************************************************************** //
-
-class RtApi;
-
-class RtAudio
-{
- public:
-
- //! Audio API specifier arguments.
- enum Api {
- UNSPECIFIED, /*!< Search for a working compiled API. */
- LINUX_ALSA, /*!< The Advanced Linux Sound Architecture API. */
- LINUX_PULSE, /*!< The Linux PulseAudio API. */
- LINUX_OSS, /*!< The Linux Open Sound System API. */
- UNIX_JACK, /*!< The Jack Low-Latency Audio Server API. */
- MACOSX_CORE, /*!< Macintosh OS-X Core Audio API. */
- WINDOWS_WASAPI, /*!< The Microsoft WASAPI API. */
- WINDOWS_ASIO, /*!< The Steinberg Audio Stream I/O API. */
- WINDOWS_DS, /*!< The Microsoft Direct Sound API. */
- RTAUDIO_DUMMY /*!< A compilable but non-functional API. */
- };
-
- //! The public device information structure for returning queried values.
- struct DeviceInfo {
- bool probed; /*!< true if the device capabilities were successfully probed. */
- std::string name; /*!< Character string device identifier. */
- unsigned int outputChannels; /*!< Maximum output channels supported by device. */
- unsigned int inputChannels; /*!< Maximum input channels supported by device. */
- unsigned int duplexChannels; /*!< Maximum simultaneous input/output channels supported by device. */
- bool isDefaultOutput; /*!< true if this is the default output device. */
- bool isDefaultInput; /*!< true if this is the default input device. */
- std::vector<unsigned int> sampleRates; /*!< Supported sample rates (queried from list of standard rates). */
- unsigned int preferredSampleRate; /*!< Preferred sample rate, eg. for WASAPI the system sample rate. */
- RtAudioFormat nativeFormats; /*!< Bit mask of supported data formats. */
-
- // Default constructor.
- DeviceInfo()
- :probed(false), outputChannels(0), inputChannels(0), duplexChannels(0),
- isDefaultOutput(false), isDefaultInput(false), preferredSampleRate(0), nativeFormats(0) {}
- };
-
- //! The structure for specifying input or ouput stream parameters.
- struct StreamParameters {
- unsigned int deviceId; /*!< Device index (0 to getDeviceCount() - 1). */
- unsigned int nChannels; /*!< Number of channels. */
- unsigned int firstChannel; /*!< First channel index on device (default = 0). */
-
- // Default constructor.
- StreamParameters()
- : deviceId(0), nChannels(0), firstChannel(0) {}
- };
-
- //! The structure for specifying stream options.
- /*!
- The following flags can be OR'ed together to allow a client to
- make changes to the default stream behavior:
-
- - \e RTAUDIO_NONINTERLEAVED: Use non-interleaved buffers (default = interleaved).
- - \e RTAUDIO_MINIMIZE_LATENCY: Attempt to set stream parameters for lowest possible latency.
- - \e RTAUDIO_HOG_DEVICE: Attempt grab device for exclusive use.
- - \e RTAUDIO_SCHEDULE_REALTIME: Attempt to select realtime scheduling for callback thread.
- - \e RTAUDIO_ALSA_USE_DEFAULT: Use the "default" PCM device (ALSA only).
-
- By default, RtAudio streams pass and receive audio data from the
- client in an interleaved format. By passing the
- RTAUDIO_NONINTERLEAVED flag to the openStream() function, audio
- data will instead be presented in non-interleaved buffers. In
- this case, each buffer argument in the RtAudioCallback function
- will point to a single array of data, with \c nFrames samples for
- each channel concatenated back-to-back. For example, the first
- sample of data for the second channel would be located at index \c
- nFrames (assuming the \c buffer pointer was recast to the correct
- data type for the stream).
-
- Certain audio APIs offer a number of parameters that influence the
- I/O latency of a stream. By default, RtAudio will attempt to set
- these parameters internally for robust (glitch-free) performance
- (though some APIs, like Windows Direct Sound, make this difficult).
- By passing the RTAUDIO_MINIMIZE_LATENCY flag to the openStream()
- function, internal stream settings will be influenced in an attempt
- to minimize stream latency, though possibly at the expense of stream
- performance.
-
- If the RTAUDIO_HOG_DEVICE flag is set, RtAudio will attempt to
- open the input and/or output stream device(s) for exclusive use.
- Note that this is not possible with all supported audio APIs.
-
- If the RTAUDIO_SCHEDULE_REALTIME flag is set, RtAudio will attempt
- to select realtime scheduling (round-robin) for the callback thread.
- The \c priority parameter will only be used if the RTAUDIO_SCHEDULE_REALTIME
- flag is set. It defines the thread's realtime priority.
-
- If the RTAUDIO_ALSA_USE_DEFAULT flag is set, RtAudio will attempt to
- open the "default" PCM device when using the ALSA API. Note that this
- will override any specified input or output device id.
-
- The \c numberOfBuffers parameter can be used to control stream
- latency in the Windows DirectSound, Linux OSS, and Linux Alsa APIs
- only. A value of two is usually the smallest allowed. Larger
- numbers can potentially result in more robust stream performance,
- though likely at the cost of stream latency. The value set by the
- user is replaced during execution of the RtAudio::openStream()
- function by the value actually used by the system.
-
- The \c streamName parameter can be used to set the client name
- when using the Jack API. By default, the client name is set to
- RtApiJack. However, if you wish to create multiple instances of
- RtAudio with Jack, each instance must have a unique client name.
- */
- struct StreamOptions {
- RtAudioStreamFlags flags; /*!< A bit-mask of stream flags (RTAUDIO_NONINTERLEAVED, RTAUDIO_MINIMIZE_LATENCY, RTAUDIO_HOG_DEVICE, RTAUDIO_ALSA_USE_DEFAULT). */
- unsigned int numberOfBuffers; /*!< Number of stream buffers. */
- std::string streamName; /*!< A stream name (currently used only in Jack). */
- int priority; /*!< Scheduling priority of callback thread (only used with flag RTAUDIO_SCHEDULE_REALTIME). */
-
- // Default constructor.
- StreamOptions()
- : flags(0), numberOfBuffers(0), priority(0) {}
- };
-
- //! A static function to determine the current RtAudio version.
- static std::string getVersion( void ) throw();
-
- //! A static function to determine the available compiled audio APIs.
- /*!
- The values returned in the std::vector can be compared against
- the enumerated list values. Note that there can be more than one
- API compiled for certain operating systems.
- */
- static void getCompiledApi( std::vector<RtAudio::Api> &apis ) throw();
-
- //! The class constructor.
- /*!
- The constructor performs minor initialization tasks. An exception
- can be thrown if no API support is compiled.
-
- If no API argument is specified and multiple API support has been
- compiled, the default order of use is JACK, ALSA, OSS (Linux
- systems) and ASIO, DS (Windows systems).
- */
- RtAudio( RtAudio::Api api=UNSPECIFIED );
-
- //! The destructor.
- /*!
- If a stream is running or open, it will be stopped and closed
- automatically.
- */
- ~RtAudio() throw();
-
- //! Returns the audio API specifier for the current instance of RtAudio.
- RtAudio::Api getCurrentApi( void ) throw();
-
- //! A public function that queries for the number of audio devices available.
- /*!
- This function performs a system query of available devices each time it
- is called, thus supporting devices connected \e after instantiation. If
- a system error occurs during processing, a warning will be issued.
- */
- unsigned int getDeviceCount( void ) throw();
-
- //! Return an RtAudio::DeviceInfo structure for a specified device number.
- /*!
-
- Any device integer between 0 and getDeviceCount() - 1 is valid.
- If an invalid argument is provided, an RtAudioError (type = INVALID_USE)
- will be thrown. If a device is busy or otherwise unavailable, the
- structure member "probed" will have a value of "false" and all
- other members are undefined. If the specified device is the
- current default input or output device, the corresponding
- "isDefault" member will have a value of "true".
- */
- RtAudio::DeviceInfo getDeviceInfo( unsigned int device );
-
- //! A function that returns the index of the default output device.
- /*!
- If the underlying audio API does not provide a "default
- device", or if no devices are available, the return value will be
- 0. Note that this is a valid device identifier and it is the
- client's responsibility to verify that a device is available
- before attempting to open a stream.
- */
- unsigned int getDefaultOutputDevice( void ) throw();
-
- //! A function that returns the index of the default input device.
- /*!
- If the underlying audio API does not provide a "default
- device", or if no devices are available, the return value will be
- 0. Note that this is a valid device identifier and it is the
- client's responsibility to verify that a device is available
- before attempting to open a stream.
- */
- unsigned int getDefaultInputDevice( void ) throw();
-
- //! A public function for opening a stream with the specified parameters.
- /*!
- An RtAudioError (type = SYSTEM_ERROR) is thrown if a stream cannot be
- opened with the specified parameters or an error occurs during
- processing. An RtAudioError (type = INVALID_USE) is thrown if any
- invalid device ID or channel number parameters are specified.
-
- \param outputParameters Specifies output stream parameters to use
- when opening a stream, including a device ID, number of channels,
- and starting channel number. For input-only streams, this
- argument should be NULL. The device ID is an index value between
- 0 and getDeviceCount() - 1.
- \param inputParameters Specifies input stream parameters to use
- when opening a stream, including a device ID, number of channels,
- and starting channel number. For output-only streams, this
- argument should be NULL. The device ID is an index value between
- 0 and getDeviceCount() - 1.
- \param format An RtAudioFormat specifying the desired sample data format.
- \param sampleRate The desired sample rate (sample frames per second).
- \param *bufferFrames A pointer to a value indicating the desired
- internal buffer size in sample frames. The actual value
- used by the device is returned via the same pointer. A
- value of zero can be specified, in which case the lowest
- allowable value is determined.
- \param callback A client-defined function that will be invoked
- when input data is available and/or output data is needed.
- \param userData An optional pointer to data that can be accessed
- from within the callback function.
- \param options An optional pointer to a structure containing various
- global stream options, including a list of OR'ed RtAudioStreamFlags
- and a suggested number of stream buffers that can be used to
- control stream latency. More buffers typically result in more
- robust performance, though at a cost of greater latency. If a
- value of zero is specified, a system-specific median value is
- chosen. If the RTAUDIO_MINIMIZE_LATENCY flag bit is set, the
- lowest allowable value is used. The actual value used is
- returned via the structure argument. The parameter is API dependent.
- \param errorCallback A client-defined function that will be invoked
- when an error has occured.
- */
- void openStream( RtAudio::StreamParameters *outputParameters,
- RtAudio::StreamParameters *inputParameters,
- RtAudioFormat format, unsigned int sampleRate,
- unsigned int *bufferFrames, RtAudioCallback callback,
- void *userData = NULL, RtAudio::StreamOptions *options = NULL, RtAudioErrorCallback errorCallback = NULL );
-
- //! A function that closes a stream and frees any associated stream memory.
- /*!
- If a stream is not open, this function issues a warning and
- returns (no exception is thrown).
- */
- void closeStream( void ) throw();
-
- //! A function that starts a stream.
- /*!
- An RtAudioError (type = SYSTEM_ERROR) is thrown if an error occurs
- during processing. An RtAudioError (type = INVALID_USE) is thrown if a
- stream is not open. A warning is issued if the stream is already
- running.
- */
- void startStream( void );
-
- //! Stop a stream, allowing any samples remaining in the output queue to be played.
- /*!
- An RtAudioError (type = SYSTEM_ERROR) is thrown if an error occurs
- during processing. An RtAudioError (type = INVALID_USE) is thrown if a
- stream is not open. A warning is issued if the stream is already
- stopped.
- */
- void stopStream( void );
-
- //! Stop a stream, discarding any samples remaining in the input/output queue.
- /*!
- An RtAudioError (type = SYSTEM_ERROR) is thrown if an error occurs
- during processing. An RtAudioError (type = INVALID_USE) is thrown if a
- stream is not open. A warning is issued if the stream is already
- stopped.
- */
- void abortStream( void );
-
- //! Returns true if a stream is open and false if not.
- bool isStreamOpen( void ) const throw();
-
- //! Returns true if the stream is running and false if it is stopped or not open.
- bool isStreamRunning( void ) const throw();
-
- //! Returns the number of elapsed seconds since the stream was started.
- /*!
- If a stream is not open, an RtAudioError (type = INVALID_USE) will be thrown.
- */
- double getStreamTime( void );
-
- //! Set the stream time to a time in seconds greater than or equal to 0.0.
- /*!
- If a stream is not open, an RtAudioError (type = INVALID_USE) will be thrown.
- */
- void setStreamTime( double time );
-
- //! Returns the internal stream latency in sample frames.
- /*!
- The stream latency refers to delay in audio input and/or output
- caused by internal buffering by the audio system and/or hardware.
- For duplex streams, the returned value will represent the sum of
- the input and output latencies. If a stream is not open, an
- RtAudioError (type = INVALID_USE) will be thrown. If the API does not
- report latency, the return value will be zero.
- */
- long getStreamLatency( void );
-
- //! Returns actual sample rate in use by the stream.
- /*!
- On some systems, the sample rate used may be slightly different
- than that specified in the stream parameters. If a stream is not
- open, an RtAudioError (type = INVALID_USE) will be thrown.
- */
- unsigned int getStreamSampleRate( void );
-
- //! Specify whether warning messages should be printed to stderr.
- void showWarnings( bool value = true ) throw();
-
- /* --- Monocasual hack ---------------------------------------------------- */
- //protected:
- /* ------------------------------------------------------------------------ */
-
- void openRtApi( RtAudio::Api api );
- RtApi *rtapi_;
-};
-
-// Operating system dependent thread functionality.
-#if defined(__WINDOWS_DS__) || defined(__WINDOWS_ASIO__) || defined(__WINDOWS_WASAPI__)
-
- #ifndef NOMINMAX
- #define NOMINMAX
- #endif
- #include <windows.h>
- #include <process.h>
-
- typedef uintptr_t ThreadHandle;
- typedef CRITICAL_SECTION StreamMutex;
-
-#elif defined(__LINUX_ALSA__) || defined(__LINUX_PULSE__) || defined(__UNIX_JACK__) || defined(__LINUX_OSS__) || defined(__MACOSX_CORE__)
- // Using pthread library for various flavors of unix.
- #include <pthread.h>
-
- typedef pthread_t ThreadHandle;
- typedef pthread_mutex_t StreamMutex;
-
-#else // Setup for "dummy" behavior
-
- #define __RTAUDIO_DUMMY__
- typedef int ThreadHandle;
- typedef int StreamMutex;
-
-#endif
-
-// This global structure type is used to pass callback information
-// between the private RtAudio stream structure and global callback
-// handling functions.
-struct CallbackInfo {
- void *object; // Used as a "this" pointer.
- ThreadHandle thread;
- void *callback;
- void *userData;
- void *errorCallback;
- void *apiInfo; // void pointer for API specific callback information
- bool isRunning;
- bool doRealtime;
- int priority;
-
- // Default constructor.
- CallbackInfo()
- :object(0), callback(0), userData(0), errorCallback(0), apiInfo(0), isRunning(false), doRealtime(false) {}
-};
-
-// **************************************************************** //
-//
-// RtApi class declaration.
-//
-// Subclasses of RtApi contain all API- and OS-specific code necessary
-// to fully implement the RtAudio API.
-//
-// Note that RtApi is an abstract base class and cannot be
-// explicitly instantiated. The class RtAudio will create an
-// instance of an RtApi subclass (RtApiOss, RtApiAlsa,
-// RtApiJack, RtApiCore, RtApiDs, or RtApiAsio).
-//
-// **************************************************************** //
-
-#pragma pack(push, 1)
-class S24 {
-
- protected:
- unsigned char c3[3];
-
- public:
- S24() {}
-
- S24& operator = ( const int& i ) {
- c3[0] = (i & 0x000000ff);
- c3[1] = (i & 0x0000ff00) >> 8;
- c3[2] = (i & 0x00ff0000) >> 16;
- return *this;
- }
-
- S24( const S24& v ) { *this = v; }
- S24( const double& d ) { *this = (int) d; }
- S24( const float& f ) { *this = (int) f; }
- S24( const signed short& s ) { *this = (int) s; }
- S24( const char& c ) { *this = (int) c; }
-
- int asInt() {
- int i = c3[0] | (c3[1] << 8) | (c3[2] << 16);
- if (i & 0x800000) i |= ~0xffffff;
- return i;
- }
-};
-#pragma pack(pop)
-
-#if defined( HAVE_GETTIMEOFDAY )
- #include <sys/time.h>
-#endif
-
-#include <sstream>
-
-class RtApi
-{
-public:
-
- /* --- Monocasual hack ---------------------------------------------------- */
- #if defined(__linux__) || defined(__FreeBSD__)
- void *__HACK__getJackClient();
- #endif
- /* ------------------------------------------------------------------------ */
-
- RtApi();
- virtual ~RtApi();
- virtual RtAudio::Api getCurrentApi( void ) = 0;
- virtual unsigned int getDeviceCount( void ) = 0;
- virtual RtAudio::DeviceInfo getDeviceInfo( unsigned int device ) = 0;
- virtual unsigned int getDefaultInputDevice( void );
- virtual unsigned int getDefaultOutputDevice( void );
- void openStream( RtAudio::StreamParameters *outputParameters,
- RtAudio::StreamParameters *inputParameters,
- RtAudioFormat format, unsigned int sampleRate,
- unsigned int *bufferFrames, RtAudioCallback callback,
- void *userData, RtAudio::StreamOptions *options,
- RtAudioErrorCallback errorCallback );
- virtual void closeStream( void );
- virtual void startStream( void ) = 0;
- virtual void stopStream( void ) = 0;
- virtual void abortStream( void ) = 0;
- long getStreamLatency( void );
- unsigned int getStreamSampleRate( void );
- virtual double getStreamTime( void );
- virtual void setStreamTime( double time );
- bool isStreamOpen( void ) const { return stream_.state != STREAM_CLOSED; }
- bool isStreamRunning( void ) const { return stream_.state == STREAM_RUNNING; }
- void showWarnings( bool value ) { showWarnings_ = value; }
-
-
-protected:
-
- static const unsigned int MAX_SAMPLE_RATES;
- static const unsigned int SAMPLE_RATES[];
-
- enum { FAILURE, SUCCESS };
-
- enum StreamState {
- STREAM_STOPPED,
- STREAM_STOPPING,
- STREAM_RUNNING,
- STREAM_CLOSED = -50
- };
-
- enum StreamMode {
- OUTPUT,
- INPUT,
- DUPLEX,
- UNINITIALIZED = -75
- };
-
- // A protected structure used for buffer conversion.
- struct ConvertInfo {
- int channels;
- int inJump, outJump;
- RtAudioFormat inFormat, outFormat;
- std::vector<int> inOffset;
- std::vector<int> outOffset;
- };
-
- // A protected structure for audio streams.
- struct RtApiStream {
- unsigned int device[2]; // Playback and record, respectively.
- void *apiHandle; // void pointer for API specific stream handle information
- StreamMode mode; // OUTPUT, INPUT, or DUPLEX.
- StreamState state; // STOPPED, RUNNING, or CLOSED
- char *userBuffer[2]; // Playback and record, respectively.
- char *deviceBuffer;
- bool doConvertBuffer[2]; // Playback and record, respectively.
- bool userInterleaved;
- bool deviceInterleaved[2]; // Playback and record, respectively.
- bool doByteSwap[2]; // Playback and record, respectively.
- unsigned int sampleRate;
- unsigned int bufferSize;
- unsigned int nBuffers;
- unsigned int nUserChannels[2]; // Playback and record, respectively.
- unsigned int nDeviceChannels[2]; // Playback and record channels, respectively.
- unsigned int channelOffset[2]; // Playback and record, respectively.
- unsigned long latency[2]; // Playback and record, respectively.
- RtAudioFormat userFormat;
- RtAudioFormat deviceFormat[2]; // Playback and record, respectively.
- StreamMutex mutex;
- CallbackInfo callbackInfo;
- ConvertInfo convertInfo[2];
- double streamTime; // Number of elapsed seconds since the stream started.
-
-#if defined(HAVE_GETTIMEOFDAY)
- struct timeval lastTickTimestamp;
-#endif
-
- RtApiStream()
- :apiHandle(0), deviceBuffer(0) { device[0] = 11111; device[1] = 11111; }
- };
-
- typedef S24 Int24;
- typedef signed short Int16;
- typedef signed int Int32;
- typedef float Float32;
- typedef double Float64;
-
- std::ostringstream errorStream_;
- std::string errorText_;
- bool showWarnings_;
- RtApiStream stream_;
- bool firstErrorOccurred_;
-
- /*!
- Protected, api-specific method that attempts to open a device
- with the given parameters. This function MUST be implemented by
- all subclasses. If an error is encountered during the probe, a
- "warning" message is reported and FAILURE is returned. A
- successful probe is indicated by a return value of SUCCESS.
- */
- virtual bool probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
- unsigned int firstChannel, unsigned int sampleRate,
- RtAudioFormat format, unsigned int *bufferSize,
- RtAudio::StreamOptions *options );
-
- //! A protected function used to increment the stream time.
- void tickStreamTime( void );
-
- //! Protected common method to clear an RtApiStream structure.
- void clearStreamInfo();
-
- /*!
- Protected common method that throws an RtAudioError (type =
- INVALID_USE) if a stream is not open.
- */
- void verifyStream( void );
-
- //! Protected common error method to allow global control over error handling.
- void error( RtAudioError::Type type );
-
- /*!
- Protected method used to perform format, channel number, and/or interleaving
- conversions between the user and device buffers.
- */
- void convertBuffer( char *outBuffer, char *inBuffer, ConvertInfo &info );
-
- //! Protected common method used to perform byte-swapping on buffers.
- void byteSwapBuffer( char *buffer, unsigned int samples, RtAudioFormat format );
-
- //! Protected common method that returns the number of bytes for a given format.
- unsigned int formatBytes( RtAudioFormat format );
-
- //! Protected common method that sets up the parameters for buffer conversion.
- void setConvertInfo( StreamMode mode, unsigned int firstChannel );
-};
-
-// **************************************************************** //
-//
-// Inline RtAudio definitions.
-//
-// **************************************************************** //
-
-inline RtAudio::Api RtAudio :: getCurrentApi( void ) throw() { return rtapi_->getCurrentApi(); }
-inline unsigned int RtAudio :: getDeviceCount( void ) throw() { return rtapi_->getDeviceCount(); }
-inline RtAudio::DeviceInfo RtAudio :: getDeviceInfo( unsigned int device ) { return rtapi_->getDeviceInfo( device ); }
-inline unsigned int RtAudio :: getDefaultInputDevice( void ) throw() { return rtapi_->getDefaultInputDevice(); }
-inline unsigned int RtAudio :: getDefaultOutputDevice( void ) throw() { return rtapi_->getDefaultOutputDevice(); }
-inline void RtAudio :: closeStream( void ) throw() { return rtapi_->closeStream(); }
-inline void RtAudio :: startStream( void ) { return rtapi_->startStream(); }
-inline void RtAudio :: stopStream( void ) { return rtapi_->stopStream(); }
-inline void RtAudio :: abortStream( void ) { return rtapi_->abortStream(); }
-inline bool RtAudio :: isStreamOpen( void ) const throw() { return rtapi_->isStreamOpen(); }
-inline bool RtAudio :: isStreamRunning( void ) const throw() { return rtapi_->isStreamRunning(); }
-inline long RtAudio :: getStreamLatency( void ) { return rtapi_->getStreamLatency(); }
-inline unsigned int RtAudio :: getStreamSampleRate( void ) { return rtapi_->getStreamSampleRate(); }
-inline double RtAudio :: getStreamTime( void ) { return rtapi_->getStreamTime(); }
-inline void RtAudio :: setStreamTime( double time ) { return rtapi_->setStreamTime( time ); }
-inline void RtAudio :: showWarnings( bool value ) throw() { rtapi_->showWarnings( value ); }
-
-// RtApi Subclass prototypes.
-
-#if defined(__MACOSX_CORE__)
-
-#include <CoreAudio/AudioHardware.h>
-
-class RtApiCore: public RtApi
-{
-public:
-
- RtApiCore();
- ~RtApiCore();
- RtAudio::Api getCurrentApi( void ) { return RtAudio::MACOSX_CORE; }
- unsigned int getDeviceCount( void );
- RtAudio::DeviceInfo getDeviceInfo( unsigned int device );
- unsigned int getDefaultOutputDevice( void );
- unsigned int getDefaultInputDevice( void );
- void closeStream( void );
- void startStream( void );
- void stopStream( void );
- void abortStream( void );
- long getStreamLatency( void );
-
- // This function is intended for internal use only. It must be
- // public because it is called by the internal callback handler,
- // which is not a member of RtAudio. External use of this function
- // will most likely produce highly undesireable results!
- bool callbackEvent( AudioDeviceID deviceId,
- const AudioBufferList *inBufferList,
- const AudioBufferList *outBufferList );
-
- private:
-
- bool probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
- unsigned int firstChannel, unsigned int sampleRate,
- RtAudioFormat format, unsigned int *bufferSize,
- RtAudio::StreamOptions *options );
- static const char* getErrorCode( OSStatus code );
-};
-
-#endif
-
-#if defined(__UNIX_JACK__)
-
-class RtApiJack: public RtApi
-{
-public:
-
- RtApiJack();
- ~RtApiJack();
- RtAudio::Api getCurrentApi( void ) { return RtAudio::UNIX_JACK; }
- unsigned int getDeviceCount( void );
- RtAudio::DeviceInfo getDeviceInfo( unsigned int device );
- void closeStream( void );
- void startStream( void );
- void stopStream( void );
- void abortStream( void );
- long getStreamLatency( void );
-
- // This function is intended for internal use only. It must be
- // public because it is called by the internal callback handler,
- // which is not a member of RtAudio. External use of this function
- // will most likely produce highly undesireable results!
- bool callbackEvent( unsigned long nframes );
-
- private:
-
- bool probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
- unsigned int firstChannel, unsigned int sampleRate,
- RtAudioFormat format, unsigned int *bufferSize,
- RtAudio::StreamOptions *options );
-};
-
-#endif
-
-#if defined(__WINDOWS_ASIO__)
-
-class RtApiAsio: public RtApi
-{
-public:
-
- RtApiAsio();
- ~RtApiAsio();
- RtAudio::Api getCurrentApi( void ) { return RtAudio::WINDOWS_ASIO; }
- unsigned int getDeviceCount( void );
- RtAudio::DeviceInfo getDeviceInfo( unsigned int device );
- void closeStream( void );
- void startStream( void );
- void stopStream( void );
- void abortStream( void );
- long getStreamLatency( void );
-
- // This function is intended for internal use only. It must be
- // public because it is called by the internal callback handler,
- // which is not a member of RtAudio. External use of this function
- // will most likely produce highly undesireable results!
- bool callbackEvent( long bufferIndex );
-
- private:
-
- std::vector<RtAudio::DeviceInfo> devices_;
- void saveDeviceInfo( void );
- bool coInitialized_;
- bool probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
- unsigned int firstChannel, unsigned int sampleRate,
- RtAudioFormat format, unsigned int *bufferSize,
- RtAudio::StreamOptions *options );
-};
-
-#endif
-
-#if defined(__WINDOWS_DS__)
-
-class RtApiDs: public RtApi
-{
-public:
-
- RtApiDs();
- ~RtApiDs();
- RtAudio::Api getCurrentApi( void ) { return RtAudio::WINDOWS_DS; }
- unsigned int getDeviceCount( void );
- unsigned int getDefaultOutputDevice( void );
- unsigned int getDefaultInputDevice( void );
- RtAudio::DeviceInfo getDeviceInfo( unsigned int device );
- void closeStream( void );
- void startStream( void );
- void stopStream( void );
- void abortStream( void );
- long getStreamLatency( void );
-
- // This function is intended for internal use only. It must be
- // public because it is called by the internal callback handler,
- // which is not a member of RtAudio. External use of this function
- // will most likely produce highly undesireable results!
- void callbackEvent( void );
-
- private:
-
- bool coInitialized_;
- bool buffersRolling;
- long duplexPrerollBytes;
- std::vector<struct DsDevice> dsDevices;
- bool probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
- unsigned int firstChannel, unsigned int sampleRate,
- RtAudioFormat format, unsigned int *bufferSize,
- RtAudio::StreamOptions *options );
-};
-
-#endif
-
-#if defined(__WINDOWS_WASAPI__)
-
-struct IMMDeviceEnumerator;
-
-class RtApiWasapi : public RtApi
-{
-public:
- RtApiWasapi();
- ~RtApiWasapi();
-
- RtAudio::Api getCurrentApi( void ) { return RtAudio::WINDOWS_WASAPI; }
- unsigned int getDeviceCount( void );
- RtAudio::DeviceInfo getDeviceInfo( unsigned int device );
- unsigned int getDefaultOutputDevice( void );
- unsigned int getDefaultInputDevice( void );
- void closeStream( void );
- void startStream( void );
- void stopStream( void );
- void abortStream( void );
-
-private:
- bool coInitialized_;
- IMMDeviceEnumerator* deviceEnumerator_;
-
- bool probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
- unsigned int firstChannel, unsigned int sampleRate,
- RtAudioFormat format, unsigned int* bufferSize,
- RtAudio::StreamOptions* options );
-
- static DWORD WINAPI runWasapiThread( void* wasapiPtr );
- static DWORD WINAPI stopWasapiThread( void* wasapiPtr );
- static DWORD WINAPI abortWasapiThread( void* wasapiPtr );
- void wasapiThread();
-};
-
-#endif
-
-#if defined(__LINUX_ALSA__)
-
-class RtApiAlsa: public RtApi
-{
-public:
-
- RtApiAlsa();
- ~RtApiAlsa();
- RtAudio::Api getCurrentApi() { return RtAudio::LINUX_ALSA; }
- unsigned int getDeviceCount( void );
- RtAudio::DeviceInfo getDeviceInfo( unsigned int device );
- void closeStream( void );
- void startStream( void );
- void stopStream( void );
- void abortStream( void );
-
- // This function is intended for internal use only. It must be
- // public because it is called by the internal callback handler,
- // which is not a member of RtAudio. External use of this function
- // will most likely produce highly undesireable results!
- void callbackEvent( void );
-
- private:
-
- std::vector<RtAudio::DeviceInfo> devices_;
- void saveDeviceInfo( void );
- bool probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
- unsigned int firstChannel, unsigned int sampleRate,
- RtAudioFormat format, unsigned int *bufferSize,
- RtAudio::StreamOptions *options );
-};
-
-#endif
-
-#if defined(__LINUX_PULSE__)
-
-class RtApiPulse: public RtApi
-{
-public:
- ~RtApiPulse();
- RtAudio::Api getCurrentApi() { return RtAudio::LINUX_PULSE; }
- unsigned int getDeviceCount( void );
- RtAudio::DeviceInfo getDeviceInfo( unsigned int device );
- void closeStream( void );
- void startStream( void );
- void stopStream( void );
- void abortStream( void );
-
- // This function is intended for internal use only. It must be
- // public because it is called by the internal callback handler,
- // which is not a member of RtAudio. External use of this function
- // will most likely produce highly undesireable results!
- void callbackEvent( void );
-
- private:
-
- std::vector<RtAudio::DeviceInfo> devices_;
- void saveDeviceInfo( void );
- bool probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
- unsigned int firstChannel, unsigned int sampleRate,
- RtAudioFormat format, unsigned int *bufferSize,
- RtAudio::StreamOptions *options );
-};
-
-#endif
-
-#if defined(__LINUX_OSS__)
-
-class RtApiOss: public RtApi
-{
-public:
-
- RtApiOss();
- ~RtApiOss();
- RtAudio::Api getCurrentApi() { return RtAudio::LINUX_OSS; }
- unsigned int getDeviceCount( void );
- RtAudio::DeviceInfo getDeviceInfo( unsigned int device );
- void closeStream( void );
- void startStream( void );
- void stopStream( void );
- void abortStream( void );
-
- // This function is intended for internal use only. It must be
- // public because it is called by the internal callback handler,
- // which is not a member of RtAudio. External use of this function
- // will most likely produce highly undesireable results!
- void callbackEvent( void );
-
- private:
-
- bool probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
- unsigned int firstChannel, unsigned int sampleRate,
- RtAudioFormat format, unsigned int *bufferSize,
- RtAudio::StreamOptions *options );
-};
-
-#endif
-
-#if defined(__RTAUDIO_DUMMY__)
-
-class RtApiDummy: public RtApi
-{
-public:
-
- RtApiDummy() { errorText_ = "RtApiDummy: This class provides no functionality."; error( RtAudioError::WARNING ); }
- RtAudio::Api getCurrentApi( void ) { return RtAudio::RTAUDIO_DUMMY; }
- unsigned int getDeviceCount( void ) { return 0; }
- RtAudio::DeviceInfo getDeviceInfo( unsigned int /*device*/ ) { RtAudio::DeviceInfo info; return info; }
- void closeStream( void ) {}
- void startStream( void ) {}
- void stopStream( void ) {}
- void abortStream( void ) {}
-
- private:
-
- bool probeDeviceOpen( unsigned int /*device*/, StreamMode /*mode*/, unsigned int /*channels*/,
- unsigned int /*firstChannel*/, unsigned int /*sampleRate*/,
- RtAudioFormat /*format*/, unsigned int * /*bufferSize*/,
- RtAudio::StreamOptions * /*options*/ ) { return false; }
-};
-
-#endif
-
-#endif
-
-// Indentation settings for Vim and Emacs
-//
-// Local Variables:
-// c-basic-offset: 2
-// indent-tabs-mode: nil
-// End:
-//
-// vim: et sts=2 sw=2
--- /dev/null
+/************************************************************************/
+/*! \class RtAudio
+ \brief Realtime audio i/o C++ classes.
+
+ RtAudio provides a common API (Application Programming Interface)
+ for realtime audio input/output across Linux (native ALSA, Jack,
+ and OSS), Macintosh OS X (CoreAudio and Jack), and Windows
+ (DirectSound, ASIO and WASAPI) operating systems.
+
+ RtAudio GitHub site: https://github.com/thestk/rtaudio
+ RtAudio WWW site: http://www.music.mcgill.ca/~gary/rtaudio/
+
+ RtAudio: realtime audio i/o C++ classes
+ Copyright (c) 2001-2019 Gary P. Scavone
+
+ Permission is hereby granted, free of charge, to any person
+ obtaining a copy of this software and associated documentation files
+ (the "Software"), to deal in the Software without restriction,
+ including without limitation the rights to use, copy, modify, merge,
+ publish, distribute, sublicense, and/or sell copies of the Software,
+ and to permit persons to whom the Software is furnished to do so,
+ subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ Any person wishing to distribute modifications to the Software is
+ asked to send the modifications to the original developer so that
+ they can be incorporated into the canonical version. This is,
+ however, not a binding provision of this license.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
+ ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
+ CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+/************************************************************************/
+
+// RtAudio: Version 5.1.0
+
+#include "RtAudio.h"
+#include <iostream>
+#include <cstdlib>
+#include <cstring>
+#include <climits>
+#include <cmath>
+#include <algorithm>
+
+// Static variable definitions.
+const unsigned int RtApi::MAX_SAMPLE_RATES = 14;
+const unsigned int RtApi::SAMPLE_RATES[] = {
+ 4000, 5512, 8000, 9600, 11025, 16000, 22050,
+ 32000, 44100, 48000, 88200, 96000, 176400, 192000
+};
+
+#if defined(__WINDOWS_DS__) || defined(__WINDOWS_ASIO__) || defined(__WINDOWS_WASAPI__)
+ #define MUTEX_INITIALIZE(A) InitializeCriticalSection(A)
+ #define MUTEX_DESTROY(A) DeleteCriticalSection(A)
+ #define MUTEX_LOCK(A) EnterCriticalSection(A)
+ #define MUTEX_UNLOCK(A) LeaveCriticalSection(A)
+
+ #include "tchar.h"
+
+ static std::string convertCharPointerToStdString(const char *text)
+ {
+ return std::string(text);
+ }
+
+ static std::string convertCharPointerToStdString(const wchar_t *text)
+ {
+ int length = WideCharToMultiByte(CP_UTF8, 0, text, -1, NULL, 0, NULL, NULL);
+ std::string s( length-1, '\0' );
+ WideCharToMultiByte(CP_UTF8, 0, text, -1, &s[0], length, NULL, NULL);
+ return s;
+ }
+
+#elif defined(__LINUX_ALSA__) || defined(__LINUX_PULSE__) || defined(__UNIX_JACK__) || defined(__LINUX_OSS__) || defined(__MACOSX_CORE__)
+ // pthread API
+ #define MUTEX_INITIALIZE(A) pthread_mutex_init(A, NULL)
+ #define MUTEX_DESTROY(A) pthread_mutex_destroy(A)
+ #define MUTEX_LOCK(A) pthread_mutex_lock(A)
+ #define MUTEX_UNLOCK(A) pthread_mutex_unlock(A)
+#else
+ #define MUTEX_INITIALIZE(A) abs(*A) // dummy definitions
+ #define MUTEX_DESTROY(A) abs(*A) // dummy definitions
+#endif
+
+// *************************************************** //
+//
+// RtAudio definitions.
+//
+// *************************************************** //
+
+std::string RtAudio :: getVersion( void )
+{
+ return RTAUDIO_VERSION;
+}
+
+// Define API names and display names.
+// Must be in same order as API enum.
+extern "C" {
+const char* rtaudio_api_names[][2] = {
+ { "unspecified" , "Unknown" },
+ { "alsa" , "ALSA" },
+ { "pulse" , "Pulse" },
+ { "oss" , "OpenSoundSystem" },
+ { "jack" , "Jack" },
+ { "core" , "CoreAudio" },
+ { "wasapi" , "WASAPI" },
+ { "asio" , "ASIO" },
+ { "ds" , "DirectSound" },
+ { "dummy" , "Dummy" },
+};
+const unsigned int rtaudio_num_api_names =
+ sizeof(rtaudio_api_names)/sizeof(rtaudio_api_names[0]);
+
+// The order here will control the order of RtAudio's API search in
+// the constructor.
+extern "C" const RtAudio::Api rtaudio_compiled_apis[] = {
+#if defined(__UNIX_JACK__)
+ RtAudio::UNIX_JACK,
+#endif
+#if defined(__LINUX_PULSE__)
+ RtAudio::LINUX_PULSE,
+#endif
+#if defined(__LINUX_ALSA__)
+ RtAudio::LINUX_ALSA,
+#endif
+#if defined(__LINUX_OSS__)
+ RtAudio::LINUX_OSS,
+#endif
+#if defined(__WINDOWS_ASIO__)
+ RtAudio::WINDOWS_ASIO,
+#endif
+#if defined(__WINDOWS_WASAPI__)
+ RtAudio::WINDOWS_WASAPI,
+#endif
+#if defined(__WINDOWS_DS__)
+ RtAudio::WINDOWS_DS,
+#endif
+#if defined(__MACOSX_CORE__)
+ RtAudio::MACOSX_CORE,
+#endif
+#if defined(__RTAUDIO_DUMMY__)
+ RtAudio::RTAUDIO_DUMMY,
+#endif
+ RtAudio::UNSPECIFIED,
+};
+extern "C" const unsigned int rtaudio_num_compiled_apis =
+ sizeof(rtaudio_compiled_apis)/sizeof(rtaudio_compiled_apis[0])-1;
+}
+
+// This is a compile-time check that rtaudio_num_api_names == RtAudio::NUM_APIS.
+// If the build breaks here, check that they match.
+template<bool b> class StaticAssert { private: StaticAssert() {} };
+template<> class StaticAssert<true>{ public: StaticAssert() {} };
+class StaticAssertions { StaticAssertions() {
+ StaticAssert<rtaudio_num_api_names == RtAudio::NUM_APIS>();
+}};
+
+void RtAudio :: getCompiledApi( std::vector<RtAudio::Api> &apis )
+{
+ apis = std::vector<RtAudio::Api>(rtaudio_compiled_apis,
+ rtaudio_compiled_apis + rtaudio_num_compiled_apis);
+}
+
+std::string RtAudio :: getApiName( RtAudio::Api api )
+{
+ if (api < 0 || api >= RtAudio::NUM_APIS)
+ return "";
+ return rtaudio_api_names[api][0];
+}
+
+std::string RtAudio :: getApiDisplayName( RtAudio::Api api )
+{
+ if (api < 0 || api >= RtAudio::NUM_APIS)
+ return "Unknown";
+ return rtaudio_api_names[api][1];
+}
+
+RtAudio::Api RtAudio :: getCompiledApiByName( const std::string &name )
+{
+ unsigned int i=0;
+ for (i = 0; i < rtaudio_num_compiled_apis; ++i)
+ if (name == rtaudio_api_names[rtaudio_compiled_apis[i]][0])
+ return rtaudio_compiled_apis[i];
+ return RtAudio::UNSPECIFIED;
+}
+
+void RtAudio :: openRtApi( RtAudio::Api api )
+{
+ if ( rtapi_ )
+ delete rtapi_;
+ rtapi_ = 0;
+
+#if defined(__UNIX_JACK__)
+ if ( api == UNIX_JACK )
+ rtapi_ = new RtApiJack();
+#endif
+#if defined(__LINUX_ALSA__)
+ if ( api == LINUX_ALSA )
+ rtapi_ = new RtApiAlsa();
+#endif
+#if defined(__LINUX_PULSE__)
+ if ( api == LINUX_PULSE )
+ rtapi_ = new RtApiPulse();
+#endif
+#if defined(__LINUX_OSS__)
+ if ( api == LINUX_OSS )
+ rtapi_ = new RtApiOss();
+#endif
+#if defined(__WINDOWS_ASIO__)
+ if ( api == WINDOWS_ASIO )
+ rtapi_ = new RtApiAsio();
+#endif
+#if defined(__WINDOWS_WASAPI__)
+ if ( api == WINDOWS_WASAPI )
+ rtapi_ = new RtApiWasapi();
+#endif
+#if defined(__WINDOWS_DS__)
+ if ( api == WINDOWS_DS )
+ rtapi_ = new RtApiDs();
+#endif
+#if defined(__MACOSX_CORE__)
+ if ( api == MACOSX_CORE )
+ rtapi_ = new RtApiCore();
+#endif
+#if defined(__RTAUDIO_DUMMY__)
+ if ( api == RTAUDIO_DUMMY )
+ rtapi_ = new RtApiDummy();
+#endif
+}
+
+RtAudio :: RtAudio( RtAudio::Api api )
+{
+ rtapi_ = 0;
+
+ if ( api != UNSPECIFIED ) {
+ // Attempt to open the specified API.
+ openRtApi( api );
+ if ( rtapi_ ) return;
+
+ // No compiled support for specified API value. Issue a debug
+ // warning and continue as if no API was specified.
+ std::cerr << "\nRtAudio: no compiled support for specified API argument!\n" << std::endl;
+ }
+
+ // Iterate through the compiled APIs and return as soon as we find
+ // one with at least one device or we reach the end of the list.
+ std::vector< RtAudio::Api > apis;
+ getCompiledApi( apis );
+ for ( unsigned int i=0; i<apis.size(); i++ ) {
+ openRtApi( apis[i] );
+ if ( rtapi_ && rtapi_->getDeviceCount() ) break;
+ }
+
+ if ( rtapi_ ) return;
+
+ // It should not be possible to get here because the preprocessor
+ // definition __RTAUDIO_DUMMY__ is automatically defined if no
+ // API-specific definitions are passed to the compiler. But just in
+ // case something weird happens, we'll thow an error.
+ std::string errorText = "\nRtAudio: no compiled API support found ... critical error!!\n\n";
+ throw( RtAudioError( errorText, RtAudioError::UNSPECIFIED ) );
+}
+
+RtAudio :: ~RtAudio()
+{
+ if ( rtapi_ )
+ delete rtapi_;
+}
+
+void RtAudio :: openStream( RtAudio::StreamParameters *outputParameters,
+ RtAudio::StreamParameters *inputParameters,
+ RtAudioFormat format, unsigned int sampleRate,
+ unsigned int *bufferFrames,
+ RtAudioCallback callback, void *userData,
+ RtAudio::StreamOptions *options,
+ RtAudioErrorCallback errorCallback )
+{
+ return rtapi_->openStream( outputParameters, inputParameters, format,
+ sampleRate, bufferFrames, callback,
+ userData, options, errorCallback );
+}
+
+// *************************************************** //
+//
+// Public RtApi definitions (see end of file for
+// private or protected utility functions).
+//
+// *************************************************** //
+
+RtApi :: RtApi()
+{
+ stream_.state = STREAM_CLOSED;
+ stream_.mode = UNINITIALIZED;
+ stream_.apiHandle = 0;
+ stream_.userBuffer[0] = 0;
+ stream_.userBuffer[1] = 0;
+ MUTEX_INITIALIZE( &stream_.mutex );
+ showWarnings_ = true;
+ firstErrorOccurred_ = false;
+}
+
+RtApi :: ~RtApi()
+{
+ MUTEX_DESTROY( &stream_.mutex );
+}
+
+// Open a stream with the given output and/or input parameters.
+// Performs all API-independent argument validation here, then delegates
+// the device-specific work to probeDeviceOpen(), once per direction.
+// On any failure error() is invoked and the stream remains closed; on
+// success the user callback information is recorded and the stream is
+// left in the STREAM_STOPPED state.
+void RtApi :: openStream( RtAudio::StreamParameters *oParams,
+ RtAudio::StreamParameters *iParams,
+ RtAudioFormat format, unsigned int sampleRate,
+ unsigned int *bufferFrames,
+ RtAudioCallback callback, void *userData,
+ RtAudio::StreamOptions *options,
+ RtAudioErrorCallback errorCallback )
+{
+ if ( stream_.state != STREAM_CLOSED ) {
+ errorText_ = "RtApi::openStream: a stream is already open!";
+ error( RtAudioError::INVALID_USE );
+ return;
+ }
+
+ // Clear stream information potentially left from a previously open stream.
+ clearStreamInfo();
+
+ if ( oParams && oParams->nChannels < 1 ) {
+ errorText_ = "RtApi::openStream: a non-NULL output StreamParameters structure cannot have an nChannels value less than one.";
+ error( RtAudioError::INVALID_USE );
+ return;
+ }
+
+ if ( iParams && iParams->nChannels < 1 ) {
+ errorText_ = "RtApi::openStream: a non-NULL input StreamParameters structure cannot have an nChannels value less than one.";
+ error( RtAudioError::INVALID_USE );
+ return;
+ }
+
+ if ( oParams == NULL && iParams == NULL ) {
+ errorText_ = "RtApi::openStream: input and output StreamParameters structures are both NULL!";
+ error( RtAudioError::INVALID_USE );
+ return;
+ }
+
+ // formatBytes() returns 0 for an unrecognized RtAudioFormat value.
+ if ( formatBytes(format) == 0 ) {
+ errorText_ = "RtApi::openStream: 'format' parameter value is undefined.";
+ error( RtAudioError::INVALID_USE );
+ return;
+ }
+
+ // Validate the requested device IDs against the current device count.
+ unsigned int nDevices = getDeviceCount();
+ unsigned int oChannels = 0;
+ if ( oParams ) {
+ oChannels = oParams->nChannels;
+ if ( oParams->deviceId >= nDevices ) {
+ errorText_ = "RtApi::openStream: output device parameter value is invalid.";
+ error( RtAudioError::INVALID_USE );
+ return;
+ }
+ }
+
+ unsigned int iChannels = 0;
+ if ( iParams ) {
+ iChannels = iParams->nChannels;
+ if ( iParams->deviceId >= nDevices ) {
+ errorText_ = "RtApi::openStream: input device parameter value is invalid.";
+ error( RtAudioError::INVALID_USE );
+ return;
+ }
+ }
+
+ bool result;
+
+ // Probe/open each requested direction via the API-specific code.
+ if ( oChannels > 0 ) {
+
+ result = probeDeviceOpen( oParams->deviceId, OUTPUT, oChannels, oParams->firstChannel,
+ sampleRate, format, bufferFrames, options );
+ if ( result == false ) {
+ error( RtAudioError::SYSTEM_ERROR );
+ return;
+ }
+ }
+
+ if ( iChannels > 0 ) {
+
+ result = probeDeviceOpen( iParams->deviceId, INPUT, iChannels, iParams->firstChannel,
+ sampleRate, format, bufferFrames, options );
+ if ( result == false ) {
+ // Undo the already-opened output half of a duplex stream.
+ if ( oChannels > 0 ) closeStream();
+ error( RtAudioError::SYSTEM_ERROR );
+ return;
+ }
+ }
+
+ // Stream opened successfully: record the user callback information.
+ stream_.callbackInfo.callback = (void *) callback;
+ stream_.callbackInfo.userData = userData;
+ stream_.callbackInfo.errorCallback = (void *) errorCallback;
+
+ // Report the buffer count actually in use back to the caller.
+ if ( options ) options->numberOfBuffers = stream_.nBuffers;
+ stream_.state = STREAM_STOPPED;
+}
+
+// Base-class fallback: report device 0 as the default input device.
+unsigned int RtApi :: getDefaultInputDevice( void )
+{
+ // Should be implemented in subclasses if possible.
+ return 0;
+}
+
+// Base-class fallback: report device 0 as the default output device.
+unsigned int RtApi :: getDefaultOutputDevice( void )
+{
+ // Should be implemented in subclasses if possible.
+ return 0;
+}
+
+// Base-class stub: does nothing. Every concrete API backend overrides
+// this to tear down its stream resources.
+void RtApi :: closeStream( void )
+{
+ // MUST be implemented in subclasses!
+ return;
+}
+
+// Base-class stub: always fails. Every concrete API backend overrides
+// this to perform the device-specific open (called from openStream()).
+bool RtApi :: probeDeviceOpen( unsigned int /*device*/, StreamMode /*mode*/, unsigned int /*channels*/,
+ unsigned int /*firstChannel*/, unsigned int /*sampleRate*/,
+ RtAudioFormat /*format*/, unsigned int * /*bufferSize*/,
+ RtAudio::StreamOptions * /*options*/ )
+{
+ // MUST be implemented in subclasses!
+ return FAILURE;
+}
+
+// Advance the stream clock by exactly one buffer's duration (in
+// seconds). When gettimeofday() is available, also record a wall-clock
+// timestamp so getStreamTime() can interpolate between ticks.
+void RtApi :: tickStreamTime( void )
+{
+ // Subclasses that do not provide their own implementation of
+ // getStreamTime should call this function once per buffer I/O to
+ // provide basic stream time support.
+
+ stream_.streamTime += ( stream_.bufferSize * 1.0 / stream_.sampleRate );
+
+#if defined( HAVE_GETTIMEOFDAY )
+ gettimeofday( &stream_.lastTickTimestamp, NULL );
+#endif
+}
+
+// Return the total stream latency in frames: output latency
+// (latency[0]) plus input latency (latency[1]), according to the
+// stream direction(s). verifyStream() checks that a stream is open.
+long RtApi :: getStreamLatency( void )
+{
+ verifyStream();
+
+ long totalLatency = 0;
+ if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
+ totalLatency = stream_.latency[0];
+ if ( stream_.mode == INPUT || stream_.mode == DUPLEX )
+ totalLatency += stream_.latency[1];
+
+ return totalLatency;
+}
+
+// Return the current stream time in seconds. With gettimeofday()
+// support, the time advanced since the last tickStreamTime() call is
+// added for a finer-grained estimate; otherwise the raw per-buffer
+// stream time is returned.
+double RtApi :: getStreamTime( void )
+{
+ verifyStream();
+
+#if defined( HAVE_GETTIMEOFDAY )
+ // Return a very accurate estimate of the stream time by
+ // adding in the elapsed time since the last tick.
+ struct timeval then;
+ struct timeval now;
+
+ // No interpolation when the stream isn't running or hasn't ticked yet.
+ if ( stream_.state != STREAM_RUNNING || stream_.streamTime == 0.0 )
+ return stream_.streamTime;
+
+ gettimeofday( &now, NULL );
+ then = stream_.lastTickTimestamp;
+ return stream_.streamTime +
+ ((now.tv_sec + 0.000001 * now.tv_usec) -
+ (then.tv_sec + 0.000001 * then.tv_usec));
+#else
+ return stream_.streamTime;
+#endif
+}
+
+// Set the stream clock to an arbitrary non-negative time (seconds);
+// negative values are silently ignored. The tick timestamp is reset so
+// getStreamTime() interpolates from the new value.
+void RtApi :: setStreamTime( double time )
+{
+ verifyStream();
+
+ if ( time >= 0.0 )
+ stream_.streamTime = time;
+#if defined( HAVE_GETTIMEOFDAY )
+ gettimeofday( &stream_.lastTickTimestamp, NULL );
+#endif
+}
+
+// Return the sample rate of the currently open stream.
+unsigned int RtApi :: getStreamSampleRate( void )
+{
+ verifyStream();
+
+ return stream_.sampleRate;
+}
+
+
+// *************************************************** //
+//
+// OS/API-specific methods.
+//
+// *************************************************** //
+
+#if defined(__MACOSX_CORE__)
+
+// The OS X CoreAudio API is designed to use a separate callback
+// procedure for each of its audio devices. A single RtAudio duplex
+// stream using two different devices is supported here, though it
+// cannot be guaranteed to always behave correctly because we cannot
+// synchronize these two callbacks.
+//
+// A property listener is installed for over/underrun information.
+// However, no functionality is currently provided to allow property
+// listeners to trigger user handlers because it is unclear what could
+// be done if a critical stream parameter (buffer size, sample rate,
+// device disconnect) notification arrived. The listeners entail
+// quite a bit of extra code and most likely, a user program wouldn't
+// be prepared for the result anyway. However, we do provide a flag
+// to the client callback function to inform of an over/underrun.
+
+// A structure to hold various information related to the CoreAudio API
+// implementation.
+// A structure to hold various information related to the CoreAudio API
+// implementation. Two-element arrays follow the RtAudio convention:
+// index [0] = output, [1] = input (see xrunListener below).
+struct CoreHandle {
+ AudioDeviceID id[2]; // device ids
+#if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )
+ AudioDeviceIOProcID procId[2]; // IOProc IDs for the 10.5+ AudioDeviceCreateIOProcID API
+#endif
+ UInt32 iStream[2]; // device stream index (or first if using multiple)
+ UInt32 nStreams[2]; // number of streams to use
+ bool xrun[2]; // over/underrun flags set by the xrunListener property listener
+ char *deviceBuffer;
+ pthread_cond_t condition; // NOTE(review): presumably signals stream stop/drain completion — usage is outside this view
+ int drainCounter; // Tracks callback counts when draining
+ bool internalDrain; // Indicates if stop is initiated from callback or not.
+
+ CoreHandle()
+ :deviceBuffer(0), drainCounter(0), internalDrain(false) { nStreams[0] = 1; nStreams[1] = 1; id[0] = 0; id[1] = 0; xrun[0] = false; xrun[1] = false; }
+};
+
+// Constructor: on OS-X 10.6+ assigns a NULL run loop to the HAL so
+// that device property queries/updates work from non-main threads.
+// Failure is reported as a warning only.
+RtApiCore:: RtApiCore()
+{
+#if defined( AVAILABLE_MAC_OS_X_VERSION_10_6_AND_LATER )
+ // This is a largely undocumented but absolutely necessary
+ // requirement starting with OS-X 10.6. If not called, queries and
+ // updates to various audio device properties are not handled
+ // correctly.
+ CFRunLoopRef theRunLoop = NULL;
+ AudioObjectPropertyAddress property = { kAudioHardwarePropertyRunLoop,
+ kAudioObjectPropertyScopeGlobal,
+ kAudioObjectPropertyElementMaster };
+ OSStatus result = AudioObjectSetPropertyData( kAudioObjectSystemObject, &property, 0, NULL, sizeof(CFRunLoopRef), &theRunLoop);
+ if ( result != noErr ) {
+ errorText_ = "RtApiCore::RtApiCore: error setting run loop property!";
+ error( RtAudioError::WARNING );
+ }
+#endif
+}
+
+// Destructor: close any stream still open, while the derived-class
+// state is still alive.
+RtApiCore :: ~RtApiCore()
+{
+ // The subclass destructor gets called before the base class
+ // destructor, so close an existing stream before deallocating
+ // apiDeviceId memory.
+ if ( stream_.state != STREAM_CLOSED ) closeStream();
+}
+
+// Return the number of CoreAudio devices by querying the byte size of
+// the system device list and dividing by sizeof(AudioDeviceID).
+// Returns 0 (with a warning) if the query fails.
+unsigned int RtApiCore :: getDeviceCount( void )
+{
+ // Find out how many audio devices there are, if any.
+ UInt32 dataSize;
+ AudioObjectPropertyAddress propertyAddress = { kAudioHardwarePropertyDevices, kAudioObjectPropertyScopeGlobal, kAudioObjectPropertyElementMaster };
+ OSStatus result = AudioObjectGetPropertyDataSize( kAudioObjectSystemObject, &propertyAddress, 0, NULL, &dataSize );
+ if ( result != noErr ) {
+ errorText_ = "RtApiCore::getDeviceCount: OS-X error getting device info!";
+ error( RtAudioError::WARNING );
+ return 0;
+ }
+
+ return dataSize / sizeof( AudioDeviceID );
+}
+
+// Return the RtAudio index of the system default input device:
+// query the default device's AudioDeviceID, then locate that ID in the
+// full device list. Returns 0 (with a warning) on any failure.
+unsigned int RtApiCore :: getDefaultInputDevice( void )
+{
+ unsigned int nDevices = getDeviceCount();
+ // With zero or one device, index 0 is trivially the answer.
+ if ( nDevices <= 1 ) return 0;
+
+ AudioDeviceID id;
+ UInt32 dataSize = sizeof( AudioDeviceID );
+ AudioObjectPropertyAddress property = { kAudioHardwarePropertyDefaultInputDevice, kAudioObjectPropertyScopeGlobal, kAudioObjectPropertyElementMaster };
+ OSStatus result = AudioObjectGetPropertyData( kAudioObjectSystemObject, &property, 0, NULL, &dataSize, &id );
+ if ( result != noErr ) {
+ errorText_ = "RtApiCore::getDefaultInputDevice: OS-X system error getting device.";
+ error( RtAudioError::WARNING );
+ return 0;
+ }
+
+ // Grow the size to cover the whole device list, then fetch it.
+ // NOTE(review): variable-length array is a compiler extension, not standard C++.
+ dataSize *= nDevices;
+ AudioDeviceID deviceList[ nDevices ];
+ property.mSelector = kAudioHardwarePropertyDevices;
+ result = AudioObjectGetPropertyData( kAudioObjectSystemObject, &property, 0, NULL, &dataSize, (void *) &deviceList );
+ if ( result != noErr ) {
+ errorText_ = "RtApiCore::getDefaultInputDevice: OS-X system error getting device IDs.";
+ error( RtAudioError::WARNING );
+ return 0;
+ }
+
+ // Map the default device's ID to its position in the device list.
+ for ( unsigned int i=0; i<nDevices; i++ )
+ if ( id == deviceList[i] ) return i;
+
+ errorText_ = "RtApiCore::getDefaultInputDevice: No default device found!";
+ error( RtAudioError::WARNING );
+ return 0;
+}
+
+// Return the RtAudio index of the system default output device:
+// query the default device's AudioDeviceID, then locate that ID in the
+// full device list. Returns 0 (with a warning) on any failure.
+unsigned int RtApiCore :: getDefaultOutputDevice( void )
+{
+ unsigned int nDevices = getDeviceCount();
+ // With zero or one device, index 0 is trivially the answer.
+ if ( nDevices <= 1 ) return 0;
+
+ AudioDeviceID id;
+ UInt32 dataSize = sizeof( AudioDeviceID );
+ AudioObjectPropertyAddress property = { kAudioHardwarePropertyDefaultOutputDevice, kAudioObjectPropertyScopeGlobal, kAudioObjectPropertyElementMaster };
+ OSStatus result = AudioObjectGetPropertyData( kAudioObjectSystemObject, &property, 0, NULL, &dataSize, &id );
+ if ( result != noErr ) {
+ errorText_ = "RtApiCore::getDefaultOutputDevice: OS-X system error getting device.";
+ error( RtAudioError::WARNING );
+ return 0;
+ }
+
+ // Size the buffer for the whole device list, then fetch it.
+ // NOTE(review): variable-length array is a compiler extension, not standard C++.
+ dataSize = sizeof( AudioDeviceID ) * nDevices;
+ AudioDeviceID deviceList[ nDevices ];
+ property.mSelector = kAudioHardwarePropertyDevices;
+ result = AudioObjectGetPropertyData( kAudioObjectSystemObject, &property, 0, NULL, &dataSize, (void *) &deviceList );
+ if ( result != noErr ) {
+ errorText_ = "RtApiCore::getDefaultOutputDevice: OS-X system error getting device IDs.";
+ error( RtAudioError::WARNING );
+ return 0;
+ }
+
+ // Map the default device's ID to its position in the device list.
+ for ( unsigned int i=0; i<nDevices; i++ )
+ if ( id == deviceList[i] ) return i;
+
+ errorText_ = "RtApiCore::getDefaultOutputDevice: No default device found!";
+ error( RtAudioError::WARNING );
+ return 0;
+}
+
+// Probe a CoreAudio device and fill in an RtAudio::DeviceInfo:
+// name ("manufacturer: device"), input/output/duplex channel counts,
+// supported sample rates, native format (always FLOAT32 on CoreAudio)
+// and default-device flags. On any failure a warning is raised and the
+// partially filled structure is returned with info.probed == false.
+RtAudio::DeviceInfo RtApiCore :: getDeviceInfo( unsigned int device )
+{
+ RtAudio::DeviceInfo info;
+ info.probed = false;
+
+ // Get device ID
+ unsigned int nDevices = getDeviceCount();
+ if ( nDevices == 0 ) {
+ errorText_ = "RtApiCore::getDeviceInfo: no devices found!";
+ error( RtAudioError::INVALID_USE );
+ return info;
+ }
+
+ if ( device >= nDevices ) {
+ errorText_ = "RtApiCore::getDeviceInfo: device ID is invalid!";
+ error( RtAudioError::INVALID_USE );
+ return info;
+ }
+
+ // Fetch the full device list and translate the RtAudio index into a
+ // CoreAudio AudioDeviceID.
+ // NOTE(review): variable-length array is a compiler extension, not standard C++.
+ AudioDeviceID deviceList[ nDevices ];
+ UInt32 dataSize = sizeof( AudioDeviceID ) * nDevices;
+ AudioObjectPropertyAddress property = { kAudioHardwarePropertyDevices,
+ kAudioObjectPropertyScopeGlobal,
+ kAudioObjectPropertyElementMaster };
+ OSStatus result = AudioObjectGetPropertyData( kAudioObjectSystemObject, &property,
+ 0, NULL, &dataSize, (void *) &deviceList );
+ if ( result != noErr ) {
+ errorText_ = "RtApiCore::getDeviceInfo: OS-X system error getting device IDs.";
+ error( RtAudioError::WARNING );
+ return info;
+ }
+
+ AudioDeviceID id = deviceList[ device ];
+
+ // Get the device name.
+ info.name.erase();
+ CFStringRef cfname;
+ dataSize = sizeof( CFStringRef );
+ property.mSelector = kAudioObjectPropertyManufacturer;
+ result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &cfname );
+ if ( result != noErr ) {
+ errorStream_ << "RtApiCore::probeDeviceInfo: system error (" << getErrorCode( result ) << ") getting device manufacturer.";
+ errorText_ = errorStream_.str();
+ error( RtAudioError::WARNING );
+ return info;
+ }
+
+ // Convert the CFString manufacturer name into a C string.
+ // length*3+1 bytes: worst-case buffer for the conversion (3 bytes per
+ // UTF-16 unit) — presumably sufficient for UTF-8 output; confirm.
+ //const char *mname = CFStringGetCStringPtr( cfname, CFStringGetSystemEncoding() );
+ int length = CFStringGetLength(cfname);
+ char *mname = (char *)malloc(length * 3 + 1);
+#if defined( UNICODE ) || defined( _UNICODE )
+ CFStringGetCString(cfname, mname, length * 3 + 1, kCFStringEncodingUTF8);
+#else
+ CFStringGetCString(cfname, mname, length * 3 + 1, CFStringGetSystemEncoding());
+#endif
+ info.name.append( (const char *)mname, strlen(mname) );
+ info.name.append( ": " );
+ CFRelease( cfname );
+ free(mname);
+
+ // Get the device name proper and append it after "manufacturer: ".
+ property.mSelector = kAudioObjectPropertyName;
+ result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &cfname );
+ if ( result != noErr ) {
+ errorStream_ << "RtApiCore::probeDeviceInfo: system error (" << getErrorCode( result ) << ") getting device name.";
+ errorText_ = errorStream_.str();
+ error( RtAudioError::WARNING );
+ return info;
+ }
+
+ //const char *name = CFStringGetCStringPtr( cfname, CFStringGetSystemEncoding() );
+ length = CFStringGetLength(cfname);
+ char *name = (char *)malloc(length * 3 + 1);
+#if defined( UNICODE ) || defined( _UNICODE )
+ CFStringGetCString(cfname, name, length * 3 + 1, kCFStringEncodingUTF8);
+#else
+ CFStringGetCString(cfname, name, length * 3 + 1, CFStringGetSystemEncoding());
+#endif
+ info.name.append( (const char *)name, strlen(name) );
+ CFRelease( cfname );
+ free(name);
+
+ // Get the output stream "configuration".
+ AudioBufferList *bufferList = nil;
+ property.mSelector = kAudioDevicePropertyStreamConfiguration;
+ property.mScope = kAudioDevicePropertyScopeOutput;
+ // property.mElement = kAudioObjectPropertyElementWildcard;
+ dataSize = 0;
+ result = AudioObjectGetPropertyDataSize( id, &property, 0, NULL, &dataSize );
+ if ( result != noErr || dataSize == 0 ) {
+ errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting output stream configuration info for device (" << device << ").";
+ errorText_ = errorStream_.str();
+ error( RtAudioError::WARNING );
+ return info;
+ }
+
+ // Allocate the AudioBufferList.
+ bufferList = (AudioBufferList *) malloc( dataSize );
+ if ( bufferList == NULL ) {
+ errorText_ = "RtApiCore::getDeviceInfo: memory error allocating output AudioBufferList.";
+ error( RtAudioError::WARNING );
+ return info;
+ }
+
+ result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, bufferList );
+ if ( result != noErr || dataSize == 0 ) {
+ free( bufferList );
+ errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting output stream configuration for device (" << device << ").";
+ errorText_ = errorStream_.str();
+ error( RtAudioError::WARNING );
+ return info;
+ }
+
+ // Get output channel information: sum channels over all output streams.
+ unsigned int i, nStreams = bufferList->mNumberBuffers;
+ for ( i=0; i<nStreams; i++ )
+ info.outputChannels += bufferList->mBuffers[i].mNumberChannels;
+ free( bufferList );
+
+ // Get the input stream "configuration".
+ property.mScope = kAudioDevicePropertyScopeInput;
+ result = AudioObjectGetPropertyDataSize( id, &property, 0, NULL, &dataSize );
+ if ( result != noErr || dataSize == 0 ) {
+ errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting input stream configuration info for device (" << device << ").";
+ errorText_ = errorStream_.str();
+ error( RtAudioError::WARNING );
+ return info;
+ }
+
+ // Allocate the AudioBufferList.
+ bufferList = (AudioBufferList *) malloc( dataSize );
+ if ( bufferList == NULL ) {
+ errorText_ = "RtApiCore::getDeviceInfo: memory error allocating input AudioBufferList.";
+ error( RtAudioError::WARNING );
+ return info;
+ }
+
+ result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, bufferList );
+ if (result != noErr || dataSize == 0) {
+ free( bufferList );
+ errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting input stream configuration for device (" << device << ").";
+ errorText_ = errorStream_.str();
+ error( RtAudioError::WARNING );
+ return info;
+ }
+
+ // Get input channel information: sum channels over all input streams.
+ nStreams = bufferList->mNumberBuffers;
+ for ( i=0; i<nStreams; i++ )
+ info.inputChannels += bufferList->mBuffers[i].mNumberChannels;
+ free( bufferList );
+
+ // If device opens for both playback and capture, we determine the channels.
+ if ( info.outputChannels > 0 && info.inputChannels > 0 )
+ info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
+
+ // Probe the device sample rates.
+ bool isInput = false;
+ if ( info.outputChannels == 0 ) isInput = true;
+
+ // Determine the supported sample rates.
+ property.mSelector = kAudioDevicePropertyAvailableNominalSampleRates;
+ if ( isInput == false ) property.mScope = kAudioDevicePropertyScopeOutput;
+ result = AudioObjectGetPropertyDataSize( id, &property, 0, NULL, &dataSize );
+ if ( result != kAudioHardwareNoError || dataSize == 0 ) {
+ errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting sample rate info.";
+ errorText_ = errorStream_.str();
+ error( RtAudioError::WARNING );
+ return info;
+ }
+
+ UInt32 nRanges = dataSize / sizeof( AudioValueRange );
+ AudioValueRange rangeList[ nRanges ];
+ result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &rangeList );
+ if ( result != kAudioHardwareNoError ) {
+ errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting sample rates.";
+ errorText_ = errorStream_.str();
+ error( RtAudioError::WARNING );
+ return info;
+ }
+
+ // The sample rate reporting mechanism is a bit of a mystery. It
+ // seems that it can either return individual rates or a range of
+ // rates. I assume that if the min / max range values are the same,
+ // then that represents a single supported rate and if the min / max
+ // range values are different, the device supports an arbitrary
+ // range of values (though there might be multiple ranges, so we'll
+ // use the most conservative range).
+ Float64 minimumRate = 1.0, maximumRate = 10000000000.0;
+ bool haveValueRange = false;
+ info.sampleRates.clear();
+ for ( UInt32 i=0; i<nRanges; i++ ) {
+ if ( rangeList[i].mMinimum == rangeList[i].mMaximum ) {
+ unsigned int tmpSr = (unsigned int) rangeList[i].mMinimum;
+ info.sampleRates.push_back( tmpSr );
+
+ // Prefer the highest rate not exceeding 48 kHz.
+ // NOTE(review): relies on preferredSampleRate starting at 0 — confirm DeviceInfo zero-initializes it.
+ if ( !info.preferredSampleRate || ( tmpSr <= 48000 && tmpSr > info.preferredSampleRate ) )
+ info.preferredSampleRate = tmpSr;
+
+ } else {
+ haveValueRange = true;
+ if ( rangeList[i].mMinimum > minimumRate ) minimumRate = rangeList[i].mMinimum;
+ if ( rangeList[i].mMaximum < maximumRate ) maximumRate = rangeList[i].mMaximum;
+ }
+ }
+
+ // For a continuous range, enumerate the standard rates falling inside it.
+ if ( haveValueRange ) {
+ for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
+ if ( SAMPLE_RATES[k] >= (unsigned int) minimumRate && SAMPLE_RATES[k] <= (unsigned int) maximumRate ) {
+ info.sampleRates.push_back( SAMPLE_RATES[k] );
+
+ if ( !info.preferredSampleRate || ( SAMPLE_RATES[k] <= 48000 && SAMPLE_RATES[k] > info.preferredSampleRate ) )
+ info.preferredSampleRate = SAMPLE_RATES[k];
+ }
+ }
+ }
+
+ // Sort and remove any redundant values
+ std::sort( info.sampleRates.begin(), info.sampleRates.end() );
+ info.sampleRates.erase( unique( info.sampleRates.begin(), info.sampleRates.end() ), info.sampleRates.end() );
+
+ if ( info.sampleRates.size() == 0 ) {
+ errorStream_ << "RtApiCore::probeDeviceInfo: No supported sample rates found for device (" << device << ").";
+ errorText_ = errorStream_.str();
+ error( RtAudioError::WARNING );
+ return info;
+ }
+
+ // CoreAudio always uses 32-bit floating point data for PCM streams.
+ // Thus, any other "physical" formats supported by the device are of
+ // no interest to the client.
+ info.nativeFormats = RTAUDIO_FLOAT32;
+
+ if ( info.outputChannels > 0 )
+ if ( getDefaultOutputDevice() == device ) info.isDefaultOutput = true;
+ if ( info.inputChannels > 0 )
+ if ( getDefaultInputDevice() == device ) info.isDefaultInput = true;
+
+ info.probed = true;
+ return info;
+}
+
+// CoreAudio IOProc trampoline: unwraps the CallbackInfo passed as user
+// data and forwards the buffers to RtApiCore::callbackEvent(). A false
+// return from callbackEvent() is translated to
+// kAudioHardwareUnspecifiedError.
+static OSStatus callbackHandler( AudioDeviceID inDevice,
+ const AudioTimeStamp* /*inNow*/,
+ const AudioBufferList* inInputData,
+ const AudioTimeStamp* /*inInputTime*/,
+ AudioBufferList* outOutputData,
+ const AudioTimeStamp* /*inOutputTime*/,
+ void* infoPointer )
+{
+ CallbackInfo *info = (CallbackInfo *) infoPointer;
+
+ RtApiCore *object = (RtApiCore *) info->object;
+ if ( object->callbackEvent( inDevice, inInputData, outOutputData ) == false )
+ return kAudioHardwareUnspecifiedError;
+ else
+ return kAudioHardwareNoError;
+}
+
+// Property listener for kAudioDeviceProcessorOverload notifications:
+// records over/underruns in the CoreHandle xrun flags
+// ([1] = input scope, [0] = everything else, i.e. output).
+static OSStatus xrunListener( AudioObjectID /*inDevice*/,
+ UInt32 nAddresses,
+ const AudioObjectPropertyAddress properties[],
+ void* handlePointer )
+{
+ CoreHandle *handle = (CoreHandle *) handlePointer;
+ for ( UInt32 i=0; i<nAddresses; i++ ) {
+ if ( properties[i].mSelector == kAudioDeviceProcessorOverload ) {
+ if ( properties[i].mScope == kAudioDevicePropertyScopeInput )
+ handle->xrun[1] = true;
+ else
+ handle->xrun[0] = true;
+ }
+ }
+
+ return kAudioHardwareNoError;
+}
+
+// Property listener used while changing a device's sample rate: reads
+// the device's current nominal rate into the Float64 pointed to by
+// ratePointer, so probeDeviceOpen() can poll until the new rate takes
+// effect.
+static OSStatus rateListener( AudioObjectID inDevice,
+ UInt32 /*nAddresses*/,
+ const AudioObjectPropertyAddress /*properties*/[],
+ void* ratePointer )
+{
+ Float64 *rate = (Float64 *) ratePointer;
+ UInt32 dataSize = sizeof( Float64 );
+ AudioObjectPropertyAddress property = { kAudioDevicePropertyNominalSampleRate,
+ kAudioObjectPropertyScopeGlobal,
+ kAudioObjectPropertyElementMaster };
+ AudioObjectGetPropertyData( inDevice, &property, 0, NULL, &dataSize, rate );
+ return kAudioHardwareNoError;
+}
+
+bool RtApiCore :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
+ unsigned int firstChannel, unsigned int sampleRate,
+ RtAudioFormat format, unsigned int *bufferSize,
+ RtAudio::StreamOptions *options )
+{
+ // Get device ID
+ unsigned int nDevices = getDeviceCount();
+ if ( nDevices == 0 ) {
+ // This should not happen because a check is made before this function is called.
+ errorText_ = "RtApiCore::probeDeviceOpen: no devices found!";
+ return FAILURE;
+ }
+
+ if ( device >= nDevices ) {
+ // This should not happen because a check is made before this function is called.
+ errorText_ = "RtApiCore::probeDeviceOpen: device ID is invalid!";
+ return FAILURE;
+ }
+
+ AudioDeviceID deviceList[ nDevices ];
+ UInt32 dataSize = sizeof( AudioDeviceID ) * nDevices;
+ AudioObjectPropertyAddress property = { kAudioHardwarePropertyDevices,
+ kAudioObjectPropertyScopeGlobal,
+ kAudioObjectPropertyElementMaster };
+ OSStatus result = AudioObjectGetPropertyData( kAudioObjectSystemObject, &property,
+ 0, NULL, &dataSize, (void *) &deviceList );
+ if ( result != noErr ) {
+ errorText_ = "RtApiCore::probeDeviceOpen: OS-X system error getting device IDs.";
+ return FAILURE;
+ }
+
+ AudioDeviceID id = deviceList[ device ];
+
+ // Setup for stream mode.
+ bool isInput = false;
+ if ( mode == INPUT ) {
+ isInput = true;
+ property.mScope = kAudioDevicePropertyScopeInput;
+ }
+ else
+ property.mScope = kAudioDevicePropertyScopeOutput;
+
+ // Get the stream "configuration".
+ AudioBufferList *bufferList = nil;
+ dataSize = 0;
+ property.mSelector = kAudioDevicePropertyStreamConfiguration;
+ result = AudioObjectGetPropertyDataSize( id, &property, 0, NULL, &dataSize );
+ if ( result != noErr || dataSize == 0 ) {
+ errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream configuration info for device (" << device << ").";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // Allocate the AudioBufferList.
+ bufferList = (AudioBufferList *) malloc( dataSize );
+ if ( bufferList == NULL ) {
+ errorText_ = "RtApiCore::probeDeviceOpen: memory error allocating AudioBufferList.";
+ return FAILURE;
+ }
+
+ result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, bufferList );
+ if (result != noErr || dataSize == 0) {
+ free( bufferList );
+ errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream configuration for device (" << device << ").";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // Search for one or more streams that contain the desired number of
+ // channels. CoreAudio devices can have an arbitrary number of
+ // streams and each stream can have an arbitrary number of channels.
+ // For each stream, a single buffer of interleaved samples is
+ // provided. RtAudio prefers the use of one stream of interleaved
+ // data or multiple consecutive single-channel streams. However, we
+ // now support multiple consecutive multi-channel streams of
+ // interleaved data as well.
+ UInt32 iStream, offsetCounter = firstChannel;
+ UInt32 nStreams = bufferList->mNumberBuffers;
+ bool monoMode = false;
+ bool foundStream = false;
+
+ // First check that the device supports the requested number of
+ // channels.
+ UInt32 deviceChannels = 0;
+ for ( iStream=0; iStream<nStreams; iStream++ )
+ deviceChannels += bufferList->mBuffers[iStream].mNumberChannels;
+
+ if ( deviceChannels < ( channels + firstChannel ) ) {
+ free( bufferList );
+ errorStream_ << "RtApiCore::probeDeviceOpen: the device (" << device << ") does not support the requested channel count.";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // Look for a single stream meeting our needs.
+ UInt32 firstStream, streamCount = 1, streamChannels = 0, channelOffset = 0;
+ for ( iStream=0; iStream<nStreams; iStream++ ) {
+ streamChannels = bufferList->mBuffers[iStream].mNumberChannels;
+ if ( streamChannels >= channels + offsetCounter ) {
+ firstStream = iStream;
+ channelOffset = offsetCounter;
+ foundStream = true;
+ break;
+ }
+ if ( streamChannels > offsetCounter ) break;
+ offsetCounter -= streamChannels;
+ }
+
+ // If we didn't find a single stream above, then we should be able
+ // to meet the channel specification with multiple streams.
+ if ( foundStream == false ) {
+ monoMode = true;
+ offsetCounter = firstChannel;
+ for ( iStream=0; iStream<nStreams; iStream++ ) {
+ streamChannels = bufferList->mBuffers[iStream].mNumberChannels;
+ if ( streamChannels > offsetCounter ) break;
+ offsetCounter -= streamChannels;
+ }
+
+ firstStream = iStream;
+ channelOffset = offsetCounter;
+ Int32 channelCounter = channels + offsetCounter - streamChannels;
+
+ if ( streamChannels > 1 ) monoMode = false;
+ while ( channelCounter > 0 ) {
+ streamChannels = bufferList->mBuffers[++iStream].mNumberChannels;
+ if ( streamChannels > 1 ) monoMode = false;
+ channelCounter -= streamChannels;
+ streamCount++;
+ }
+ }
+
+ free( bufferList );
+
+ // Determine the buffer size.
+ AudioValueRange bufferRange;
+ dataSize = sizeof( AudioValueRange );
+ property.mSelector = kAudioDevicePropertyBufferFrameSizeRange;
+ result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &bufferRange );
+
+ if ( result != noErr ) {
+ errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting buffer size range for device (" << device << ").";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ if ( bufferRange.mMinimum > *bufferSize ) *bufferSize = (unsigned long) bufferRange.mMinimum;
+ else if ( bufferRange.mMaximum < *bufferSize ) *bufferSize = (unsigned long) bufferRange.mMaximum;
+ if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) *bufferSize = (unsigned long) bufferRange.mMinimum;
+
+ // Set the buffer size. For multiple streams, I'm assuming we only
+ // need to make this setting for the master channel.
+ UInt32 theSize = (UInt32) *bufferSize;
+ dataSize = sizeof( UInt32 );
+ property.mSelector = kAudioDevicePropertyBufferFrameSize;
+ result = AudioObjectSetPropertyData( id, &property, 0, NULL, dataSize, &theSize );
+
+ if ( result != noErr ) {
+ errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting the buffer size for device (" << device << ").";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // If attempting to setup a duplex stream, the bufferSize parameter
+ // MUST be the same in both directions!
+ *bufferSize = theSize;
+ if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
+ errorStream_ << "RtApiCore::probeDeviceOpen: system error setting buffer size for duplex stream on device (" << device << ").";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ stream_.bufferSize = *bufferSize;
+ stream_.nBuffers = 1;
+
+ // Try to set "hog" mode ... it's not clear to me this is working.
+ if ( options && options->flags & RTAUDIO_HOG_DEVICE ) {
+ pid_t hog_pid;
+ dataSize = sizeof( hog_pid );
+ property.mSelector = kAudioDevicePropertyHogMode;
+ result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &hog_pid );
+ if ( result != noErr ) {
+ errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting 'hog' state!";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ if ( hog_pid != getpid() ) {
+ hog_pid = getpid();
+ result = AudioObjectSetPropertyData( id, &property, 0, NULL, dataSize, &hog_pid );
+ if ( result != noErr ) {
+ errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting 'hog' state!";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+ }
+ }
+
+ // Check and if necessary, change the sample rate for the device.
+ Float64 nominalRate;
+ dataSize = sizeof( Float64 );
+ property.mSelector = kAudioDevicePropertyNominalSampleRate;
+ result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &nominalRate );
+ if ( result != noErr ) {
+ errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting current sample rate.";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // Only change the sample rate if off by more than 1 Hz.
+ if ( fabs( nominalRate - (double)sampleRate ) > 1.0 ) {
+
+ // Set a property listener for the sample rate change
+ Float64 reportedRate = 0.0;
+ AudioObjectPropertyAddress tmp = { kAudioDevicePropertyNominalSampleRate, kAudioObjectPropertyScopeGlobal, kAudioObjectPropertyElementMaster };
+ result = AudioObjectAddPropertyListener( id, &tmp, rateListener, (void *) &reportedRate );
+ if ( result != noErr ) {
+ errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting sample rate property listener for device (" << device << ").";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ nominalRate = (Float64) sampleRate;
+ result = AudioObjectSetPropertyData( id, &property, 0, NULL, dataSize, &nominalRate );
+ if ( result != noErr ) {
+ AudioObjectRemovePropertyListener( id, &tmp, rateListener, (void *) &reportedRate );
+ errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting sample rate for device (" << device << ").";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // Now wait until the reported nominal rate is what we just set.
+ UInt32 microCounter = 0;
+ while ( reportedRate != nominalRate ) {
+ microCounter += 5000;
+ if ( microCounter > 5000000 ) break;
+ usleep( 5000 );
+ }
+
+ // Remove the property listener.
+ AudioObjectRemovePropertyListener( id, &tmp, rateListener, (void *) &reportedRate );
+
+ if ( microCounter > 5000000 ) {
+ errorStream_ << "RtApiCore::probeDeviceOpen: timeout waiting for sample rate update for device (" << device << ").";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+ }
+
+ // Now set the stream format for all streams. Also, check the
+ // physical format of the device and change that if necessary.
+ AudioStreamBasicDescription description;
+ dataSize = sizeof( AudioStreamBasicDescription );
+ property.mSelector = kAudioStreamPropertyVirtualFormat;
+ result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &description );
+ if ( result != noErr ) {
+ errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream format for device (" << device << ").";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // Set the sample rate and data format id. However, only make the
+ // change if the sample rate is not within 1.0 of the desired
+ // rate and the format is not linear pcm.
+ bool updateFormat = false;
+ if ( fabs( description.mSampleRate - (Float64)sampleRate ) > 1.0 ) {
+ description.mSampleRate = (Float64) sampleRate;
+ updateFormat = true;
+ }
+
+ if ( description.mFormatID != kAudioFormatLinearPCM ) {
+ description.mFormatID = kAudioFormatLinearPCM;
+ updateFormat = true;
+ }
+
+ if ( updateFormat ) {
+ result = AudioObjectSetPropertyData( id, &property, 0, NULL, dataSize, &description );
+ if ( result != noErr ) {
+ errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting sample rate or data format for device (" << device << ").";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+ }
+
+ // Now check the physical format.
+ property.mSelector = kAudioStreamPropertyPhysicalFormat;
+ result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &description );
+ if ( result != noErr ) {
+ errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream physical format for device (" << device << ").";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ //std::cout << "Current physical stream format:" << std::endl;
+ //std::cout << " mBitsPerChan = " << description.mBitsPerChannel << std::endl;
+ //std::cout << " aligned high = " << (description.mFormatFlags & kAudioFormatFlagIsAlignedHigh) << ", isPacked = " << (description.mFormatFlags & kAudioFormatFlagIsPacked) << std::endl;
+ //std::cout << " bytesPerFrame = " << description.mBytesPerFrame << std::endl;
+ //std::cout << " sample rate = " << description.mSampleRate << std::endl;
+
+ if ( description.mFormatID != kAudioFormatLinearPCM || description.mBitsPerChannel < 16 ) {
+ description.mFormatID = kAudioFormatLinearPCM;
+ //description.mSampleRate = (Float64) sampleRate;
+ AudioStreamBasicDescription testDescription = description;
+ UInt32 formatFlags;
+
+ // We'll try higher bit rates first and then work our way down.
+ std::vector< std::pair<UInt32, UInt32> > physicalFormats;
+ formatFlags = (description.mFormatFlags | kLinearPCMFormatFlagIsFloat) & ~kLinearPCMFormatFlagIsSignedInteger;
+ physicalFormats.push_back( std::pair<Float32, UInt32>( 32, formatFlags ) );
+ formatFlags = (description.mFormatFlags | kLinearPCMFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked) & ~kLinearPCMFormatFlagIsFloat;
+ physicalFormats.push_back( std::pair<Float32, UInt32>( 32, formatFlags ) );
+ physicalFormats.push_back( std::pair<Float32, UInt32>( 24, formatFlags ) ); // 24-bit packed
+ formatFlags &= ~( kAudioFormatFlagIsPacked | kAudioFormatFlagIsAlignedHigh );
+ physicalFormats.push_back( std::pair<Float32, UInt32>( 24.2, formatFlags ) ); // 24-bit in 4 bytes, aligned low
+ formatFlags |= kAudioFormatFlagIsAlignedHigh;
+ physicalFormats.push_back( std::pair<Float32, UInt32>( 24.4, formatFlags ) ); // 24-bit in 4 bytes, aligned high
+ formatFlags = (description.mFormatFlags | kLinearPCMFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked) & ~kLinearPCMFormatFlagIsFloat;
+ physicalFormats.push_back( std::pair<Float32, UInt32>( 16, formatFlags ) );
+ physicalFormats.push_back( std::pair<Float32, UInt32>( 8, formatFlags ) );
+
+ bool setPhysicalFormat = false;
+ for( unsigned int i=0; i<physicalFormats.size(); i++ ) {
+ testDescription = description;
+ testDescription.mBitsPerChannel = (UInt32) physicalFormats[i].first;
+ testDescription.mFormatFlags = physicalFormats[i].second;
+ if ( (24 == (UInt32)physicalFormats[i].first) && ~( physicalFormats[i].second & kAudioFormatFlagIsPacked ) )
+ testDescription.mBytesPerFrame = 4 * testDescription.mChannelsPerFrame;
+ else
+ testDescription.mBytesPerFrame = testDescription.mBitsPerChannel/8 * testDescription.mChannelsPerFrame;
+ testDescription.mBytesPerPacket = testDescription.mBytesPerFrame * testDescription.mFramesPerPacket;
+ result = AudioObjectSetPropertyData( id, &property, 0, NULL, dataSize, &testDescription );
+ if ( result == noErr ) {
+ setPhysicalFormat = true;
+ //std::cout << "Updated physical stream format:" << std::endl;
+ //std::cout << " mBitsPerChan = " << testDescription.mBitsPerChannel << std::endl;
+ //std::cout << " aligned high = " << (testDescription.mFormatFlags & kAudioFormatFlagIsAlignedHigh) << ", isPacked = " << (testDescription.mFormatFlags & kAudioFormatFlagIsPacked) << std::endl;
+ //std::cout << " bytesPerFrame = " << testDescription.mBytesPerFrame << std::endl;
+ //std::cout << " sample rate = " << testDescription.mSampleRate << std::endl;
+ break;
+ }
+ }
+
+ if ( !setPhysicalFormat ) {
+ errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting physical data format for device (" << device << ").";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+ } // done setting virtual/physical formats.
+
+ // Get the stream / device latency.
+ UInt32 latency;
+ dataSize = sizeof( UInt32 );
+ property.mSelector = kAudioDevicePropertyLatency;
+ if ( AudioObjectHasProperty( id, &property ) == true ) {
+ result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &latency );
+ if ( result == kAudioHardwareNoError ) stream_.latency[ mode ] = latency;
+ else {
+ errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting device latency for device (" << device << ").";
+ errorText_ = errorStream_.str();
+ error( RtAudioError::WARNING );
+ }
+ }
+
+ // Byte-swapping: According to AudioHardware.h, the stream data will
+ // always be presented in native-endian format, so we should never
+ // need to byte swap.
+ stream_.doByteSwap[mode] = false;
+
+ // From the CoreAudio documentation, PCM data must be supplied as
+ // 32-bit floats.
+ stream_.userFormat = format;
+ stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
+
+ if ( streamCount == 1 )
+ stream_.nDeviceChannels[mode] = description.mChannelsPerFrame;
+ else // multiple streams
+ stream_.nDeviceChannels[mode] = channels;
+ stream_.nUserChannels[mode] = channels;
+ stream_.channelOffset[mode] = channelOffset; // offset within a CoreAudio stream
+ if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
+ else stream_.userInterleaved = true;
+ stream_.deviceInterleaved[mode] = true;
+ if ( monoMode == true ) stream_.deviceInterleaved[mode] = false;
+
+ // Set flags for buffer conversion.
+ stream_.doConvertBuffer[mode] = false;
+ if ( stream_.userFormat != stream_.deviceFormat[mode] )
+ stream_.doConvertBuffer[mode] = true;
+ if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
+ stream_.doConvertBuffer[mode] = true;
+ if ( streamCount == 1 ) {
+ if ( stream_.nUserChannels[mode] > 1 &&
+ stream_.userInterleaved != stream_.deviceInterleaved[mode] )
+ stream_.doConvertBuffer[mode] = true;
+ }
+ else if ( monoMode && stream_.userInterleaved )
+ stream_.doConvertBuffer[mode] = true;
+
+ // Allocate our CoreHandle structure for the stream.
+ CoreHandle *handle = 0;
+ if ( stream_.apiHandle == 0 ) {
+ try {
+ handle = new CoreHandle;
+ }
+ catch ( std::bad_alloc& ) {
+ errorText_ = "RtApiCore::probeDeviceOpen: error allocating CoreHandle memory.";
+ goto error;
+ }
+
+ if ( pthread_cond_init( &handle->condition, NULL ) ) {
+ errorText_ = "RtApiCore::probeDeviceOpen: error initializing pthread condition variable.";
+ goto error;
+ }
+ stream_.apiHandle = (void *) handle;
+ }
+ else
+ handle = (CoreHandle *) stream_.apiHandle;
+ handle->iStream[mode] = firstStream;
+ handle->nStreams[mode] = streamCount;
+ handle->id[mode] = id;
+
+ // Allocate necessary internal buffers.
+ unsigned long bufferBytes;
+ bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
+ // stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
+ stream_.userBuffer[mode] = (char *) malloc( bufferBytes * sizeof(char) );
+ memset( stream_.userBuffer[mode], 0, bufferBytes * sizeof(char) );
+ if ( stream_.userBuffer[mode] == NULL ) {
+ errorText_ = "RtApiCore::probeDeviceOpen: error allocating user buffer memory.";
+ goto error;
+ }
+
+ // If possible, we will make use of the CoreAudio stream buffers as
+ // "device buffers". However, we can't do this if using multiple
+ // streams.
+ if ( stream_.doConvertBuffer[mode] && handle->nStreams[mode] > 1 ) {
+
+ bool makeBuffer = true;
+ bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
+ if ( mode == INPUT ) {
+ if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
+ unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
+ if ( bufferBytes <= bytesOut ) makeBuffer = false;
+ }
+ }
+
+ if ( makeBuffer ) {
+ bufferBytes *= *bufferSize;
+ if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
+ stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
+ if ( stream_.deviceBuffer == NULL ) {
+ errorText_ = "RtApiCore::probeDeviceOpen: error allocating device buffer memory.";
+ goto error;
+ }
+ }
+ }
+
+ stream_.sampleRate = sampleRate;
+ stream_.device[mode] = device;
+ stream_.state = STREAM_STOPPED;
+ stream_.callbackInfo.object = (void *) this;
+
+ // Setup the buffer conversion information structure.
+ if ( stream_.doConvertBuffer[mode] ) {
+ if ( streamCount > 1 ) setConvertInfo( mode, 0 );
+ else setConvertInfo( mode, channelOffset );
+ }
+
+ if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] == device )
+ // Only one callback procedure per device.
+ stream_.mode = DUPLEX;
+ else {
+#if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )
+ result = AudioDeviceCreateIOProcID( id, callbackHandler, (void *) &stream_.callbackInfo, &handle->procId[mode] );
+#else
+ // deprecated in favor of AudioDeviceCreateIOProcID()
+ result = AudioDeviceAddIOProc( id, callbackHandler, (void *) &stream_.callbackInfo );
+#endif
+ if ( result != noErr ) {
+ errorStream_ << "RtApiCore::probeDeviceOpen: system error setting callback for device (" << device << ").";
+ errorText_ = errorStream_.str();
+ goto error;
+ }
+ if ( stream_.mode == OUTPUT && mode == INPUT )
+ stream_.mode = DUPLEX;
+ else
+ stream_.mode = mode;
+ }
+
+ // Setup the device property listener for over/underload.
+ property.mSelector = kAudioDeviceProcessorOverload;
+ property.mScope = kAudioObjectPropertyScopeGlobal;
+ result = AudioObjectAddPropertyListener( id, &property, xrunListener, (void *) handle );
+
+ return SUCCESS;
+
+ error:
+ if ( handle ) {
+ pthread_cond_destroy( &handle->condition );
+ delete handle;
+ stream_.apiHandle = 0;
+ }
+
+ for ( int i=0; i<2; i++ ) {
+ if ( stream_.userBuffer[i] ) {
+ free( stream_.userBuffer[i] );
+ stream_.userBuffer[i] = 0;
+ }
+ }
+
+ if ( stream_.deviceBuffer ) {
+ free( stream_.deviceBuffer );
+ stream_.deviceBuffer = 0;
+ }
+
+ stream_.state = STREAM_CLOSED;
+ return FAILURE;
+}
+
+// Close the open stream: remove the xrun (processor-overload) property
+// listener and the IOProc for the output side (index 0) and, when the
+// input side uses a distinct device, for the input side (index 1) too;
+// then free the user/device buffers and the CoreHandle.
+void RtApiCore :: closeStream( void )
+{
+  if ( stream_.state == STREAM_CLOSED ) {
+    errorText_ = "RtApiCore::closeStream(): no open stream to close!";
+    error( RtAudioError::WARNING );
+    return;
+  }
+
+  CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
+  if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
+    if (handle) {
+      AudioObjectPropertyAddress property = { kAudioHardwarePropertyDevices,
+                                              kAudioObjectPropertyScopeGlobal,
+                                              kAudioObjectPropertyElementMaster };
+
+      property.mSelector = kAudioDeviceProcessorOverload;
+      property.mScope = kAudioObjectPropertyScopeGlobal;
+      if (AudioObjectRemovePropertyListener( handle->id[0], &property, xrunListener, (void *) handle ) != noErr) {
+        errorText_ = "RtApiCore::closeStream(): error removing property listener!";
+        error( RtAudioError::WARNING );
+      }
+
+#if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )
+      if ( stream_.state == STREAM_RUNNING )
+        AudioDeviceStop( handle->id[0], handle->procId[0] );
+      AudioDeviceDestroyIOProcID( handle->id[0], handle->procId[0] );
+#else // deprecated behaviour
+      if ( stream_.state == STREAM_RUNNING )
+        AudioDeviceStop( handle->id[0], callbackHandler );
+      AudioDeviceRemoveIOProc( handle->id[0], callbackHandler );
+#endif
+    }
+  }
+
+  if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
+    if (handle) {
+      AudioObjectPropertyAddress property = { kAudioHardwarePropertyDevices,
+                                              kAudioObjectPropertyScopeGlobal,
+                                              kAudioObjectPropertyElementMaster };
+
+      property.mSelector = kAudioDeviceProcessorOverload;
+      property.mScope = kAudioObjectPropertyScopeGlobal;
+      if (AudioObjectRemovePropertyListener( handle->id[1], &property, xrunListener, (void *) handle ) != noErr) {
+        errorText_ = "RtApiCore::closeStream(): error removing property listener!";
+        error( RtAudioError::WARNING );
+      }
+
+#if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )
+      if ( stream_.state == STREAM_RUNNING )
+        AudioDeviceStop( handle->id[1], handle->procId[1] );
+      AudioDeviceDestroyIOProcID( handle->id[1], handle->procId[1] );
+#else // deprecated behaviour
+      if ( stream_.state == STREAM_RUNNING )
+        AudioDeviceStop( handle->id[1], callbackHandler );
+      AudioDeviceRemoveIOProc( handle->id[1], callbackHandler );
+#endif
+    }
+  }
+
+  for ( int i=0; i<2; i++ ) {
+    if ( stream_.userBuffer[i] ) {
+      free( stream_.userBuffer[i] );
+      stream_.userBuffer[i] = 0;
+    }
+  }
+
+  if ( stream_.deviceBuffer ) {
+    free( stream_.deviceBuffer );
+    stream_.deviceBuffer = 0;
+  }
+
+  // Destroy pthread condition variable and release the handle. Guarded:
+  // every use of 'handle' above is null-checked, so honor the same
+  // possibility here instead of dereferencing a null apiHandle.
+  if ( handle ) {
+    pthread_cond_destroy( &handle->condition );
+    delete handle;
+    stream_.apiHandle = 0;
+  }
+
+  stream_.mode = UNINITIALIZED;
+  stream_.state = STREAM_CLOSED;
+}
+
+// Start the stream by starting the registered IOProc on the output
+// device (index 0) and, when the input side runs on a different device,
+// on the input device (index 1) as well. Resets the drain bookkeeping
+// and moves the stream to STREAM_RUNNING on success; reports a
+// SYSTEM_ERROR otherwise.
+void RtApiCore :: startStream( void )
+{
+  verifyStream();
+  if ( stream_.state == STREAM_RUNNING ) {
+    errorText_ = "RtApiCore::startStream(): the stream is already running!";
+    error( RtAudioError::WARNING );
+    return;
+  }
+
+#if defined( HAVE_GETTIMEOFDAY )
+  // Timestamp the start so stream time can be interpolated between callbacks.
+  gettimeofday( &stream_.lastTickTimestamp, NULL );
+#endif
+
+  OSStatus result = noErr;
+  CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
+  if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
+
+#if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )
+    result = AudioDeviceStart( handle->id[0], handle->procId[0] );
+#else // deprecated behaviour
+    result = AudioDeviceStart( handle->id[0], callbackHandler );
+#endif
+    if ( result != noErr ) {
+      errorStream_ << "RtApiCore::startStream: system error (" << getErrorCode( result ) << ") starting callback procedure on device (" << stream_.device[0] << ").";
+      errorText_ = errorStream_.str();
+      goto unlock;
+    }
+  }
+
+  // Start the input side only when it is a separate device; a DUPLEX
+  // stream on a single device shares one IOProc (started above).
+  if ( stream_.mode == INPUT ||
+       ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
+
+#if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )
+    result = AudioDeviceStart( handle->id[1], handle->procId[1] );
+#else // deprecated behaviour
+    result = AudioDeviceStart( handle->id[1], callbackHandler );
+#endif
+    if ( result != noErr ) {
+      errorStream_ << "RtApiCore::startStream: system error starting input callback procedure on device (" << stream_.device[1] << ").";
+      errorText_ = errorStream_.str();
+      goto unlock;
+    }
+  }
+
+  handle->drainCounter = 0;
+  handle->internalDrain = false;
+  stream_.state = STREAM_RUNNING;
+
+ unlock:
+  if ( result == noErr ) return;
+  error( RtAudioError::SYSTEM_ERROR );
+}
+
+// Stop the stream. For output/duplex, first let the callback drain the
+// output (drainCounter handshake via the condition variable), then stop
+// the output device's IOProc; when the input side runs on a distinct
+// device, stop its IOProc as well.
+void RtApiCore :: stopStream( void )
+{
+  verifyStream();
+  if ( stream_.state == STREAM_STOPPED ) {
+    errorText_ = "RtApiCore::stopStream(): the stream is already stopped!";
+    error( RtAudioError::WARNING );
+    return;
+  }
+
+  OSStatus result = noErr;
+  CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
+  if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
+
+    if ( handle->drainCounter == 0 ) {
+      handle->drainCounter = 2;
+      pthread_cond_wait( &handle->condition, &stream_.mutex ); // block until signaled
+    }
+
+#if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )
+    result = AudioDeviceStop( handle->id[0], handle->procId[0] );
+#else // deprecated behaviour
+    result = AudioDeviceStop( handle->id[0], callbackHandler );
+#endif
+    if ( result != noErr ) {
+      errorStream_ << "RtApiCore::stopStream: system error (" << getErrorCode( result ) << ") stopping callback procedure on device (" << stream_.device[0] << ").";
+      errorText_ = errorStream_.str();
+      goto unlock;
+    }
+  }
+
+  if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
+
+#if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )
+    // Fix: stop the INPUT device (id[1]) with its own proc id; the
+    // previous code passed id[0] here, so the input device's IOProc was
+    // never stopped when input and output were different devices.
+    result = AudioDeviceStop( handle->id[1], handle->procId[1] );
+#else // deprecated behaviour
+    result = AudioDeviceStop( handle->id[1], callbackHandler );
+#endif
+    if ( result != noErr ) {
+      errorStream_ << "RtApiCore::stopStream: system error (" << getErrorCode( result ) << ") stopping input callback procedure on device (" << stream_.device[1] << ").";
+      errorText_ = errorStream_.str();
+      goto unlock;
+    }
+  }
+
+  stream_.state = STREAM_STOPPED;
+
+ unlock:
+  if ( result == noErr ) return;
+  error( RtAudioError::SYSTEM_ERROR );
+}
+
+// Abort the stream immediately: setting drainCounter to 2 makes the
+// stopStream() drain handshake skip the output-drain wait, so the
+// stream stops without flushing pending output.
+void RtApiCore :: abortStream( void )
+{
+  verifyStream();
+  if ( stream_.state == STREAM_STOPPED ) {
+    errorText_ = "RtApiCore::abortStream(): the stream is already stopped!";
+    error( RtAudioError::WARNING );
+    return;
+  }
+
+  CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
+  handle->drainCounter = 2;
+
+  stopStream();
+}
+
+// This function will be called by a spawned thread when the user
+// callback function signals that the stream should be stopped or
+// aborted. It is better to handle it this way because the
+// callbackEvent() function probably should return before the AudioDeviceStop()
+// function is called.
+// Thread entry point: unpack the CallbackInfo and stop the stream from
+// a separate thread (see the rationale in the comment block above).
+static void *coreStopStream( void *ptr )
+{
+  CallbackInfo *info = (CallbackInfo *) ptr;
+  RtApiCore *object = (RtApiCore *) info->object;
+
+  object->stopStream();
+  pthread_exit( NULL );
+}
+
+// Per-buffer IOProc handler. Invokes the user callback to fill the user
+// output buffer, copies/converts it into the CoreAudio output stream
+// buffer(s), then copies/converts the input stream buffer(s) into the
+// user input buffer. Handles draining, xrun flags, single- vs
+// multi-stream devices, interleaved vs non-interleaved layouts, and
+// channel offsets within a stream.
+bool RtApiCore :: callbackEvent( AudioDeviceID deviceId,
+                                 const AudioBufferList *inBufferList,
+                                 const AudioBufferList *outBufferList )
+{
+  if ( stream_.state == STREAM_STOPPED || stream_.state == STREAM_STOPPING ) return SUCCESS;
+  if ( stream_.state == STREAM_CLOSED ) {
+    errorText_ = "RtApiCore::callbackEvent(): the stream is closed ... this shouldn't happen!";
+    error( RtAudioError::WARNING );
+    return FAILURE;
+  }
+
+  CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
+  CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
+
+  // Check if we were draining the stream and signal is finished.
+  if ( handle->drainCounter > 3 ) {
+    ThreadHandle threadId;
+
+    stream_.state = STREAM_STOPPING;
+    if ( handle->internalDrain == true )
+      // Drain initiated from within the callback: stop from a spawned
+      // thread so this IOProc can return first (see coreStopStream).
+      pthread_create( &threadId, NULL, coreStopStream, info );
+    else // external call to stopStream()
+      pthread_cond_signal( &handle->condition );
+    return SUCCESS;
+  }
+
+  AudioDeviceID outputDevice = handle->id[0];
+
+  // Invoke user callback to get fresh output data UNLESS we are
+  // draining stream or duplex mode AND the input/output devices are
+  // different AND this function is called for the input device.
+  if ( handle->drainCounter == 0 && ( stream_.mode != DUPLEX || deviceId == outputDevice ) ) {
+    RtAudioCallback callback = (RtAudioCallback) info->callback;
+    double streamTime = getStreamTime();
+    RtAudioStreamStatus status = 0;
+    if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
+      status |= RTAUDIO_OUTPUT_UNDERFLOW;
+      handle->xrun[0] = false;
+    }
+    if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
+      status |= RTAUDIO_INPUT_OVERFLOW;
+      handle->xrun[1] = false;
+    }
+
+    int cbReturnValue = callback( stream_.userBuffer[0], stream_.userBuffer[1],
+                                  stream_.bufferSize, streamTime, status, info->userData );
+    // Callback return value 2 = abort now, 1 = drain output then stop.
+    if ( cbReturnValue == 2 ) {
+      stream_.state = STREAM_STOPPING;
+      handle->drainCounter = 2;
+      abortStream();
+      return SUCCESS;
+    }
+    else if ( cbReturnValue == 1 ) {
+      handle->drainCounter = 1;
+      handle->internalDrain = true;
+    }
+  }
+
+  if ( stream_.mode == OUTPUT || ( stream_.mode == DUPLEX && deviceId == outputDevice ) ) {
+
+    if ( handle->drainCounter > 1 ) { // write zeros to the output stream
+
+      if ( handle->nStreams[0] == 1 ) {
+        memset( outBufferList->mBuffers[handle->iStream[0]].mData,
+                0,
+                outBufferList->mBuffers[handle->iStream[0]].mDataByteSize );
+      }
+      else { // fill multiple streams with zeros
+        for ( unsigned int i=0; i<handle->nStreams[0]; i++ ) {
+          memset( outBufferList->mBuffers[handle->iStream[0]+i].mData,
+                  0,
+                  outBufferList->mBuffers[handle->iStream[0]+i].mDataByteSize );
+        }
+      }
+    }
+    else if ( handle->nStreams[0] == 1 ) {
+      if ( stream_.doConvertBuffer[0] ) { // convert directly to CoreAudio stream buffer
+        convertBuffer( (char *) outBufferList->mBuffers[handle->iStream[0]].mData,
+                       stream_.userBuffer[0], stream_.convertInfo[0] );
+      }
+      else { // copy from user buffer
+        memcpy( outBufferList->mBuffers[handle->iStream[0]].mData,
+                stream_.userBuffer[0],
+                outBufferList->mBuffers[handle->iStream[0]].mDataByteSize );
+      }
+    }
+    else { // fill multiple streams
+      Float32 *inBuffer = (Float32 *) stream_.userBuffer[0];
+      if ( stream_.doConvertBuffer[0] ) {
+        convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
+        inBuffer = (Float32 *) stream_.deviceBuffer;
+      }
+
+      if ( stream_.deviceInterleaved[0] == false ) { // mono mode
+        UInt32 bufferBytes = outBufferList->mBuffers[handle->iStream[0]].mDataByteSize;
+        for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
+          memcpy( outBufferList->mBuffers[handle->iStream[0]+i].mData,
+                  (void *)&inBuffer[i*stream_.bufferSize], bufferBytes );
+        }
+      }
+      else { // fill multiple multi-channel streams with interleaved data
+        UInt32 streamChannels, channelsLeft, inJump, outJump, inOffset;
+        Float32 *out, *in;
+
+        bool inInterleaved = ( stream_.userInterleaved ) ? true : false;
+        UInt32 inChannels = stream_.nUserChannels[0];
+        if ( stream_.doConvertBuffer[0] ) {
+          inInterleaved = true; // device buffer will always be interleaved for nStreams > 1 and not mono mode
+          inChannels = stream_.nDeviceChannels[0];
+        }
+
+        if ( inInterleaved ) inOffset = 1;
+        else inOffset = stream_.bufferSize;
+
+        channelsLeft = inChannels;
+        for ( unsigned int i=0; i<handle->nStreams[0]; i++ ) {
+          in = inBuffer;
+          out = (Float32 *) outBufferList->mBuffers[handle->iStream[0]+i].mData;
+          streamChannels = outBufferList->mBuffers[handle->iStream[0]+i].mNumberChannels;
+
+          outJump = 0;
+          // Account for possible channel offset in first stream
+          if ( i == 0 && stream_.channelOffset[0] > 0 ) {
+            streamChannels -= stream_.channelOffset[0];
+            outJump = stream_.channelOffset[0];
+            out += outJump;
+          }
+
+          // Account for possible unfilled channels at end of the last stream
+          if ( streamChannels > channelsLeft ) {
+            outJump = streamChannels - channelsLeft;
+            streamChannels = channelsLeft;
+          }
+
+          // Determine input buffer offsets and skips
+          if ( inInterleaved ) {
+            inJump = inChannels;
+            in += inChannels - channelsLeft;
+          }
+          else {
+            inJump = 1;
+            in += (inChannels - channelsLeft) * inOffset;
+          }
+
+          // NOTE(review): this frame-loop 'i' shadows the stream-loop 'i'
+          // above; harmless (the inner loop completes first) but confusing.
+          for ( unsigned int i=0; i<stream_.bufferSize; i++ ) {
+            for ( unsigned int j=0; j<streamChannels; j++ ) {
+              *out++ = in[j*inOffset];
+            }
+            out += outJump;
+            in += inJump;
+          }
+          channelsLeft -= streamChannels;
+        }
+      }
+    }
+  }
+
+  // Don't bother draining input
+  if ( handle->drainCounter ) {
+    handle->drainCounter++;
+    goto unlock;
+  }
+
+  AudioDeviceID inputDevice;
+  inputDevice = handle->id[1];
+  if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && deviceId == inputDevice ) ) {
+
+    if ( handle->nStreams[1] == 1 ) {
+      if ( stream_.doConvertBuffer[1] ) { // convert directly from CoreAudio stream buffer
+        convertBuffer( stream_.userBuffer[1],
+                       (char *) inBufferList->mBuffers[handle->iStream[1]].mData,
+                       stream_.convertInfo[1] );
+      }
+      else { // copy to user buffer
+        memcpy( stream_.userBuffer[1],
+                inBufferList->mBuffers[handle->iStream[1]].mData,
+                inBufferList->mBuffers[handle->iStream[1]].mDataByteSize );
+      }
+    }
+    else { // read from multiple streams
+      Float32 *outBuffer = (Float32 *) stream_.userBuffer[1];
+      if ( stream_.doConvertBuffer[1] ) outBuffer = (Float32 *) stream_.deviceBuffer;
+
+      if ( stream_.deviceInterleaved[1] == false ) { // mono mode
+        UInt32 bufferBytes = inBufferList->mBuffers[handle->iStream[1]].mDataByteSize;
+        for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
+          memcpy( (void *)&outBuffer[i*stream_.bufferSize],
+                  inBufferList->mBuffers[handle->iStream[1]+i].mData, bufferBytes );
+        }
+      }
+      else { // read from multiple multi-channel streams
+        UInt32 streamChannels, channelsLeft, inJump, outJump, outOffset;
+        Float32 *out, *in;
+
+        bool outInterleaved = ( stream_.userInterleaved ) ? true : false;
+        UInt32 outChannels = stream_.nUserChannels[1];
+        if ( stream_.doConvertBuffer[1] ) {
+          outInterleaved = true; // device buffer will always be interleaved for nStreams > 1 and not mono mode
+          outChannels = stream_.nDeviceChannels[1];
+        }
+
+        if ( outInterleaved ) outOffset = 1;
+        else outOffset = stream_.bufferSize;
+
+        channelsLeft = outChannels;
+        for ( unsigned int i=0; i<handle->nStreams[1]; i++ ) {
+          out = outBuffer;
+          in = (Float32 *) inBufferList->mBuffers[handle->iStream[1]+i].mData;
+          streamChannels = inBufferList->mBuffers[handle->iStream[1]+i].mNumberChannels;
+
+          inJump = 0;
+          // Account for possible channel offset in first stream
+          if ( i == 0 && stream_.channelOffset[1] > 0 ) {
+            streamChannels -= stream_.channelOffset[1];
+            inJump = stream_.channelOffset[1];
+            in += inJump;
+          }
+
+          // Account for possible unread channels at end of the last stream
+          if ( streamChannels > channelsLeft ) {
+            inJump = streamChannels - channelsLeft;
+            streamChannels = channelsLeft;
+          }
+
+          // Determine output buffer offsets and skips
+          if ( outInterleaved ) {
+            outJump = outChannels;
+            out += outChannels - channelsLeft;
+          }
+          else {
+            outJump = 1;
+            out += (outChannels - channelsLeft) * outOffset;
+          }
+
+          for ( unsigned int i=0; i<stream_.bufferSize; i++ ) {
+            for ( unsigned int j=0; j<streamChannels; j++ ) {
+              out[j*outOffset] = *in++;
+            }
+            out += outJump;
+            in += inJump;
+          }
+          channelsLeft -= streamChannels;
+        }
+      }
+
+      if ( stream_.doConvertBuffer[1] ) { // convert from our internal "device" buffer
+        convertBuffer( stream_.userBuffer[1],
+                       stream_.deviceBuffer,
+                       stream_.convertInfo[1] );
+      }
+    }
+  }
+
+ unlock:
+  //MUTEX_UNLOCK( &stream_.mutex );
+
+  // Make sure to only tick duplex stream time once if using two devices
+  if ( stream_.mode != DUPLEX || (stream_.mode == DUPLEX && handle->id[0] != handle->id[1] && deviceId == handle->id[0] ) )
+    RtApi::tickStreamTime();
+
+  return SUCCESS;
+}
+
+// Map a CoreAudio OSStatus error code to its symbolic constant name for
+// diagnostic messages; unknown codes yield a generic string.
+const char* RtApiCore :: getErrorCode( OSStatus code )
+{
+  switch( code ) {
+
+  case kAudioHardwareNotRunningError:
+    return "kAudioHardwareNotRunningError";
+
+  case kAudioHardwareUnspecifiedError:
+    return "kAudioHardwareUnspecifiedError";
+
+  case kAudioHardwareUnknownPropertyError:
+    return "kAudioHardwareUnknownPropertyError";
+
+  case kAudioHardwareBadPropertySizeError:
+    return "kAudioHardwareBadPropertySizeError";
+
+  case kAudioHardwareIllegalOperationError:
+    return "kAudioHardwareIllegalOperationError";
+
+  case kAudioHardwareBadObjectError:
+    return "kAudioHardwareBadObjectError";
+
+  case kAudioHardwareBadDeviceError:
+    return "kAudioHardwareBadDeviceError";
+
+  case kAudioHardwareBadStreamError:
+    return "kAudioHardwareBadStreamError";
+
+  case kAudioHardwareUnsupportedOperationError:
+    return "kAudioHardwareUnsupportedOperationError";
+
+  case kAudioDeviceUnsupportedFormatError:
+    return "kAudioDeviceUnsupportedFormatError";
+
+  case kAudioDevicePermissionsError:
+    return "kAudioDevicePermissionsError";
+
+  default:
+    return "CoreAudio unknown error";
+  }
+}
+
+ //******************** End of __MACOSX_CORE__ *********************//
+#endif
+
+#if defined(__UNIX_JACK__)
+
+// JACK is a low-latency audio server, originally written for the
+// GNU/Linux operating system and now also ported to OS-X. It can
+// connect a number of different applications to an audio device, as
+// well as allowing them to share audio between themselves.
+//
+// When using JACK with RtAudio, "devices" refer to JACK clients that
+// have ports connected to the server. The JACK server is typically
+// started in a terminal as follows:
+//
+// .jackd -d alsa -d hw:0
+//
+// or through an interface program such as qjackctl. Many of the
+// parameters normally set for a stream are fixed by the JACK server
+// and can be specified when the JACK server is started. In
+// particular,
+//
+// .jackd -d alsa -d hw:0 -r 44100 -p 512 -n 4
+//
+// specifies a sample rate of 44100 Hz, a buffer size of 512 sample
+// frames, and number of buffers = 4. Once the server is running, it
+// is not possible to override these values. If the values are not
+// specified in the command-line, the JACK server uses default values.
+//
+// The JACK server does not have to be running when an instance of
+// RtApiJack is created, though the function getDeviceCount() will
+// report 0 devices found until JACK has been started. When no
+// devices are available (i.e., the JACK server is not running), a
+// stream cannot be opened.
+
+#include <jack/jack.h>
+#include <unistd.h>
+#include <cstdio>
+
+// A structure to hold various information related to the Jack API
+// implementation.
+// A structure to hold various information related to the Jack API
+// implementation. Index 0 = output side, index 1 = input side.
+struct JackHandle {
+  jack_client_t *client;        // this process's JACK client connection
+  jack_port_t **ports[2];       // registered ports per direction
+  std::string deviceName[2];    // JACK client name used as the "device"
+  bool xrun[2];                 // xrun flags reported to the user callback
+  pthread_cond_t condition;     // signals stopStream() when draining is done
+  int drainCounter; // Tracks callback counts when draining
+  bool internalDrain; // Indicates if stop is initiated from callback or not.
+
+  JackHandle()
+    :client(0), drainCounter(0), internalDrain(false) { ports[0] = 0; ports[1] = 0; xrun[0] = false; xrun[1] = false; }
+};
+
+#if defined(__UNIX_JACK__)
+// Project-specific accessor (not upstream RtAudio): exposes the raw
+// jack_client_t* from the active stream's apiHandle. Assumes a JACK
+// stream is open; apiHandle is cast unchecked. NOTE(review): this inner
+// #if is redundant — we are already inside the __UNIX_JACK__ section.
+void* RtAudio :: HACK__getJackClient()
+{
+  return static_cast<JackHandle*>(rtapi_->stream_.apiHandle)->client;
+}
+#endif
+
+#if !defined(__RTAUDIO_DEBUG__)
+// No-op error handler installed into JACK to suppress its stderr
+// chatter in non-debug builds (see RtApiJack constructor).
+static void jackSilentError( const char * ) {};
+#endif
+
+// Construct the JACK backend with autoconnect enabled; in non-debug
+// builds, silence JACK's internal error reporting.
+RtApiJack :: RtApiJack()
+  :shouldAutoconnect_(true) {
+  // Nothing to do here.
+#if !defined(__RTAUDIO_DEBUG__)
+  // Turn off Jack's internal error reporting.
+  jack_set_error_function( &jackSilentError );
+#endif
+}
+
+// Ensure any still-open stream is torn down before destruction.
+RtApiJack :: ~RtApiJack()
+{
+  if ( stream_.state != STREAM_CLOSED ) closeStream();
+}
+
+// Count JACK "devices" by connecting as a temporary client and grouping
+// all audio ports by their client-name prefix (the text up to the first
+// ':'). Returns 0 when the JACK server is not running.
+unsigned int RtApiJack :: getDeviceCount( void )
+{
+  // See if we can become a jack client.
+  jack_options_t options = (jack_options_t) ( JackNoStartServer ); //JackNullOption;
+  jack_status_t *status = NULL;
+  jack_client_t *client = jack_client_open( "RtApiJackCount", options, status );
+  if ( client == 0 ) return 0;
+
+  const char **ports;
+  std::string port, previousPort;
+  unsigned int nChannels = 0, nDevices = 0;
+  ports = jack_get_ports( client, NULL, JACK_DEFAULT_AUDIO_TYPE, 0 );
+  if ( ports ) {
+    // Parse the port names up to the first colon (:).
+    size_t iColon = 0;
+    do {
+      port = (char *) ports[ nChannels ];
+      iColon = port.find(":");
+      if ( iColon != std::string::npos ) {
+        port = port.substr( 0, iColon + 1 );
+        // Consecutive ports of the same client share a prefix; a new
+        // prefix means a new "device".
+        if ( port != previousPort ) {
+          nDevices++;
+          previousPort = port;
+        }
+      }
+    } while ( ports[++nChannels] );
+    free( ports );
+  }
+
+  jack_client_close( client );
+  return nDevices;
+}
+
+// Probe a JACK "device" (client) by index: resolve its name from the
+// port list, then count its input/output ports as RtAudio channel
+// counts. info.probed stays false on any failure path.
+RtAudio::DeviceInfo RtApiJack :: getDeviceInfo( unsigned int device )
+{
+  RtAudio::DeviceInfo info;
+  info.probed = false;
+
+  jack_options_t options = (jack_options_t) ( JackNoStartServer ); //JackNullOption
+  jack_status_t *status = NULL;
+  jack_client_t *client = jack_client_open( "RtApiJackInfo", options, status );
+  if ( client == 0 ) {
+    errorText_ = "RtApiJack::getDeviceInfo: Jack server not found or connection error!";
+    error( RtAudioError::WARNING );
+    return info;
+  }
+
+  const char **ports;
+  std::string port, previousPort;
+  unsigned int nPorts = 0, nDevices = 0;
+  ports = jack_get_ports( client, NULL, JACK_DEFAULT_AUDIO_TYPE, 0 );
+  if ( ports ) {
+    // Parse the port names up to the first colon (:).
+    size_t iColon = 0;
+    do {
+      port = (char *) ports[ nPorts ];
+      iColon = port.find(":");
+      if ( iColon != std::string::npos ) {
+        port = port.substr( 0, iColon );
+        if ( port != previousPort ) {
+          if ( nDevices == device ) info.name = port;
+          nDevices++;
+          previousPort = port;
+        }
+      }
+    } while ( ports[++nPorts] );
+    free( ports );
+  }
+
+  if ( device >= nDevices ) {
+    jack_client_close( client );
+    errorText_ = "RtApiJack::getDeviceInfo: device ID is invalid!";
+    error( RtAudioError::INVALID_USE );
+    return info;
+  }
+
+  // Get the current jack server sample rate.
+  info.sampleRates.clear();
+
+  // JACK fixes the sample rate server-wide, so it is the only entry.
+  info.preferredSampleRate = jack_get_sample_rate( client );
+  info.sampleRates.push_back( info.preferredSampleRate );
+
+  // Count the available ports containing the client name as device
+  // channels.  Jack "input ports" equal RtAudio output channels.
+  unsigned int nChannels = 0;
+  ports = jack_get_ports( client, info.name.c_str(), JACK_DEFAULT_AUDIO_TYPE, JackPortIsInput );
+  if ( ports ) {
+    while ( ports[ nChannels ] ) nChannels++;
+    free( ports );
+    info.outputChannels = nChannels;
+  }
+
+  // Jack "output ports" equal RtAudio input channels.
+  nChannels = 0;
+  ports = jack_get_ports( client, info.name.c_str(), JACK_DEFAULT_AUDIO_TYPE, JackPortIsOutput );
+  if ( ports ) {
+    while ( ports[ nChannels ] ) nChannels++;
+    free( ports );
+    info.inputChannels = nChannels;
+  }
+
+  if ( info.outputChannels == 0 && info.inputChannels == 0 ) {
+    jack_client_close(client);
+    errorText_ = "RtApiJack::getDeviceInfo: error determining Jack input/output channels!";
+    error( RtAudioError::WARNING );
+    return info;
+  }
+
+  // If device opens for both playback and capture, we determine the channels.
+  if ( info.outputChannels > 0 && info.inputChannels > 0 )
+    info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
+
+  // Jack always uses 32-bit floats.
+  info.nativeFormats = RTAUDIO_FLOAT32;
+
+  // Jack doesn't provide default devices so we'll use the first available one.
+  if ( device == 0 && info.outputChannels > 0 )
+    info.isDefaultOutput = true;
+  if ( device == 0 && info.inputChannels > 0 )
+    info.isDefaultInput = true;
+
+  jack_client_close(client);
+  info.probed = true;
+  return info;
+}
+
+// JACK process callback: forward each audio cycle to the RtApiJack
+// instance stored in the stream's CallbackInfo. Returning non-zero
+// tells JACK to stop calling us.
+static int jackCallbackHandler( jack_nframes_t nframes, void *infoPointer )
+{
+  CallbackInfo *cbInfo = static_cast<CallbackInfo *>( infoPointer );
+  RtApiJack *api = static_cast<RtApiJack *>( cbInfo->object );
+
+  return api->callbackEvent( (unsigned long) nframes ) ? 0 : 1;
+}
+
+// This function will be called by a spawned thread when the Jack
+// server signals that it is shutting down. It is necessary to handle
+// it this way because the jackShutdown() function must return before
+// the jack_deactivate() function (in closeStream()) will return.
+static void *jackCloseStream( void *ptr )
+{
+  // 'ptr' is the stream's CallbackInfo; its 'object' member is the
+  // owning RtApiJack instance.
+  CallbackInfo *info = (CallbackInfo *) ptr;
+  RtApiJack *object = (RtApiJack *) info->object;
+
+  object->closeStream();
+
+  pthread_exit( NULL );  // terminate this helper thread
+}
+// JACK shutdown callback: invoked by libjack when the server goes away
+// or when this client is deactivated.
+static void jackShutdown( void *infoPointer )
+{
+  CallbackInfo *info = (CallbackInfo *) infoPointer;
+  RtApiJack *object = (RtApiJack *) info->object;
+
+  // Check current stream state. If stopped, then we'll assume this
+  // was called as a result of a call to RtApiJack::stopStream (the
+  // deactivation of a client handle causes this function to be called).
+  // If not, we'll assume the Jack server is shutting down or some
+  // other problem occurred and we should close the stream.
+  if ( object->isStreamRunning() == false ) return;
+
+  // Close the stream from a helper thread (see jackCloseStream above).
+  // NOTE(review): threadId is neither joined nor detached here — confirm
+  // the thread's resources are reclaimed elsewhere.
+  ThreadHandle threadId;
+  pthread_create( &threadId, NULL, jackCloseStream, info );
+  std::cerr << "\nRtApiJack: the Jack server is shutting down this client ... stream stopped and closed!!\n" << std::endl;
+}
+
+// JACK xrun callback: flag an over/underrun for whichever stream
+// directions (0 = output, 1 = input) have registered ports.
+static int jackXrun( void *infoPointer )
+{
+  JackHandle *handle = *static_cast<JackHandle **>( infoPointer );
+
+  for ( int i = 0; i < 2; i++ )
+    if ( handle->ports[i] ) handle->xrun[i] = true;
+
+  return 0;
+}
+
+// Open one direction (OUTPUT or INPUT) of a JACK stream on the given
+// synthesized device (client-name prefix index). Connects to the server,
+// validates the device/channel/sample-rate request, allocates the
+// JackHandle and conversion buffers, registers our ports and installs the
+// process/xrun/shutdown callbacks. Returns SUCCESS or FAILURE; on failure
+// all allocations made here are released at the 'error' label.
+bool RtApiJack :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
+                                   unsigned int firstChannel, unsigned int sampleRate,
+                                   RtAudioFormat format, unsigned int *bufferSize,
+                                   RtAudio::StreamOptions *options )
+{
+  JackHandle *handle = (JackHandle *) stream_.apiHandle;
+
+  // Look for jack server and try to become a client (only do once per stream).
+  jack_client_t *client = 0;
+  if ( mode == OUTPUT || ( mode == INPUT && stream_.mode != OUTPUT ) ) {
+    jack_options_t jackoptions = (jack_options_t) ( JackNoStartServer ); //JackNullOption;
+    jack_status_t *status = NULL;
+    if ( options && !options->streamName.empty() )
+      client = jack_client_open( options->streamName.c_str(), jackoptions, status );
+    else
+      client = jack_client_open( "RtApiJack", jackoptions, status );
+    if ( client == 0 ) {
+      errorText_ = "RtApiJack::probeDeviceOpen: Jack server not found or connection error!";
+      error( RtAudioError::WARNING );
+      return FAILURE;
+    }
+  }
+  else {
+    // The handle must have been created on an earlier pass.
+    client = handle->client;
+  }
+
+  // Enumerate distinct client-name prefixes to resolve 'device' to a name.
+  const char **ports;
+  std::string port, previousPort, deviceName;
+  unsigned int nPorts = 0, nDevices = 0;
+  ports = jack_get_ports( client, NULL, JACK_DEFAULT_AUDIO_TYPE, 0 );
+  if ( ports ) {
+    // Parse the port names up to the first colon (:).
+    size_t iColon = 0;
+    do {
+      port = (char *) ports[ nPorts ];
+      iColon = port.find(":");
+      if ( iColon != std::string::npos ) {
+        port = port.substr( 0, iColon );
+        if ( port != previousPort ) {
+          if ( nDevices == device ) deviceName = port;
+          nDevices++;
+          previousPort = port;
+        }
+      }
+    } while ( ports[++nPorts] );
+    free( ports );
+  }
+
+  if ( device >= nDevices ) {
+    // NOTE(review): a client freshly opened above is not closed on this
+    // early return — confirm whether that leak matters for callers.
+    errorText_ = "RtApiJack::probeDeviceOpen: device ID is invalid!";
+    return FAILURE;
+  }
+
+  unsigned long flag = JackPortIsInput;
+  if ( mode == INPUT ) flag = JackPortIsOutput;
+
+  if ( ! (options && (options->flags & RTAUDIO_JACK_DONT_CONNECT)) ) {
+    // Count the available ports containing the client name as device
+    // channels. Jack "input ports" equal RtAudio output channels.
+    unsigned int nChannels = 0;
+    ports = jack_get_ports( client, deviceName.c_str(), JACK_DEFAULT_AUDIO_TYPE, flag );
+    if ( ports ) {
+      while ( ports[ nChannels ] ) nChannels++;
+      free( ports );
+    }
+    // Compare the jack ports for specified client to the requested number of channels.
+    if ( nChannels < (channels + firstChannel) ) {
+      errorStream_ << "RtApiJack::probeDeviceOpen: requested number of channels (" << channels << ") + offset (" << firstChannel << ") not found for specified device (" << device << ":" << deviceName << ").";
+      errorText_ = errorStream_.str();
+      return FAILURE;
+    }
+  }
+
+  // Check the jack server sample rate.
+  unsigned int jackRate = jack_get_sample_rate( client );
+  if ( sampleRate != jackRate ) {
+    jack_client_close( client );
+    errorStream_ << "RtApiJack::probeDeviceOpen: the requested sample rate (" << sampleRate << ") is different than the JACK server rate (" << jackRate << ").";
+    errorText_ = errorStream_.str();
+    return FAILURE;
+  }
+  stream_.sampleRate = jackRate;
+
+  // Get the latency of the JACK port.
+  ports = jack_get_ports( client, deviceName.c_str(), JACK_DEFAULT_AUDIO_TYPE, flag );
+  if ( ports ) {
+    // BUGFIX: jack_get_ports() may return NULL or a list shorter than
+    // 'firstChannel' (the count check above is skipped when
+    // RTAUDIO_JACK_DONT_CONNECT is set); dereferencing ports[firstChannel]
+    // unconditionally could read past the NULL terminator or crash.
+    unsigned int nFound = 0;
+    while ( nFound < firstChannel && ports[ nFound ] ) nFound++;
+    if ( nFound == firstChannel && ports[ firstChannel ] ) {
+      // Added by Ge Wang
+      jack_latency_callback_mode_t cbmode = (mode == INPUT ? JackCaptureLatency : JackPlaybackLatency);
+      // the range (usually the min and max are equal)
+      jack_latency_range_t latrange; latrange.min = latrange.max = 0;
+      // get the latency range
+      jack_port_get_latency_range( jack_port_by_name( client, ports[firstChannel] ), cbmode, &latrange );
+      // be optimistic, use the min!
+      stream_.latency[mode] = latrange.min;
+      //stream_.latency[mode] = jack_port_get_latency( jack_port_by_name( client, ports[ firstChannel ] ) );
+    }
+    free( ports );
+  }
+
+  // The jack server always uses 32-bit floating-point data.
+  stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
+  stream_.userFormat = format;
+
+  if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
+  else stream_.userInterleaved = true;
+
+  // Jack always uses non-interleaved buffers.
+  stream_.deviceInterleaved[mode] = false;
+
+  // Jack always provides host byte-ordered data.
+  stream_.doByteSwap[mode] = false;
+
+  // Get the buffer size. The buffer size and number of buffers
+  // (periods) is set when the jack server is started.
+  stream_.bufferSize = (int) jack_get_buffer_size( client );
+  *bufferSize = stream_.bufferSize;
+
+  stream_.nDeviceChannels[mode] = channels;
+  stream_.nUserChannels[mode] = channels;
+
+  // Set flags for buffer conversion.
+  stream_.doConvertBuffer[mode] = false;
+  if ( stream_.userFormat != stream_.deviceFormat[mode] )
+    stream_.doConvertBuffer[mode] = true;
+  if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
+       stream_.nUserChannels[mode] > 1 )
+    stream_.doConvertBuffer[mode] = true;
+
+  // Allocate our JackHandle structure for the stream.
+  if ( handle == 0 ) {
+    try {
+      handle = new JackHandle;
+    }
+    catch ( std::bad_alloc& ) {
+      errorText_ = "RtApiJack::probeDeviceOpen: error allocating JackHandle memory.";
+      goto error;
+    }
+
+    if ( pthread_cond_init(&handle->condition, NULL) ) {
+      errorText_ = "RtApiJack::probeDeviceOpen: error initializing pthread condition variable.";
+      goto error;
+    }
+    stream_.apiHandle = (void *) handle;
+    handle->client = client;
+  }
+  handle->deviceName[mode] = deviceName;
+
+  // Allocate necessary internal buffers.
+  unsigned long bufferBytes;
+  bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
+  stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
+  if ( stream_.userBuffer[mode] == NULL ) {
+    errorText_ = "RtApiJack::probeDeviceOpen: error allocating user buffer memory.";
+    goto error;
+  }
+
+  if ( stream_.doConvertBuffer[mode] ) {
+
+    bool makeBuffer = true;
+    if ( mode == OUTPUT )
+      bufferBytes = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
+    else { // mode == INPUT
+      bufferBytes = stream_.nDeviceChannels[1] * formatBytes( stream_.deviceFormat[1] );
+      if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
+        // Reuse the (larger) output device buffer for duplex operation.
+        unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
+        if ( bufferBytes < bytesOut ) makeBuffer = false;
+      }
+    }
+
+    if ( makeBuffer ) {
+      bufferBytes *= *bufferSize;
+      if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
+      stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
+      if ( stream_.deviceBuffer == NULL ) {
+        errorText_ = "RtApiJack::probeDeviceOpen: error allocating device buffer memory.";
+        goto error;
+      }
+    }
+  }
+
+  // Allocate memory for the Jack ports (channels) identifiers.
+  handle->ports[mode] = (jack_port_t **) malloc ( sizeof (jack_port_t *) * channels );
+  if ( handle->ports[mode] == NULL )  {
+    errorText_ = "RtApiJack::probeDeviceOpen: error allocating port memory.";
+    goto error;
+  }
+
+  stream_.device[mode] = device;
+  stream_.channelOffset[mode] = firstChannel;
+  stream_.state = STREAM_STOPPED;
+  stream_.callbackInfo.object = (void *) this;
+
+  if ( stream_.mode == OUTPUT && mode == INPUT )
+    // We had already set up the stream for output.
+    stream_.mode = DUPLEX;
+  else {
+    stream_.mode = mode;
+    jack_set_process_callback( handle->client, jackCallbackHandler, (void *) &stream_.callbackInfo );
+    jack_set_xrun_callback( handle->client, jackXrun, (void *) &stream_.apiHandle );
+    jack_on_shutdown( handle->client, jackShutdown, (void *) &stream_.callbackInfo );
+  }
+
+  // Register our ports.
+  char label[64];
+  if ( mode == OUTPUT ) {
+    for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
+      snprintf( label, 64, "outport %d", i );
+      handle->ports[0][i] = jack_port_register( handle->client, (const char *)label,
+                                                JACK_DEFAULT_AUDIO_TYPE, JackPortIsOutput, 0 );
+    }
+  }
+  else {
+    for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
+      snprintf( label, 64, "inport %d", i );
+      handle->ports[1][i] = jack_port_register( handle->client, (const char *)label,
+                                                JACK_DEFAULT_AUDIO_TYPE, JackPortIsInput, 0 );
+    }
+  }
+
+  // Setup the buffer conversion information structure.  We don't use
+  // buffers to do channel offsets, so we override that parameter
+  // here.
+  if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, 0 );
+
+  if ( options && options->flags & RTAUDIO_JACK_DONT_CONNECT ) shouldAutoconnect_ = false;
+
+  return SUCCESS;
+
+ error:
+  // Release everything allocated above; leaves stream_ in a clean state.
+  if ( handle ) {
+    pthread_cond_destroy( &handle->condition );
+    jack_client_close( handle->client );
+
+    if ( handle->ports[0] ) free( handle->ports[0] );
+    if ( handle->ports[1] ) free( handle->ports[1] );
+
+    delete handle;
+    stream_.apiHandle = 0;
+  }
+
+  for ( int i=0; i<2; i++ ) {
+    if ( stream_.userBuffer[i] ) {
+      free( stream_.userBuffer[i] );
+      stream_.userBuffer[i] = 0;
+    }
+  }
+
+  if ( stream_.deviceBuffer ) {
+    free( stream_.deviceBuffer );
+    stream_.deviceBuffer = 0;
+  }
+
+  return FAILURE;
+}
+
+// Close the stream: deactivate and disconnect the JACK client, then
+// release the handle, port arrays, and conversion buffers.
+void RtApiJack :: closeStream( void )
+{
+  if ( stream_.state == STREAM_CLOSED ) {
+    errorText_ = "RtApiJack::closeStream(): no open stream to close!";
+    error( RtAudioError::WARNING );
+    return;
+  }
+
+  JackHandle *handle = (JackHandle *) stream_.apiHandle;
+  if ( handle ) {
+    // Stop the process callback before tearing the client down.
+    if ( stream_.state == STREAM_RUNNING )
+      jack_deactivate( handle->client );
+
+    jack_client_close( handle->client );
+
+    if ( handle->ports[0] ) free( handle->ports[0] );
+    if ( handle->ports[1] ) free( handle->ports[1] );
+    pthread_cond_destroy( &handle->condition );
+    delete handle;
+    stream_.apiHandle = 0;
+  }
+
+  for ( int direction = 0; direction < 2; direction++ ) {
+    if ( stream_.userBuffer[direction] ) {
+      free( stream_.userBuffer[direction] );
+      stream_.userBuffer[direction] = 0;
+    }
+  }
+
+  if ( stream_.deviceBuffer ) {
+    free( stream_.deviceBuffer );
+    stream_.deviceBuffer = 0;
+  }
+
+  stream_.mode = UNINITIALIZED;
+  stream_.state = STREAM_CLOSED;
+}
+
+// Activate the JACK client and (unless autoconnect is disabled) wire our
+// registered ports to the target device's ports. On any failure 'result'
+// stays non-zero and the 'unlock' label raises a SYSTEM_ERROR.
+void RtApiJack :: startStream( void )
+{
+  verifyStream();
+  if ( stream_.state == STREAM_RUNNING ) {
+    errorText_ = "RtApiJack::startStream(): the stream is already running!";
+    error( RtAudioError::WARNING );
+    return;
+  }
+
+  #if defined( HAVE_GETTIMEOFDAY )
+  gettimeofday( &stream_.lastTickTimestamp, NULL );
+  #endif
+
+  JackHandle *handle = (JackHandle *) stream_.apiHandle;
+  int result = jack_activate( handle->client );
+  if ( result ) {
+    errorText_ = "RtApiJack::startStream(): unable to activate JACK client!";
+    goto unlock;
+  }
+
+  const char **ports;
+
+  // Get the list of available ports.
+  if ( shouldAutoconnect_ && (stream_.mode == OUTPUT || stream_.mode == DUPLEX) ) {
+    result = 1;  // assume failure until every connection succeeds
+    ports = jack_get_ports( handle->client, handle->deviceName[0].c_str(), JACK_DEFAULT_AUDIO_TYPE, JackPortIsInput);
+    if ( ports == NULL) {
+      errorText_ = "RtApiJack::startStream(): error determining available JACK input ports!";
+      goto unlock;
+    }
+
+    // Now make the port connections.  Since RtAudio wasn't designed to
+    // allow the user to select particular channels of a device, we'll
+    // just open the first "nChannels" ports with offset.
+    for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
+      result = 1;
+      if ( ports[ stream_.channelOffset[0] + i ] )
+        result = jack_connect( handle->client, jack_port_name( handle->ports[0][i] ), ports[ stream_.channelOffset[0] + i ] );
+      if ( result ) {
+        free( ports );
+        errorText_ = "RtApiJack::startStream(): error connecting output ports!";
+        goto unlock;
+      }
+    }
+    free(ports);
+  }
+
+  if ( shouldAutoconnect_ && (stream_.mode == INPUT || stream_.mode == DUPLEX) ) {
+    result = 1;
+    ports = jack_get_ports( handle->client, handle->deviceName[1].c_str(), JACK_DEFAULT_AUDIO_TYPE, JackPortIsOutput );
+    if ( ports == NULL) {
+      errorText_ = "RtApiJack::startStream(): error determining available JACK output ports!";
+      goto unlock;
+    }
+
+    // Now make the port connections.  See note above.
+    for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
+      result = 1;
+      if ( ports[ stream_.channelOffset[1] + i ] )
+        result = jack_connect( handle->client, ports[ stream_.channelOffset[1] + i ], jack_port_name( handle->ports[1][i] ) );
+      if ( result ) {
+        free( ports );
+        errorText_ = "RtApiJack::startStream(): error connecting input ports!";
+        goto unlock;
+      }
+    }
+    free(ports);
+  }
+
+  // Reset drain bookkeeping for the new run.
+  handle->drainCounter = 0;
+  handle->internalDrain = false;
+  stream_.state = STREAM_RUNNING;
+
+ unlock:
+  if ( result == 0 ) return;
+  error( RtAudioError::SYSTEM_ERROR );
+}
+
+// Stop the stream: if output is active and no drain is pending, request a
+// drain (drainCounter = 2) and wait for the process callback to signal
+// completion, then deactivate the JACK client.
+void RtApiJack :: stopStream( void )
+{
+  verifyStream();
+  if ( stream_.state == STREAM_STOPPED ) {
+    errorText_ = "RtApiJack::stopStream(): the stream is already stopped!";
+    error( RtAudioError::WARNING );
+    return;
+  }
+
+  JackHandle *handle = (JackHandle *) stream_.apiHandle;
+  if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
+
+    // NOTE(review): pthread_cond_wait() is invoked without first locking
+    // stream_.mutex — POSIX requires the caller to hold the mutex; confirm
+    // this is intentional/safe in this code base.
+    if ( handle->drainCounter == 0 ) {
+      handle->drainCounter = 2;
+      pthread_cond_wait( &handle->condition, &stream_.mutex ); // block until signaled
+    }
+  }
+
+  jack_deactivate( handle->client );
+  stream_.state = STREAM_STOPPED;
+}
+
+// Abort the stream immediately: skip the output drain and stop.
+void RtApiJack :: abortStream( void )
+{
+  verifyStream();
+  if ( stream_.state == STREAM_STOPPED ) {
+    errorText_ = "RtApiJack::abortStream(): the stream is already stopped!";
+    error( RtAudioError::WARNING );
+    return;
+  }
+
+  // Pre-setting drainCounter to 2 makes stopStream() bypass its
+  // "wait for drain" branch, so the stream stops without fading out.
+  JackHandle *jackHandle = (JackHandle *) stream_.apiHandle;
+  jackHandle->drainCounter = 2;
+
+  stopStream();
+}
+
+// This function will be called by a spawned thread when the user
+// callback function signals that the stream should be stopped or
+// aborted. It is necessary to handle it this way because the
+// callbackEvent() function must return before the jack_deactivate()
+// function will return.
+static void *jackStopStream( void *ptr )
+{
+  // Runs on a helper thread so that callbackEvent() can return before
+  // jack_deactivate() is reached inside stopStream().
+  CallbackInfo *cbInfo = static_cast<CallbackInfo *>( ptr );
+  RtApiJack *api = static_cast<RtApiJack *>( cbInfo->object );
+
+  api->stopStream();
+  pthread_exit( NULL );
+}
+
+// Per-cycle audio processing, invoked from jackCallbackHandler(): run the
+// user callback, convert/copy the user buffers to/from the per-channel
+// JACK port buffers, and manage the drain/stop handshake with stopStream().
+// Returns SUCCESS to keep the callback alive, FAILURE to detach it.
+bool RtApiJack :: callbackEvent( unsigned long nframes )
+{
+  if ( stream_.state == STREAM_STOPPED || stream_.state == STREAM_STOPPING ) return SUCCESS;
+  if ( stream_.state == STREAM_CLOSED ) {
+    // BUGFIX: these two messages previously said "RtApiCore::" — a
+    // copy-paste from the CoreAudio backend.
+    errorText_ = "RtApiJack::callbackEvent(): the stream is closed ... this shouldn't happen!";
+    error( RtAudioError::WARNING );
+    return FAILURE;
+  }
+  if ( stream_.bufferSize != nframes ) {
+    errorText_ = "RtApiJack::callbackEvent(): the JACK buffer size has changed ... cannot process!";
+    error( RtAudioError::WARNING );
+    return FAILURE;
+  }
+
+  CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
+  JackHandle *handle = (JackHandle *) stream_.apiHandle;
+
+  // Check if we were draining the stream and signal is finished.
+  if ( handle->drainCounter > 3 ) {
+    ThreadHandle threadId;
+
+    stream_.state = STREAM_STOPPING;
+    if ( handle->internalDrain == true )
+      // Drain was requested from within the callback: stop on a helper thread.
+      pthread_create( &threadId, NULL, jackStopStream, info );
+    else
+      // stopStream() is blocked in pthread_cond_wait(); wake it up.
+      pthread_cond_signal( &handle->condition );
+    return SUCCESS;
+  }
+
+  // Invoke user callback first, to get fresh output data.
+  if ( handle->drainCounter == 0 ) {
+    RtAudioCallback callback = (RtAudioCallback) info->callback;
+    double streamTime = getStreamTime();
+    RtAudioStreamStatus status = 0;
+    if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
+      status |= RTAUDIO_OUTPUT_UNDERFLOW;
+      handle->xrun[0] = false;
+    }
+    if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
+      status |= RTAUDIO_INPUT_OVERFLOW;
+      handle->xrun[1] = false;
+    }
+    int cbReturnValue = callback( stream_.userBuffer[0], stream_.userBuffer[1],
+                                  stream_.bufferSize, streamTime, status, info->userData );
+    if ( cbReturnValue == 2 ) {
+      // User requested an immediate abort.
+      stream_.state = STREAM_STOPPING;
+      handle->drainCounter = 2;
+      ThreadHandle id;
+      pthread_create( &id, NULL, jackStopStream, info );
+      return SUCCESS;
+    }
+    else if ( cbReturnValue == 1 ) {
+      // User requested a graceful stop: start draining.
+      handle->drainCounter = 1;
+      handle->internalDrain = true;
+    }
+  }
+
+  jack_default_audio_sample_t *jackbuffer;
+  unsigned long bufferBytes = nframes * sizeof( jack_default_audio_sample_t );
+  if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
+
+    if ( handle->drainCounter > 1 ) { // write zeros to the output stream
+
+      for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
+        jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
+        memset( jackbuffer, 0, bufferBytes );
+      }
+
+    }
+    else if ( stream_.doConvertBuffer[0] ) {
+
+      convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
+
+      for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
+        jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
+        memcpy( jackbuffer, &stream_.deviceBuffer[i*bufferBytes], bufferBytes );
+      }
+    }
+    else { // no buffer conversion
+      for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
+        jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
+        memcpy( jackbuffer, &stream_.userBuffer[0][i*bufferBytes], bufferBytes );
+      }
+    }
+  }
+
+  // Don't bother draining input
+  if ( handle->drainCounter ) {
+    handle->drainCounter++;
+    goto unlock;
+  }
+
+  if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
+
+    if ( stream_.doConvertBuffer[1] ) {
+      for ( unsigned int i=0; i<stream_.nDeviceChannels[1]; i++ ) {
+        jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[1][i], (jack_nframes_t) nframes );
+        memcpy( &stream_.deviceBuffer[i*bufferBytes], jackbuffer, bufferBytes );
+      }
+      convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
+    }
+    else { // no buffer conversion
+      for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
+        jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[1][i], (jack_nframes_t) nframes );
+        memcpy( &stream_.userBuffer[1][i*bufferBytes], jackbuffer, bufferBytes );
+      }
+    }
+  }
+
+ unlock:
+  RtApi::tickStreamTime();
+  return SUCCESS;
+}
+ //******************** End of __UNIX_JACK__ *********************//
+#endif
+
+#if defined(__WINDOWS_ASIO__) // ASIO API on Windows
+
+// The ASIO API is designed around a callback scheme, so this
+// implementation is similar to that used for OS-X CoreAudio and Linux
+// Jack. The primary constraint with ASIO is that it only allows
+// access to a single driver at a time. Thus, it is not possible to
+// have more than one simultaneous RtAudio stream.
+//
+// This implementation also requires a number of external ASIO files
+// and a few global variables. The ASIO callback scheme does not
+// allow for the passing of user data, so we must create a global
+// pointer to our callbackInfo structure.
+//
+// On unix systems, we make use of a pthread condition variable.
+// Since there is no equivalent in Windows, I hacked something based
+// on information found in
+// http://www.cs.wustl.edu/~schmidt/win32-cv-1.html.
+
+#include "asiosys.h"
+#include "asio.h"
+#include "iasiothiscallresolver.h"
+#include "asiodrivers.h"
+#include <cmath>
+
+static AsioDrivers drivers;
+static ASIOCallbacks asioCallbacks;
+static ASIODriverInfo driverInfo;
+static CallbackInfo *asioCallbackInfo;
+static bool asioXRun;
+
+// Per-stream bookkeeping for the ASIO backend.
+struct AsioHandle {
+  int drainCounter; // Tracks callback counts when draining
+  bool internalDrain; // Indicates if stop is initiated from callback or not.
+  ASIOBufferInfo *bufferInfos; // Per-channel ASIO buffer descriptors
+  HANDLE condition; // Win32 event used for stop/drain signaling
+                    // NOTE(review): not initialized by this constructor —
+                    // presumably created later (e.g. in probeDeviceOpen); verify.
+
+  AsioHandle()
+    :drainCounter(0), internalDrain(false), bufferInfos(0) {}
+};
+
+// Function declarations (definitions at end of section)
+static const char* getAsioErrorString( ASIOError result );
+static void sampleRateChanged( ASIOSampleRate sRate );
+static long asioMessages( long selector, long value, void* message, double* opt );
+
+RtApiAsio :: RtApiAsio()
+{
+  // ASIO cannot run on a multi-threaded appartment. You can call
+  // CoInitialize beforehand, but it must be for appartment threading
+  // (in which case, CoInitilialize will return S_FALSE here).
+  coInitialized_ = false;
+  HRESULT hr = CoInitialize( NULL );
+  if ( FAILED(hr) ) {
+    errorText_ = "RtApiAsio::ASIO requires a single-threaded appartment. Call CoInitializeEx(0,COINIT_APARTMENTTHREADED)";
+    error( RtAudioError::WARNING );
+  }
+  // BUGFIX: only record success. The destructor calls CoUninitialize()
+  // when coInitialized_ is set, and a failed CoInitialize() (e.g.
+  // RPC_E_CHANGED_MODE) must not be balanced by CoUninitialize().
+  // Previously this was unconditionally set to true.
+  coInitialized_ = SUCCEEDED( hr );
+
+  drivers.removeCurrentDriver();
+  driverInfo.asioVersion = 2;
+
+  // See note in DirectSound implementation about GetDesktopWindow().
+  driverInfo.sysRef = GetForegroundWindow();
+}
+
+RtApiAsio :: ~RtApiAsio()
+{
+  // Tear down any open stream before releasing COM.
+  if ( stream_.state != STREAM_CLOSED ) closeStream();
+  if ( coInitialized_ ) CoUninitialize();
+}
+
+// Number of ASIO drivers registered on the system.
+unsigned int RtApiAsio :: getDeviceCount( void )
+{
+  return (unsigned int) drivers.asioGetNumDev();
+}
+
+// Probe the ASIO driver at the given index and fill an RtAudio::DeviceInfo
+// (channel counts, supported sample rates, native format, defaults). While a
+// stream is open only one driver can run, so cached results from
+// saveDeviceInfo() are returned instead.
+RtAudio::DeviceInfo RtApiAsio :: getDeviceInfo( unsigned int device )
+{
+  RtAudio::DeviceInfo info;
+  info.probed = false;
+
+  // Get device ID
+  unsigned int nDevices = getDeviceCount();
+  if ( nDevices == 0 ) {
+    errorText_ = "RtApiAsio::getDeviceInfo: no devices found!";
+    error( RtAudioError::INVALID_USE );
+    return info;
+  }
+
+  if ( device >= nDevices ) {
+    errorText_ = "RtApiAsio::getDeviceInfo: device ID is invalid!";
+    error( RtAudioError::INVALID_USE );
+    return info;
+  }
+
+  // If a stream is already open, we cannot probe other devices.  Thus, use the saved results.
+  if ( stream_.state != STREAM_CLOSED ) {
+    if ( device >= devices_.size() ) {
+      errorText_ = "RtApiAsio::getDeviceInfo: device ID was not present before stream was opened.";
+      error( RtAudioError::WARNING );
+      return info;
+    }
+    return devices_[ device ];
+  }
+
+  char driverName[32];
+  ASIOError result = drivers.asioGetDriverName( (int) device, driverName, 32 );
+  if ( result != ASE_OK ) {
+    errorStream_ << "RtApiAsio::getDeviceInfo: unable to get driver name (" << getAsioErrorString( result ) << ").";
+    errorText_ = errorStream_.str();
+    error( RtAudioError::WARNING );
+    return info;
+  }
+
+  info.name = driverName;
+
+  if ( !drivers.loadDriver( driverName ) ) {
+    errorStream_ << "RtApiAsio::getDeviceInfo: unable to load driver (" << driverName << ").";
+    errorText_ = errorStream_.str();
+    error( RtAudioError::WARNING );
+    return info;
+  }
+
+  result = ASIOInit( &driverInfo );
+  if ( result != ASE_OK ) {
+    // BUGFIX: unload the driver before returning, consistent with the
+    // ASIOGetChannels()/ASIOGetChannelInfo() failure paths below;
+    // previously the loaded driver was left behind on this path.
+    drivers.removeCurrentDriver();
+    errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") initializing driver (" << driverName << ").";
+    errorText_ = errorStream_.str();
+    error( RtAudioError::WARNING );
+    return info;
+  }
+
+  // Determine the device channel information.
+  long inputChannels, outputChannels;
+  result = ASIOGetChannels( &inputChannels, &outputChannels );
+  if ( result != ASE_OK ) {
+    drivers.removeCurrentDriver();
+    errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") getting channel count (" << driverName << ").";
+    errorText_ = errorStream_.str();
+    error( RtAudioError::WARNING );
+    return info;
+  }
+
+  info.outputChannels = outputChannels;
+  info.inputChannels = inputChannels;
+  if ( info.outputChannels > 0 && info.inputChannels > 0 )
+    info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
+
+  // Determine the supported sample rates.
+  info.sampleRates.clear();
+  for ( unsigned int i=0; i<MAX_SAMPLE_RATES; i++ ) {
+    result = ASIOCanSampleRate( (ASIOSampleRate) SAMPLE_RATES[i] );
+    if ( result == ASE_OK ) {
+      info.sampleRates.push_back( SAMPLE_RATES[i] );
+
+      // Prefer the highest supported rate that does not exceed 48 kHz.
+      if ( !info.preferredSampleRate || ( SAMPLE_RATES[i] <= 48000 && SAMPLE_RATES[i] > info.preferredSampleRate ) )
+        info.preferredSampleRate = SAMPLE_RATES[i];
+    }
+  }
+
+  // Determine supported data types ... just check first channel and assume rest are the same.
+  ASIOChannelInfo channelInfo;
+  channelInfo.channel = 0;
+  channelInfo.isInput = true;
+  if ( info.inputChannels <= 0 ) channelInfo.isInput = false;
+  result = ASIOGetChannelInfo( &channelInfo );
+  if ( result != ASE_OK ) {
+    drivers.removeCurrentDriver();
+    errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") getting driver channel info (" << driverName << ").";
+    errorText_ = errorStream_.str();
+    error( RtAudioError::WARNING );
+    return info;
+  }
+
+  info.nativeFormats = 0;
+  if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB )
+    info.nativeFormats |= RTAUDIO_SINT16;
+  else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB )
+    info.nativeFormats |= RTAUDIO_SINT32;
+  else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB )
+    info.nativeFormats |= RTAUDIO_FLOAT32;
+  else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB )
+    info.nativeFormats |= RTAUDIO_FLOAT64;
+  else if ( channelInfo.type == ASIOSTInt24MSB || channelInfo.type == ASIOSTInt24LSB )
+    info.nativeFormats |= RTAUDIO_SINT24;
+
+  if ( info.outputChannels > 0 )
+    if ( getDefaultOutputDevice() == device ) info.isDefaultOutput = true;
+  if ( info.inputChannels > 0 )
+    if ( getDefaultInputDevice() == device ) info.isDefaultInput = true;
+
+  info.probed = true;
+  drivers.removeCurrentDriver();
+  return info;
+}
+
+// ASIO buffer-switch callback. ASIO passes no user data, so the stream's
+// CallbackInfo is reached through the file-level asioCallbackInfo pointer.
+static void bufferSwitch( long index, ASIOBool /*processNow*/ )
+{
+  RtApiAsio *object = (RtApiAsio *) asioCallbackInfo->object;
+  object->callbackEvent( index );
+}
+
+// Snapshot info for every device so getDeviceInfo() can answer queries
+// while a stream is open (ASIO allows only one active driver at a time).
+void RtApiAsio :: saveDeviceInfo( void )
+{
+  devices_.clear();
+
+  const unsigned int count = getDeviceCount();
+  devices_.reserve( count );
+  for ( unsigned int i = 0; i < count; i++ )
+    devices_.push_back( getDeviceInfo( i ) );
+}
+
+bool RtApiAsio :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
+ unsigned int firstChannel, unsigned int sampleRate,
+ RtAudioFormat format, unsigned int *bufferSize,
+ RtAudio::StreamOptions *options )
+{////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ bool isDuplexInput = mode == INPUT && stream_.mode == OUTPUT;
+
+ // For ASIO, a duplex stream MUST use the same driver.
+ if ( isDuplexInput && stream_.device[0] != device ) {
+ errorText_ = "RtApiAsio::probeDeviceOpen: an ASIO duplex stream must use the same device for input and output!";
+ return FAILURE;
+ }
+
+ char driverName[32];
+ ASIOError result = drivers.asioGetDriverName( (int) device, driverName, 32 );
+ if ( result != ASE_OK ) {
+ errorStream_ << "RtApiAsio::probeDeviceOpen: unable to get driver name (" << getAsioErrorString( result ) << ").";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // Only load the driver once for duplex stream.
+ if ( !isDuplexInput ) {
+ // The getDeviceInfo() function will not work when a stream is open
+ // because ASIO does not allow multiple devices to run at the same
+ // time. Thus, we'll probe the system before opening a stream and
+ // save the results for use by getDeviceInfo().
+ this->saveDeviceInfo();
+
+ if ( !drivers.loadDriver( driverName ) ) {
+ errorStream_ << "RtApiAsio::probeDeviceOpen: unable to load driver (" << driverName << ").";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ result = ASIOInit( &driverInfo );
+ if ( result != ASE_OK ) {
+ errorStream_ << "RtApiAsio::probeDeviceOpen: error (" << getAsioErrorString( result ) << ") initializing driver (" << driverName << ").";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+ }
+
+ // keep them before any "goto error", they are used for error cleanup + goto device boundary checks
+ bool buffersAllocated = false;
+ AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
+ unsigned int nChannels;
+
+
+ // Check the device channel count.
+ long inputChannels, outputChannels;
+ result = ASIOGetChannels( &inputChannels, &outputChannels );
+ if ( result != ASE_OK ) {
+ errorStream_ << "RtApiAsio::probeDeviceOpen: error (" << getAsioErrorString( result ) << ") getting channel count (" << driverName << ").";
+ errorText_ = errorStream_.str();
+ goto error;
+ }
+
+ if ( ( mode == OUTPUT && (channels+firstChannel) > (unsigned int) outputChannels) ||
+ ( mode == INPUT && (channels+firstChannel) > (unsigned int) inputChannels) ) {
+ errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") does not support requested channel count (" << channels << ") + offset (" << firstChannel << ").";
+ errorText_ = errorStream_.str();
+ goto error;
+ }
+ stream_.nDeviceChannels[mode] = channels;
+ stream_.nUserChannels[mode] = channels;
+ stream_.channelOffset[mode] = firstChannel;
+
+ // Verify the sample rate is supported.
+ result = ASIOCanSampleRate( (ASIOSampleRate) sampleRate );
+ if ( result != ASE_OK ) {
+ errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") does not support requested sample rate (" << sampleRate << ").";
+ errorText_ = errorStream_.str();
+ goto error;
+ }
+
+ // Get the current sample rate
+ ASIOSampleRate currentRate;
+ result = ASIOGetSampleRate( ¤tRate );
+ if ( result != ASE_OK ) {
+ errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error getting sample rate.";
+ errorText_ = errorStream_.str();
+ goto error;
+ }
+
+ // Set the sample rate only if necessary
+ if ( currentRate != sampleRate ) {
+ result = ASIOSetSampleRate( (ASIOSampleRate) sampleRate );
+ if ( result != ASE_OK ) {
+ errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error setting sample rate (" << sampleRate << ").";
+ errorText_ = errorStream_.str();
+ goto error;
+ }
+ }
+
+ // Determine the driver data type.
+ ASIOChannelInfo channelInfo;
+ channelInfo.channel = 0;
+ if ( mode == OUTPUT ) channelInfo.isInput = false;
+ else channelInfo.isInput = true;
+ result = ASIOGetChannelInfo( &channelInfo );
+ if ( result != ASE_OK ) {
+ errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting data format.";
+ errorText_ = errorStream_.str();
+ goto error;
+ }
+
+ // Assuming WINDOWS host is always little-endian.
+ stream_.doByteSwap[mode] = false;
+ stream_.userFormat = format;
+ stream_.deviceFormat[mode] = 0;
+ if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB ) {
+ stream_.deviceFormat[mode] = RTAUDIO_SINT16;
+ if ( channelInfo.type == ASIOSTInt16MSB ) stream_.doByteSwap[mode] = true;
+ }
+ else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB ) {
+ stream_.deviceFormat[mode] = RTAUDIO_SINT32;
+ if ( channelInfo.type == ASIOSTInt32MSB ) stream_.doByteSwap[mode] = true;
+ }
+ else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB ) {
+ stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
+ if ( channelInfo.type == ASIOSTFloat32MSB ) stream_.doByteSwap[mode] = true;
+ }
+ else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB ) {
+ stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
+ if ( channelInfo.type == ASIOSTFloat64MSB ) stream_.doByteSwap[mode] = true;
+ }
+ else if ( channelInfo.type == ASIOSTInt24MSB || channelInfo.type == ASIOSTInt24LSB ) {
+ stream_.deviceFormat[mode] = RTAUDIO_SINT24;
+ if ( channelInfo.type == ASIOSTInt24MSB ) stream_.doByteSwap[mode] = true;
+ }
+
+ if ( stream_.deviceFormat[mode] == 0 ) {
+ errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") data format not supported by RtAudio.";
+ errorText_ = errorStream_.str();
+ goto error;
+ }
+
+ // Set the buffer size. For a duplex stream, this will end up
+ // setting the buffer size based on the input constraints, which
+ // should be ok.
+ long minSize, maxSize, preferSize, granularity;
+ result = ASIOGetBufferSize( &minSize, &maxSize, &preferSize, &granularity );
+ if ( result != ASE_OK ) {
+ errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting buffer size.";
+ errorText_ = errorStream_.str();
+ goto error;
+ }
+
+ if ( isDuplexInput ) {
+ // When this is the duplex input (output was opened before), then we have to use the same
+ // buffersize as the output, because it might use the preferred buffer size, which most
+ // likely wasn't passed as input to this. The buffer sizes have to be identically anyway,
+ // So instead of throwing an error, make them equal. The caller uses the reference
+ // to the "bufferSize" param as usual to set up processing buffers.
+
+ *bufferSize = stream_.bufferSize;
+
+ } else {
+ if ( *bufferSize == 0 ) *bufferSize = preferSize;
+ else if ( *bufferSize < (unsigned int) minSize ) *bufferSize = (unsigned int) minSize;
+ else if ( *bufferSize > (unsigned int) maxSize ) *bufferSize = (unsigned int) maxSize;
+ else if ( granularity == -1 ) {
+ // Make sure bufferSize is a power of two.
+ int log2_of_min_size = 0;
+ int log2_of_max_size = 0;
+
+ for ( unsigned int i = 0; i < sizeof(long) * 8; i++ ) {
+ if ( minSize & ((long)1 << i) ) log2_of_min_size = i;
+ if ( maxSize & ((long)1 << i) ) log2_of_max_size = i;
+ }
+
+ long min_delta = std::abs( (long)*bufferSize - ((long)1 << log2_of_min_size) );
+ int min_delta_num = log2_of_min_size;
+
+ for (int i = log2_of_min_size + 1; i <= log2_of_max_size; i++) {
+ long current_delta = std::abs( (long)*bufferSize - ((long)1 << i) );
+ if (current_delta < min_delta) {
+ min_delta = current_delta;
+ min_delta_num = i;
+ }
+ }
+
+ *bufferSize = ( (unsigned int)1 << min_delta_num );
+ if ( *bufferSize < (unsigned int) minSize ) *bufferSize = (unsigned int) minSize;
+ else if ( *bufferSize > (unsigned int) maxSize ) *bufferSize = (unsigned int) maxSize;
+ }
+ else if ( granularity != 0 ) {
+ // Set to an even multiple of granularity, rounding up.
+ *bufferSize = (*bufferSize + granularity-1) / granularity * granularity;
+ }
+ }
+
+ /*
+ // we don't use it anymore, see above!
+ // Just left it here for the case...
+ if ( isDuplexInput && stream_.bufferSize != *bufferSize ) {
+ errorText_ = "RtApiAsio::probeDeviceOpen: input/output buffersize discrepancy!";
+ goto error;
+ }
+ */
+
+ stream_.bufferSize = *bufferSize;
+ stream_.nBuffers = 2;
+
+ if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
+ else stream_.userInterleaved = true;
+
+ // ASIO always uses non-interleaved buffers.
+ stream_.deviceInterleaved[mode] = false;
+
+ // Allocate, if necessary, our AsioHandle structure for the stream.
+ if ( handle == 0 ) {
+ try {
+ handle = new AsioHandle;
+ }
+ catch ( std::bad_alloc& ) {
+ errorText_ = "RtApiAsio::probeDeviceOpen: error allocating AsioHandle memory.";
+ goto error;
+ }
+ handle->bufferInfos = 0;
+
+ // Create a manual-reset event.
+ handle->condition = CreateEvent( NULL, // no security
+ TRUE, // manual-reset
+ FALSE, // non-signaled initially
+ NULL ); // unnamed
+ stream_.apiHandle = (void *) handle;
+ }
+
+ // Create the ASIO internal buffers. Since RtAudio sets up input
+ // and output separately, we'll have to dispose of previously
+ // created output buffers for a duplex stream.
+ if ( mode == INPUT && stream_.mode == OUTPUT ) {
+ ASIODisposeBuffers();
+ if ( handle->bufferInfos ) free( handle->bufferInfos );
+ }
+
+ // Allocate, initialize, and save the bufferInfos in our stream callbackInfo structure.
+ unsigned int i;
+ nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];
+ handle->bufferInfos = (ASIOBufferInfo *) malloc( nChannels * sizeof(ASIOBufferInfo) );
+ if ( handle->bufferInfos == NULL ) {
+ errorStream_ << "RtApiAsio::probeDeviceOpen: error allocating bufferInfo memory for driver (" << driverName << ").";
+ errorText_ = errorStream_.str();
+ goto error;
+ }
+
+ ASIOBufferInfo *infos;
+ infos = handle->bufferInfos;
+ for ( i=0; i<stream_.nDeviceChannels[0]; i++, infos++ ) {
+ infos->isInput = ASIOFalse;
+ infos->channelNum = i + stream_.channelOffset[0];
+ infos->buffers[0] = infos->buffers[1] = 0;
+ }
+ for ( i=0; i<stream_.nDeviceChannels[1]; i++, infos++ ) {
+ infos->isInput = ASIOTrue;
+ infos->channelNum = i + stream_.channelOffset[1];
+ infos->buffers[0] = infos->buffers[1] = 0;
+ }
+
+ // prepare for callbacks
+ stream_.sampleRate = sampleRate;
+ stream_.device[mode] = device;
+ stream_.mode = isDuplexInput ? DUPLEX : mode;
+
+ // store this class instance before registering callbacks, that are going to use it
+ asioCallbackInfo = &stream_.callbackInfo;
+ stream_.callbackInfo.object = (void *) this;
+
+ // Set up the ASIO callback structure and create the ASIO data buffers.
+ asioCallbacks.bufferSwitch = &bufferSwitch;
+ asioCallbacks.sampleRateDidChange = &sampleRateChanged;
+ asioCallbacks.asioMessage = &asioMessages;
+ asioCallbacks.bufferSwitchTimeInfo = NULL;
+ result = ASIOCreateBuffers( handle->bufferInfos, nChannels, stream_.bufferSize, &asioCallbacks );
+ if ( result != ASE_OK ) {
+ // Standard method failed. This can happen with strict/misbehaving drivers that return valid buffer size ranges
+ // but only accept the preferred buffer size as parameter for ASIOCreateBuffers (e.g. Creative's ASIO driver).
+ // In that case, let's be naïve and try that instead.
+ *bufferSize = preferSize;
+ stream_.bufferSize = *bufferSize;
+ result = ASIOCreateBuffers( handle->bufferInfos, nChannels, stream_.bufferSize, &asioCallbacks );
+ }
+
+ if ( result != ASE_OK ) {
+ errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") creating buffers.";
+ errorText_ = errorStream_.str();
+ goto error;
+ }
+ buffersAllocated = true;
+ stream_.state = STREAM_STOPPED;
+
+ // Set flags for buffer conversion.
+ stream_.doConvertBuffer[mode] = false;
+ if ( stream_.userFormat != stream_.deviceFormat[mode] )
+ stream_.doConvertBuffer[mode] = true;
+ if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
+ stream_.nUserChannels[mode] > 1 )
+ stream_.doConvertBuffer[mode] = true;
+
+ // Allocate necessary internal buffers
+ unsigned long bufferBytes;
+ bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
+ stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
+ if ( stream_.userBuffer[mode] == NULL ) {
+ errorText_ = "RtApiAsio::probeDeviceOpen: error allocating user buffer memory.";
+ goto error;
+ }
+
+ if ( stream_.doConvertBuffer[mode] ) {
+
+ bool makeBuffer = true;
+ bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
+ if ( isDuplexInput && stream_.deviceBuffer ) {
+ unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
+ if ( bufferBytes <= bytesOut ) makeBuffer = false;
+ }
+
+ if ( makeBuffer ) {
+ bufferBytes *= *bufferSize;
+ if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
+ stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
+ if ( stream_.deviceBuffer == NULL ) {
+ errorText_ = "RtApiAsio::probeDeviceOpen: error allocating device buffer memory.";
+ goto error;
+ }
+ }
+ }
+
+ // Determine device latencies
+ long inputLatency, outputLatency;
+ result = ASIOGetLatencies( &inputLatency, &outputLatency );
+ if ( result != ASE_OK ) {
+ errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting latency.";
+ errorText_ = errorStream_.str();
+ error( RtAudioError::WARNING); // warn but don't fail
+ }
+ else {
+ stream_.latency[0] = outputLatency;
+ stream_.latency[1] = inputLatency;
+ }
+
+ // Setup the buffer conversion information structure. We don't use
+ // buffers to do channel offsets, so we override that parameter
+ // here.
+ if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, 0 );
+
+ return SUCCESS;
+
+ error:
+ if ( !isDuplexInput ) {
+ // the cleanup for error in the duplex input, is done by RtApi::openStream
+ // So we clean up for single channel only
+
+ if ( buffersAllocated )
+ ASIODisposeBuffers();
+
+ drivers.removeCurrentDriver();
+
+ if ( handle ) {
+ CloseHandle( handle->condition );
+ if ( handle->bufferInfos )
+ free( handle->bufferInfos );
+
+ delete handle;
+ stream_.apiHandle = 0;
+ }
+
+
+ if ( stream_.userBuffer[mode] ) {
+ free( stream_.userBuffer[mode] );
+ stream_.userBuffer[mode] = 0;
+ }
+
+ if ( stream_.deviceBuffer ) {
+ free( stream_.deviceBuffer );
+ stream_.deviceBuffer = 0;
+ }
+ }
+
+ return FAILURE;
+}
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+// Close the currently open ASIO stream.
+// Stops the driver if running, disposes its buffers, releases the current
+// driver, destroys the per-stream AsioHandle (event + bufferInfos) and frees
+// the user/device conversion buffers. Resets mode/state so a new stream can
+// be opened afterwards. Emits only a WARNING if no stream is open.
+void RtApiAsio :: closeStream()
+{
+  if ( stream_.state == STREAM_CLOSED ) {
+    errorText_ = "RtApiAsio::closeStream(): no open stream to close!";
+    error( RtAudioError::WARNING );
+    return;
+  }
+
+  if ( stream_.state == STREAM_RUNNING ) {
+    // Flip the state first so callbackEvent() bails out while ASIOStop() runs.
+    stream_.state = STREAM_STOPPED;
+    ASIOStop();
+  }
+  ASIODisposeBuffers();
+  drivers.removeCurrentDriver();
+
+  AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
+  if ( handle ) {
+    CloseHandle( handle->condition );
+    if ( handle->bufferInfos )
+      free( handle->bufferInfos );
+    delete handle;
+    stream_.apiHandle = 0;
+  }
+
+  // Free both user buffers: [0] = output, [1] = input.
+  for ( int i=0; i<2; i++ ) {
+    if ( stream_.userBuffer[i] ) {
+      free( stream_.userBuffer[i] );
+      stream_.userBuffer[i] = 0;
+    }
+  }
+
+  if ( stream_.deviceBuffer ) {
+    free( stream_.deviceBuffer );
+    stream_.deviceBuffer = 0;
+  }
+
+  stream_.mode = UNINITIALIZED;
+  stream_.state = STREAM_CLOSED;
+}
+
+// File-scope flag cleared at the end of startStream(); presumably set when a
+// stop is initiated elsewhere — its setter is not visible in this section.
+// NOTE(review): non-atomic shared flag; confirm the access pattern is safe.
+bool stopThreadCalled = false;
+
+// Start the previously opened ASIO stream.
+// Warns (without failing) if the stream is already running; otherwise calls
+// ASIOStart(), resets the drain bookkeeping and the manual-reset completion
+// event, and marks the stream RUNNING. On driver failure it raises
+// SYSTEM_ERROR after clearing stopThreadCalled.
+void RtApiAsio :: startStream()
+{
+  verifyStream();
+  if ( stream_.state == STREAM_RUNNING ) {
+    errorText_ = "RtApiAsio::startStream(): the stream is already running!";
+    error( RtAudioError::WARNING );
+    return;
+  }
+
+  #if defined( HAVE_GETTIMEOFDAY )
+  // Record the start time so stream-time tick bookkeeping has a baseline.
+  gettimeofday( &stream_.lastTickTimestamp, NULL );
+  #endif
+
+  AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
+  ASIOError result = ASIOStart();
+  if ( result != ASE_OK ) {
+    errorStream_ << "RtApiAsio::startStream: error (" << getAsioErrorString( result ) << ") starting device.";
+    errorText_ = errorStream_.str();
+    goto unlock;
+  }
+
+  handle->drainCounter = 0;
+  handle->internalDrain = false;
+  ResetEvent( handle->condition );  // non-signaled until a drain completes
+  stream_.state = STREAM_RUNNING;
+  asioXRun = false;
+
+ unlock:
+  stopThreadCalled = false;
+
+  if ( result == ASE_OK ) return;
+  error( RtAudioError::SYSTEM_ERROR );
+}
+
+// Stop the ASIO stream, draining pending output first.
+// For output/duplex streams with no drain already in progress this sets
+// drainCounter = 2 and blocks on the handle's event until callbackEvent()
+// signals drain completion (its drainCounter > 3 path). Warns if already
+// stopped; raises SYSTEM_ERROR if ASIOStop() fails.
+void RtApiAsio :: stopStream()
+{
+  verifyStream();
+  if ( stream_.state == STREAM_STOPPED ) {
+    errorText_ = "RtApiAsio::stopStream(): the stream is already stopped!";
+    error( RtAudioError::WARNING );
+    return;
+  }
+
+  AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
+  if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
+    if ( handle->drainCounter == 0 ) {
+      handle->drainCounter = 2;
+      WaitForSingleObject( handle->condition, INFINITE );  // block until signaled
+    }
+  }
+
+  stream_.state = STREAM_STOPPED;
+
+  ASIOError result = ASIOStop();
+  if ( result != ASE_OK ) {
+    errorStream_ << "RtApiAsio::stopStream: error (" << getAsioErrorString( result ) << ") stopping device.";
+    errorText_ = errorStream_.str();
+  }
+
+  if ( result == ASE_OK ) return;
+  error( RtAudioError::SYSTEM_ERROR );
+}
+
+// Abort the ASIO stream.
+// Historically this skipped the output drain, but some devices kept emitting
+// stale buffer contents, so aborting now simply delegates to stopStream().
+void RtApiAsio :: abortStream()
+{
+  verifyStream();
+  if ( stream_.state == STREAM_STOPPED ) {
+    errorText_ = "RtApiAsio::abortStream(): the stream is already stopped!";
+    error( RtAudioError::WARNING );
+    return;
+  }
+
+  // The following lines were commented-out because some behavior was
+  // noted where the device buffers need to be zeroed to avoid
+  // continuing sound, even when the device buffers are completely
+  // disposed. So now, calling abort is the same as calling stop.
+  // AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
+  // handle->drainCounter = 2;
+  stopStream();
+}
+
+// This function will be called by a spawned thread when the user
+// callback function signals that the stream should be stopped or
+// aborted. It is necessary to handle it this way because the
+// callbackEvent() function must return before the ASIOStop()
+// function will return.
+// 'ptr' is the stream's CallbackInfo; its 'object' member holds the
+// owning RtApiAsio instance.
+static unsigned __stdcall asioStopStream( void *ptr )
+{
+  CallbackInfo *info = (CallbackInfo *) ptr;
+  RtApiAsio *object = (RtApiAsio *) info->object;
+
+  object->stopStream();
+  _endthreadex( 0 );
+  return 0;
+}
+
+// Per-buffer-switch worker, driven by the ASIO bufferSwitch callback.
+// 'bufferIndex' selects which half of each channel's double buffer to use.
+// Handles drain completion, invokes the user callback for fresh output, then
+// moves audio between the user/device buffers and the driver's per-channel
+// buffers, de/interleaving and byte-swapping as configured during device
+// probing. Returns SUCCESS normally, FAILURE only if called on a closed
+// stream.
+bool RtApiAsio :: callbackEvent( long bufferIndex )
+{
+  if ( stream_.state == STREAM_STOPPED || stream_.state == STREAM_STOPPING ) return SUCCESS;
+  if ( stream_.state == STREAM_CLOSED ) {
+    errorText_ = "RtApiAsio::callbackEvent(): the stream is closed ... this shouldn't happen!";
+    error( RtAudioError::WARNING );
+    return FAILURE;
+  }
+
+  CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
+  AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
+
+  // Check if we were draining the stream and signal if finished.
+  if ( handle->drainCounter > 3 ) {
+
+    stream_.state = STREAM_STOPPING;
+    if ( handle->internalDrain == false )
+      // stopStream() is blocked on this event; wake it up.
+      SetEvent( handle->condition );
+    else { // spawn a thread to stop the stream
+      unsigned threadId;
+      stream_.callbackInfo.thread = _beginthreadex( NULL, 0, &asioStopStream,
+                                                    &stream_.callbackInfo, 0, &threadId );
+    }
+    return SUCCESS;
+  }
+
+  // Invoke user callback to get fresh output data UNLESS we are
+  // draining stream.
+  if ( handle->drainCounter == 0 ) {
+    RtAudioCallback callback = (RtAudioCallback) info->callback;
+    double streamTime = getStreamTime();
+    RtAudioStreamStatus status = 0;
+    // Report (and clear) any xrun flagged by the driver since the last tick.
+    if ( stream_.mode != INPUT && asioXRun == true ) {
+      status |= RTAUDIO_OUTPUT_UNDERFLOW;
+      asioXRun = false;
+    }
+    if ( stream_.mode != OUTPUT && asioXRun == true ) {
+      status |= RTAUDIO_INPUT_OVERFLOW;
+      asioXRun = false;
+    }
+    int cbReturnValue = callback( stream_.userBuffer[0], stream_.userBuffer[1],
+                                  stream_.bufferSize, streamTime, status, info->userData );
+    // Callback return 2: abort now (spawn stop thread); 1: drain output first.
+    if ( cbReturnValue == 2 ) {
+      stream_.state = STREAM_STOPPING;
+      handle->drainCounter = 2;
+      unsigned threadId;
+      stream_.callbackInfo.thread = _beginthreadex( NULL, 0, &asioStopStream,
+                                                    &stream_.callbackInfo, 0, &threadId );
+      return SUCCESS;
+    }
+    else if ( cbReturnValue == 1 ) {
+      handle->drainCounter = 1;
+      handle->internalDrain = true;
+    }
+  }
+
+  unsigned int nChannels, bufferBytes, i, j;
+  nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];
+  if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
+
+    bufferBytes = stream_.bufferSize * formatBytes( stream_.deviceFormat[0] );
+
+    if ( handle->drainCounter > 1 ) { // write zeros to the output stream
+
+      for ( i=0, j=0; i<nChannels; i++ ) {
+        if ( handle->bufferInfos[i].isInput != ASIOTrue )
+          memset( handle->bufferInfos[i].buffers[bufferIndex], 0, bufferBytes );
+      }
+
+    }
+    else if ( stream_.doConvertBuffer[0] ) {
+
+      // Convert (format/interleaving) into deviceBuffer, then scatter each
+      // de-interleaved channel into the driver's per-channel buffer.
+      convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
+      if ( stream_.doByteSwap[0] )
+        byteSwapBuffer( stream_.deviceBuffer,
+                        stream_.bufferSize * stream_.nDeviceChannels[0],
+                        stream_.deviceFormat[0] );
+
+      for ( i=0, j=0; i<nChannels; i++ ) {
+        if ( handle->bufferInfos[i].isInput != ASIOTrue )
+          memcpy( handle->bufferInfos[i].buffers[bufferIndex],
+                  &stream_.deviceBuffer[j++*bufferBytes], bufferBytes );
+      }
+
+    }
+    else {
+
+      // No conversion needed: copy straight from the user buffer.
+      if ( stream_.doByteSwap[0] )
+        byteSwapBuffer( stream_.userBuffer[0],
+                        stream_.bufferSize * stream_.nUserChannels[0],
+                        stream_.userFormat );
+
+      for ( i=0, j=0; i<nChannels; i++ ) {
+        if ( handle->bufferInfos[i].isInput != ASIOTrue )
+          memcpy( handle->bufferInfos[i].buffers[bufferIndex],
+                  &stream_.userBuffer[0][bufferBytes*j++], bufferBytes );
+      }
+
+    }
+  }
+
+  // Don't bother draining input
+  if ( handle->drainCounter ) {
+    handle->drainCounter++;
+    goto unlock;
+  }
+
+  if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
+
+    bufferBytes = stream_.bufferSize * formatBytes(stream_.deviceFormat[1]);
+
+    if (stream_.doConvertBuffer[1]) {
+
+      // Always interleave ASIO input data.
+      for ( i=0, j=0; i<nChannels; i++ ) {
+        if ( handle->bufferInfos[i].isInput == ASIOTrue )
+          memcpy( &stream_.deviceBuffer[j++*bufferBytes],
+                  handle->bufferInfos[i].buffers[bufferIndex],
+                  bufferBytes );
+      }
+
+      if ( stream_.doByteSwap[1] )
+        byteSwapBuffer( stream_.deviceBuffer,
+                        stream_.bufferSize * stream_.nDeviceChannels[1],
+                        stream_.deviceFormat[1] );
+      convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
+
+    }
+    else {
+      // Direct copy of each input channel into the user buffer.
+      for ( i=0, j=0; i<nChannels; i++ ) {
+        if ( handle->bufferInfos[i].isInput == ASIOTrue ) {
+          memcpy( &stream_.userBuffer[1][bufferBytes*j++],
+                  handle->bufferInfos[i].buffers[bufferIndex],
+                  bufferBytes );
+        }
+      }
+
+      if ( stream_.doByteSwap[1] )
+        byteSwapBuffer( stream_.userBuffer[1],
+                        stream_.bufferSize * stream_.nUserChannels[1],
+                        stream_.userFormat );
+    }
+  }
+
+ unlock:
+  // The following call was suggested by Malte Clasen. While the API
+  // documentation indicates it should not be required, some device
+  // drivers apparently do not function correctly without it.
+  ASIOOutputReady();
+
+  RtApi::tickStreamTime();
+  return SUCCESS;
+}
+
+// ASIO driver notification: the hardware sample rate changed to 'sRate'.
+// RtAudio cannot follow a live rate change, so the stream is stopped (via the
+// instance stashed in asioCallbackInfo) and the user is informed on stderr.
+static void sampleRateChanged( ASIOSampleRate sRate )
+{
+  // The ASIO documentation says that this usually only happens during
+  // external sync. Audio processing is not stopped by the driver,
+  // actual sample rate might not have even changed, maybe only the
+  // sample rate status of an AES/EBU or S/PDIF digital input at the
+  // audio device.
+
+  RtApi *object = (RtApi *) asioCallbackInfo->object;
+  try {
+    object->stopStream();
+  }
+  catch ( RtAudioError &exception ) {
+    std::cerr << "\nRtApiAsio: sampleRateChanged() error (" << exception.getMessage() << ")!\n" << std::endl;
+    return;
+  }
+
+  std::cerr << "\nRtApiAsio: driver reports sample rate changed to " << sRate << " ... stream stopped!!!\n" << std::endl;
+}
+
+// ASIO host message callback: answers driver queries and notifications.
+// Returns a non-zero value for selectors this host handles (1L, or the
+// supported engine version for kAsioEngineVersion) and 0 otherwise.
+static long asioMessages( long selector, long value, void* /*message*/, double* /*opt*/ )
+{
+  long ret = 0;
+
+  switch( selector ) {
+  case kAsioSelectorSupported:
+    if ( value == kAsioResetRequest
+         || value == kAsioEngineVersion
+         || value == kAsioResyncRequest
+         || value == kAsioLatenciesChanged
+         // The following three were added for ASIO 2.0, you don't
+         // necessarily have to support them.
+         || value == kAsioSupportsTimeInfo
+         || value == kAsioSupportsTimeCode
+         || value == kAsioSupportsInputMonitor)
+      ret = 1L;
+    break;
+  case kAsioResetRequest:
+    // Defer the task and perform the reset of the driver during the
+    // next "safe" situation. You cannot reset the driver right now,
+    // as this code is called from the driver. Resetting the driver is
+    // done by completely destructing it, i.e. ASIOStop(),
+    // ASIODisposeBuffers(), destruction; afterwards you initialize the
+    // driver again.
+    std::cerr << "\nRtApiAsio: driver reset requested!!!" << std::endl;
+    ret = 1L;
+    break;
+  case kAsioResyncRequest:
+    // This informs the application that the driver encountered some
+    // non-fatal data loss. It is used for synchronization purposes
+    // of different media. Added mainly to work around the Win16Mutex
+    // problems in Windows 95/98 with the Windows Multimedia system,
+    // which could lose data because the Mutex was held too long by
+    // another thread. However a driver can issue it in other
+    // situations, too.
+    // std::cerr << "\nRtApiAsio: driver resync requested!!!" << std::endl;
+    asioXRun = true;  // surfaced to the user callback as an xrun status flag
+    ret = 1L;
+    break;
+  case kAsioLatenciesChanged:
+    // This will inform the host application that the driver's
+    // latencies have changed. Beware, this does not mean that the
+    // buffer sizes have changed! You might need to update internal
+    // delay data.
+    std::cerr << "\nRtApiAsio: driver latency may have changed!!!" << std::endl;
+    ret = 1L;
+    break;
+  case kAsioEngineVersion:
+    // Return the supported ASIO version of the host application. If
+    // a host application does not implement this selector, ASIO 1.0
+    // is assumed by the driver.
+    ret = 2L;
+    break;
+  case kAsioSupportsTimeInfo:
+    // Informs the driver whether the
+    // asioCallbacks.bufferSwitchTimeInfo() callback is supported.
+    // For compatibility with ASIO 1.0 drivers the host application
+    // should always support the "old" bufferSwitch method, too.
+    ret = 0;
+    break;
+  case kAsioSupportsTimeCode:
+    // Informs the driver whether application is interested in time
+    // code info. If an application does not need to know about time
+    // code, the driver has less work to do.
+    ret = 0;
+    break;
+  }
+  return ret;
+}
+
+// Map an ASIOError code to a human-readable description.
+// Returns a pointer into a static table; "Unknown error." for unmapped codes.
+static const char* getAsioErrorString( ASIOError result )
+{
+  struct Messages
+  {
+    ASIOError value;
+    const char*message;
+  };
+
+  static const Messages m[] =
+    {
+      {   ASE_NotPresent,    "Hardware input or output is not present or available." },
+      {   ASE_HWMalfunction,  "Hardware is malfunctioning." },
+      {   ASE_InvalidParameter, "Invalid input parameter." },
+      {   ASE_InvalidMode,      "Invalid mode." },
+      {   ASE_SPNotAdvancing,     "Sample position not advancing." },
+      {   ASE_NoClock,            "Sample clock or rate cannot be determined or is not present." },
+      {   ASE_NoMemory,           "Not enough memory to complete the request." }
+    };
+
+  // Linear scan is fine: the table is tiny and this is an error path.
+  for ( unsigned int i = 0; i < sizeof(m)/sizeof(m[0]); ++i )
+    if ( m[i].value == result ) return m[i].message;
+
+  return "Unknown error.";
+}
+
+//******************** End of __WINDOWS_ASIO__ *********************//
+#endif
+
+
+#if defined(__WINDOWS_WASAPI__) // Windows WASAPI API
+
+// Authored by Marcus Tomlinson <themarcustomlinson@gmail.com>, April 2014
+// - Introduces support for the Windows WASAPI API
+// - Aims to deliver bit streams to and from hardware at the lowest possible latency, via the absolute minimum buffer sizes required
+// - Provides flexible stream configuration to an otherwise strict and inflexible WASAPI interface
+// - Includes automatic internal conversion of sample rate and buffer size between hardware and the user
+
+#ifndef INITGUID
+ #define INITGUID
+#endif
+
+#include <mfapi.h>
+#include <mferror.h>
+#include <mfplay.h>
+#include <mftransform.h>
+#include <wmcodecdsp.h>
+
+#include <audioclient.h>
+#include <avrt.h>
+#include <mmdeviceapi.h>
+#include <functiondiscoverykeys_devpkey.h>
+
+#ifndef MF_E_TRANSFORM_NEED_MORE_INPUT
+ #define MF_E_TRANSFORM_NEED_MORE_INPUT _HRESULT_TYPEDEF_(0xc00d6d72)
+#endif
+
+#ifndef MFSTARTUP_NOSOCKET
+ #define MFSTARTUP_NOSOCKET 0x1
+#endif
+
+#ifdef _MSC_VER
+ #pragma comment( lib, "ksuser" )
+ #pragma comment( lib, "mfplat.lib" )
+ #pragma comment( lib, "mfuuid.lib" )
+ #pragma comment( lib, "wmcodecdspuuid" )
+#endif
+
+//=============================================================================
+
+// COM convenience: Release() an interface pointer if non-null, then null it
+// out so a later SAFE_RELEASE / use-after-free is harmless.
+#define SAFE_RELEASE( objectPtr )\
+if ( objectPtr )\
+{\
+  objectPtr->Release();\
+  objectPtr = NULL;\
+}
+
+typedef HANDLE ( __stdcall *TAvSetMmThreadCharacteristicsPtr )( LPCWSTR TaskName, LPDWORD TaskIndex );
+
+//-----------------------------------------------------------------------------
+
+// WASAPI dictates stream sample rate, format, channel count, and in some cases, buffer size.
+// Therefore we must perform all necessary conversions to user buffers in order to satisfy these
+// requirements. WasapiBuffer ring buffers are used between HwIn->UserIn and UserOut->HwOut to
+// provide intermediate storage for read / write synchronization.
+// Single-producer/single-consumer byte ring buffer used to bridge the WASAPI
+// hardware thread and the user callback. Indices are in samples; the backing
+// store holds bufferSize_ samples of the configured format.
+class WasapiBuffer
+{
+public:
+  WasapiBuffer()
+    : buffer_( NULL ),
+      bufferSize_( 0 ),
+      inIndex_( 0 ),
+      outIndex_( 0 ) {}
+
+  ~WasapiBuffer() {
+    free( buffer_ );
+  }
+
+  // sets the length of the internal ring buffer
+  // bufferSize is in samples and formatBytes is the size of one sample, so
+  // the zero-initialized backing store is bufferSize * formatBytes bytes.
+  // Any previously buffered data is discarded and both indices reset.
+  void setBufferSize( unsigned int bufferSize, unsigned int formatBytes ) {
+    free( buffer_ );
+
+    // NOTE(review): calloc failure is not checked here — a later push/pull
+    // would memcpy through NULL; confirm callers guarantee small-enough sizes.
+    buffer_ = ( char* ) calloc( bufferSize, formatBytes );
+
+    bufferSize_ = bufferSize;
+    inIndex_ = 0;
+    outIndex_ = 0;
+  }
+
+  // attempt to push a buffer into the ring buffer at the current "in" index
+  // Returns false (without copying anything) if the input is empty/NULL or
+  // there is not enough free space; on success advances inIndex_ with wrap.
+  bool pushBuffer( char* buffer, unsigned int bufferSize, RtAudioFormat format )
+  {
+    if ( !buffer ||                 // incoming buffer is NULL
+         bufferSize == 0 ||         // incoming buffer has no data
+         bufferSize > bufferSize_ ) // incoming buffer too large
+    {
+      return false;
+    }
+
+    // Unwrap the "out" index so the overlap test below works across the
+    // wrap-around point.
+    unsigned int relOutIndex = outIndex_;
+    unsigned int inIndexEnd = inIndex_ + bufferSize;
+    if ( relOutIndex < inIndex_ && inIndexEnd >= bufferSize_ ) {
+      relOutIndex += bufferSize_;
+    }
+
+    // the "IN" index CAN BEGIN at the "OUT" index
+    // the "IN" index CANNOT END at the "OUT" index
+    if ( inIndex_ < relOutIndex && inIndexEnd >= relOutIndex ) {
+      return false; // not enough space between "in" index and "out" index
+    }
+
+    // copy buffer from external to internal
+    // fromZeroSize: samples that wrap past the end and land at offset zero.
+    int fromZeroSize = inIndex_ + bufferSize - bufferSize_;
+    fromZeroSize = fromZeroSize < 0 ? 0 : fromZeroSize;
+    int fromInSize = bufferSize - fromZeroSize;
+
+    switch( format )
+      {
+      case RTAUDIO_SINT8:
+        memcpy( &( ( char* ) buffer_ )[inIndex_], buffer, fromInSize * sizeof( char ) );
+        memcpy( buffer_, &( ( char* ) buffer )[fromInSize], fromZeroSize * sizeof( char ) );
+        break;
+      case RTAUDIO_SINT16:
+        memcpy( &( ( short* ) buffer_ )[inIndex_], buffer, fromInSize * sizeof( short ) );
+        memcpy( buffer_, &( ( short* ) buffer )[fromInSize], fromZeroSize * sizeof( short ) );
+        break;
+      case RTAUDIO_SINT24:
+        memcpy( &( ( S24* ) buffer_ )[inIndex_], buffer, fromInSize * sizeof( S24 ) );
+        memcpy( buffer_, &( ( S24* ) buffer )[fromInSize], fromZeroSize * sizeof( S24 ) );
+        break;
+      case RTAUDIO_SINT32:
+        memcpy( &( ( int* ) buffer_ )[inIndex_], buffer, fromInSize * sizeof( int ) );
+        memcpy( buffer_, &( ( int* ) buffer )[fromInSize], fromZeroSize * sizeof( int ) );
+        break;
+      case RTAUDIO_FLOAT32:
+        memcpy( &( ( float* ) buffer_ )[inIndex_], buffer, fromInSize * sizeof( float ) );
+        memcpy( buffer_, &( ( float* ) buffer )[fromInSize], fromZeroSize * sizeof( float ) );
+        break;
+      case RTAUDIO_FLOAT64:
+        memcpy( &( ( double* ) buffer_ )[inIndex_], buffer, fromInSize * sizeof( double ) );
+        memcpy( buffer_, &( ( double* ) buffer )[fromInSize], fromZeroSize * sizeof( double ) );
+        break;
+      }
+
+    // update "in" index
+    inIndex_ += bufferSize;
+    inIndex_ %= bufferSize_;
+
+    return true;
+  }
+
+  // attempt to pull a buffer from the ring buffer from the current "out" index
+  // Mirror image of pushBuffer(): returns false if the request is empty/NULL
+  // or would read past the writer; on success advances outIndex_ with wrap.
+  bool pullBuffer( char* buffer, unsigned int bufferSize, RtAudioFormat format )
+  {
+    if ( !buffer ||                 // incoming buffer is NULL
+         bufferSize == 0 ||         // incoming buffer has no data
+         bufferSize > bufferSize_ ) // incoming buffer too large
+    {
+      return false;
+    }
+
+    // Unwrap the "in" index so the overlap test below works across the
+    // wrap-around point.
+    unsigned int relInIndex = inIndex_;
+    unsigned int outIndexEnd = outIndex_ + bufferSize;
+    if ( relInIndex < outIndex_ && outIndexEnd >= bufferSize_ ) {
+      relInIndex += bufferSize_;
+    }
+
+    // the "OUT" index CANNOT BEGIN at the "IN" index
+    // the "OUT" index CAN END at the "IN" index
+    if ( outIndex_ <= relInIndex && outIndexEnd > relInIndex ) {
+      return false; // not enough space between "out" index and "in" index
+    }
+
+    // copy buffer from internal to external
+    // fromZeroSize: samples that wrap past the end and continue at offset zero.
+    int fromZeroSize = outIndex_ + bufferSize - bufferSize_;
+    fromZeroSize = fromZeroSize < 0 ? 0 : fromZeroSize;
+    int fromOutSize = bufferSize - fromZeroSize;
+
+    switch( format )
+    {
+      case RTAUDIO_SINT8:
+        memcpy( buffer, &( ( char* ) buffer_ )[outIndex_], fromOutSize * sizeof( char ) );
+        memcpy( &( ( char* ) buffer )[fromOutSize], buffer_, fromZeroSize * sizeof( char ) );
+        break;
+      case RTAUDIO_SINT16:
+        memcpy( buffer, &( ( short* ) buffer_ )[outIndex_], fromOutSize * sizeof( short ) );
+        memcpy( &( ( short* ) buffer )[fromOutSize], buffer_, fromZeroSize * sizeof( short ) );
+        break;
+      case RTAUDIO_SINT24:
+        memcpy( buffer, &( ( S24* ) buffer_ )[outIndex_], fromOutSize * sizeof( S24 ) );
+        memcpy( &( ( S24* ) buffer )[fromOutSize], buffer_, fromZeroSize * sizeof( S24 ) );
+        break;
+      case RTAUDIO_SINT32:
+        memcpy( buffer, &( ( int* ) buffer_ )[outIndex_], fromOutSize * sizeof( int ) );
+        memcpy( &( ( int* ) buffer )[fromOutSize], buffer_, fromZeroSize * sizeof( int ) );
+        break;
+      case RTAUDIO_FLOAT32:
+        memcpy( buffer, &( ( float* ) buffer_ )[outIndex_], fromOutSize * sizeof( float ) );
+        memcpy( &( ( float* ) buffer )[fromOutSize], buffer_, fromZeroSize * sizeof( float ) );
+        break;
+      case RTAUDIO_FLOAT64:
+        memcpy( buffer, &( ( double* ) buffer_ )[outIndex_], fromOutSize * sizeof( double ) );
+        memcpy( &( ( double* ) buffer )[fromOutSize], buffer_, fromZeroSize * sizeof( double ) );
+        break;
+    }
+
+    // update "out" index
+    outIndex_ += bufferSize;
+    outIndex_ %= bufferSize_;
+
+    return true;
+  }
+
+private:
+  char* buffer_;              // backing store (malloc'd); NULL until setBufferSize()
+  unsigned int bufferSize_;   // capacity in samples
+  unsigned int inIndex_;      // next write position (samples)
+  unsigned int outIndex_;     // next read position (samples)
+};
+
+//-----------------------------------------------------------------------------
+
+// In order to satisfy WASAPI's buffer requirements, we need a means of converting sample rate
+// between HW and the user. The WasapiResampler class is used to perform this conversion between
+// HwIn->UserIn and UserOut->HwOut during the stream callback loop.
+class WasapiResampler
+{
+public:
+ WasapiResampler( bool isFloat, unsigned int bitsPerSample, unsigned int channelCount,
+ unsigned int inSampleRate, unsigned int outSampleRate )
+ : _bytesPerSample( bitsPerSample / 8 )
+ , _channelCount( channelCount )
+ , _sampleRatio( ( float ) outSampleRate / inSampleRate )
+ , _transformUnk( NULL )
+ , _transform( NULL )
+ , _mediaType( NULL )
+ , _inputMediaType( NULL )
+ , _outputMediaType( NULL )
+
+ #ifdef __IWMResamplerProps_FWD_DEFINED__
+ , _resamplerProps( NULL )
+ #endif
+ {
+ // 1. Initialization: bring up Media Foundation (balanced by MFShutdown() in the destructor)
+
+ MFStartup( MF_VERSION, MFSTARTUP_NOSOCKET ); // NOTE(review): HRESULTs in this ctor are unchecked — confirm failure handling upstream
+
+ // 2. Create Resampler Transform Object
+
+ CoCreateInstance( CLSID_CResamplerMediaObject, NULL, CLSCTX_INPROC_SERVER,
+ IID_IUnknown, ( void** ) &_transformUnk );
+
+ _transformUnk->QueryInterface( IID_PPV_ARGS( &_transform ) );
+
+ #ifdef __IWMResamplerProps_FWD_DEFINED__
+ _transformUnk->QueryInterface( IID_PPV_ARGS( &_resamplerProps ) );
+ _resamplerProps->SetHalfFilterLength( 60 ); // best conversion quality
+ #endif
+
+ // 3. Specify input / output format
+
+ MFCreateMediaType( &_mediaType );
+ _mediaType->SetGUID( MF_MT_MAJOR_TYPE, MFMediaType_Audio );
+ _mediaType->SetGUID( MF_MT_SUBTYPE, isFloat ? MFAudioFormat_Float : MFAudioFormat_PCM );
+ _mediaType->SetUINT32( MF_MT_AUDIO_NUM_CHANNELS, channelCount );
+ _mediaType->SetUINT32( MF_MT_AUDIO_SAMPLES_PER_SECOND, inSampleRate );
+ _mediaType->SetUINT32( MF_MT_AUDIO_BLOCK_ALIGNMENT, _bytesPerSample * channelCount );
+ _mediaType->SetUINT32( MF_MT_AUDIO_AVG_BYTES_PER_SECOND, _bytesPerSample * channelCount * inSampleRate );
+ _mediaType->SetUINT32( MF_MT_AUDIO_BITS_PER_SAMPLE, bitsPerSample );
+ _mediaType->SetUINT32( MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE );
+
+ MFCreateMediaType( &_inputMediaType );
+ _mediaType->CopyAllItems( _inputMediaType );
+
+ _transform->SetInputType( 0, _inputMediaType, 0 );
+
+ MFCreateMediaType( &_outputMediaType );
+ _mediaType->CopyAllItems( _outputMediaType );
+
+ _outputMediaType->SetUINT32( MF_MT_AUDIO_SAMPLES_PER_SECOND, outSampleRate );
+ _outputMediaType->SetUINT32( MF_MT_AUDIO_AVG_BYTES_PER_SECOND, _bytesPerSample * channelCount * outSampleRate );
+
+ _transform->SetOutputType( 0, _outputMediaType, 0 );
+
+ // 4. Send stream start messages to Resampler
+
+ _transform->ProcessMessage( MFT_MESSAGE_COMMAND_FLUSH, 0 );
+ _transform->ProcessMessage( MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0 );
+ _transform->ProcessMessage( MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0 );
+ }
+
+ ~WasapiResampler()
+ {
+ // 8. Send stream stop messages to Resampler
+
+ _transform->ProcessMessage( MFT_MESSAGE_NOTIFY_END_OF_STREAM, 0 );
+ _transform->ProcessMessage( MFT_MESSAGE_NOTIFY_END_STREAMING, 0 );
+
+ // 9. Cleanup: shut down Media Foundation and release every COM interface acquired in the ctor
+
+ MFShutdown();
+
+ SAFE_RELEASE( _transformUnk );
+ SAFE_RELEASE( _transform );
+ SAFE_RELEASE( _mediaType );
+ SAFE_RELEASE( _inputMediaType );
+ SAFE_RELEASE( _outputMediaType );
+
+ #ifdef __IWMResamplerProps_FWD_DEFINED__
+ SAFE_RELEASE( _resamplerProps );
+ #endif
+ }
+
+ void Convert( char* outBuffer, const char* inBuffer, unsigned int inSampleCount, unsigned int& outSampleCount, int maxOutSampleCount = -1 )
+ {
+ unsigned int inputBufferSize = _bytesPerSample * _channelCount * inSampleCount;
+ if ( _sampleRatio == 1 )
+ {
+ // no sample rate conversion required
+ memcpy( outBuffer, inBuffer, inputBufferSize );
+ outSampleCount = inSampleCount;
+ return;
+ }
+
+ unsigned int outputBufferSize = 0;
+ if ( maxOutSampleCount != -1 )
+ {
+ outputBufferSize = _bytesPerSample * _channelCount * maxOutSampleCount;
+ }
+ else
+ {
+ outputBufferSize = ( unsigned int ) ceilf( inputBufferSize * _sampleRatio ) + ( _bytesPerSample * _channelCount ); // scaled byte count plus one frame of headroom
+ }
+
+ IMFMediaBuffer* rInBuffer;
+ IMFSample* rInSample;
+ BYTE* rInByteBuffer = NULL;
+
+ // 5. Create Sample object from input data
+
+ MFCreateMemoryBuffer( inputBufferSize, &rInBuffer );
+
+ rInBuffer->Lock( &rInByteBuffer, NULL, NULL );
+ memcpy( rInByteBuffer, inBuffer, inputBufferSize );
+ rInBuffer->Unlock();
+ rInByteBuffer = NULL;
+
+ rInBuffer->SetCurrentLength( inputBufferSize );
+
+ MFCreateSample( &rInSample );
+ rInSample->AddBuffer( rInBuffer );
+
+ // 6. Pass input data to Resampler
+
+ _transform->ProcessInput( 0, rInSample, 0 );
+
+ SAFE_RELEASE( rInBuffer );
+ SAFE_RELEASE( rInSample );
+
+ // 7. Perform sample rate conversion
+
+ IMFMediaBuffer* rOutBuffer = NULL;
+ BYTE* rOutByteBuffer = NULL;
+
+ MFT_OUTPUT_DATA_BUFFER rOutDataBuffer;
+ DWORD rStatus;
+ DWORD rBytes = outputBufferSize; // maximum bytes accepted per ProcessOutput
+
+ // 7.1 Create Sample object for output data
+
+ memset( &rOutDataBuffer, 0, sizeof rOutDataBuffer );
+ MFCreateSample( &( rOutDataBuffer.pSample ) );
+ MFCreateMemoryBuffer( rBytes, &rOutBuffer );
+ rOutDataBuffer.pSample->AddBuffer( rOutBuffer );
+ rOutDataBuffer.dwStreamID = 0;
+ rOutDataBuffer.dwStatus = 0;
+ rOutDataBuffer.pEvents = NULL;
+
+ // 7.2 Get output data from Resampler
+
+ if ( _transform->ProcessOutput( 0, 1, &rOutDataBuffer, &rStatus ) == MF_E_TRANSFORM_NEED_MORE_INPUT )
+ {
+ outSampleCount = 0; // resampler buffered the input; nothing to emit yet
+ SAFE_RELEASE( rOutBuffer );
+ SAFE_RELEASE( rOutDataBuffer.pSample );
+ return;
+ }
+
+ // 7.3 Write output data to outBuffer
+
+ SAFE_RELEASE( rOutBuffer );
+ rOutDataBuffer.pSample->ConvertToContiguousBuffer( &rOutBuffer );
+ rOutBuffer->GetCurrentLength( &rBytes );
+
+ rOutBuffer->Lock( &rOutByteBuffer, NULL, NULL );
+ memcpy( outBuffer, rOutByteBuffer, rBytes );
+ rOutBuffer->Unlock();
+ rOutByteBuffer = NULL;
+
+ outSampleCount = rBytes / _bytesPerSample / _channelCount; // bytes -> frames
+ SAFE_RELEASE( rOutBuffer );
+ SAFE_RELEASE( rOutDataBuffer.pSample );
+ }
+
+private:
+ unsigned int _bytesPerSample;
+ unsigned int _channelCount;
+ float _sampleRatio;
+
+ IUnknown* _transformUnk;
+ IMFTransform* _transform;
+ IMFMediaType* _mediaType;
+ IMFMediaType* _inputMediaType;
+ IMFMediaType* _outputMediaType;
+
+ #ifdef __IWMResamplerProps_FWD_DEFINED__
+ IWMResamplerProps* _resamplerProps;
+ #endif
+};
+
+//-----------------------------------------------------------------------------
+
+// Per-stream WASAPI state: audio client / capture / render COM interfaces plus the event handles used to pace the stream.
+struct WasapiHandle
+{
+ IAudioClient* captureAudioClient;
+ IAudioClient* renderAudioClient;
+ IAudioCaptureClient* captureClient;
+ IAudioRenderClient* renderClient;
+ HANDLE captureEvent;
+ HANDLE renderEvent;
+
+ WasapiHandle()
+ : captureAudioClient( NULL ),
+ renderAudioClient( NULL ),
+ captureClient( NULL ),
+ renderClient( NULL ),
+ captureEvent( NULL ),
+ renderEvent( NULL ) {}
+};
+
+//=============================================================================
+
+RtApiWasapi::RtApiWasapi()
+ : coInitialized_( false ), deviceEnumerator_( NULL )
+{
+ // WASAPI can run either apartment or multi-threaded
+ HRESULT hr = CoInitialize( NULL );
+ if ( !FAILED( hr ) ) // S_OK or S_FALSE both count; balanced by CoUninitialize() in the dtor
+ coInitialized_ = true;
+
+ // Instantiate device enumerator
+ hr = CoCreateInstance( __uuidof( MMDeviceEnumerator ), NULL,
+ CLSCTX_ALL, __uuidof( IMMDeviceEnumerator ),
+ ( void** ) &deviceEnumerator_ );
+
+ // If this runs on an old Windows (no WASAPI support), it will fail. Ignore and proceed.
+ if ( FAILED( hr ) )
+ deviceEnumerator_ = NULL;
+}
+
+//-----------------------------------------------------------------------------
+
+RtApiWasapi::~RtApiWasapi()
+{
+ if ( stream_.state != STREAM_CLOSED )
+ closeStream();
+
+ SAFE_RELEASE( deviceEnumerator_ );
+
+ // Balance the CoInitialize() made in the constructor, if it succeeded there
+ if ( coInitialized_ )
+ CoUninitialize();
+}
+
+//=============================================================================
+
+unsigned int RtApiWasapi::getDeviceCount( void )
+{
+ unsigned int captureDeviceCount = 0;
+ unsigned int renderDeviceCount = 0;
+
+ IMMDeviceCollection* captureDevices = NULL;
+ IMMDeviceCollection* renderDevices = NULL;
+
+ if ( !deviceEnumerator_ ) // enumerator creation failed in the ctor: no WASAPI devices
+ return 0;
+
+ // Count capture devices
+ errorText_.clear();
+ HRESULT hr = deviceEnumerator_->EnumAudioEndpoints( eCapture, DEVICE_STATE_ACTIVE, &captureDevices );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::getDeviceCount: Unable to retrieve capture device collection.";
+ goto Exit;
+ }
+
+ hr = captureDevices->GetCount( &captureDeviceCount );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::getDeviceCount: Unable to retrieve capture device count.";
+ goto Exit;
+ }
+
+ // Count render devices
+ hr = deviceEnumerator_->EnumAudioEndpoints( eRender, DEVICE_STATE_ACTIVE, &renderDevices );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::getDeviceCount: Unable to retrieve render device collection.";
+ goto Exit;
+ }
+
+ hr = renderDevices->GetCount( &renderDeviceCount );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::getDeviceCount: Unable to retrieve render device count.";
+ goto Exit;
+ }
+
+Exit:
+ // release all references (reached on both success and failure paths)
+ SAFE_RELEASE( captureDevices );
+ SAFE_RELEASE( renderDevices );
+
+ if ( errorText_.empty() )
+ return captureDeviceCount + renderDeviceCount;
+
+ error( RtAudioError::DRIVER_ERROR ); // reports the accumulated errorText_
+ return 0;
+}
+
+//-----------------------------------------------------------------------------
+
+RtAudio::DeviceInfo RtApiWasapi::getDeviceInfo( unsigned int device )
+{
+ RtAudio::DeviceInfo info;
+ unsigned int captureDeviceCount = 0;
+ unsigned int renderDeviceCount = 0;
+ std::string defaultDeviceName;
+ bool isCaptureDevice = false;
+
+ PROPVARIANT deviceNameProp;
+ PROPVARIANT defaultDeviceNameProp;
+
+ IMMDeviceCollection* captureDevices = NULL;
+ IMMDeviceCollection* renderDevices = NULL;
+ IMMDevice* devicePtr = NULL;
+ IMMDevice* defaultDevicePtr = NULL;
+ IAudioClient* audioClient = NULL;
+ IPropertyStore* devicePropStore = NULL;
+ IPropertyStore* defaultDevicePropStore = NULL;
+
+ WAVEFORMATEX* deviceFormat = NULL;
+ WAVEFORMATEX* closestMatchFormat = NULL;
+
+ // probed: pessimistically false until every query below succeeds
+ info.probed = false;
+
+ // Count capture devices
+ errorText_.clear();
+ RtAudioError::Type errorType = RtAudioError::DRIVER_ERROR;
+ HRESULT hr = deviceEnumerator_->EnumAudioEndpoints( eCapture, DEVICE_STATE_ACTIVE, &captureDevices );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve capture device collection.";
+ goto Exit;
+ }
+
+ hr = captureDevices->GetCount( &captureDeviceCount );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve capture device count.";
+ goto Exit;
+ }
+
+ // Count render devices
+ hr = deviceEnumerator_->EnumAudioEndpoints( eRender, DEVICE_STATE_ACTIVE, &renderDevices );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve render device collection.";
+ goto Exit;
+ }
+
+ hr = renderDevices->GetCount( &renderDeviceCount );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve render device count.";
+ goto Exit;
+ }
+
+ // validate device index
+ if ( device >= captureDeviceCount + renderDeviceCount ) {
+ errorText_ = "RtApiWasapi::getDeviceInfo: Invalid device index.";
+ errorType = RtAudioError::INVALID_USE;
+ goto Exit;
+ }
+
+ // determine whether index falls within capture or render devices (render devices occupy the lower indices)
+ if ( device >= renderDeviceCount ) {
+ hr = captureDevices->Item( device - renderDeviceCount, &devicePtr );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve capture device handle.";
+ goto Exit;
+ }
+ isCaptureDevice = true;
+ }
+ else {
+ hr = renderDevices->Item( device, &devicePtr );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve render device handle.";
+ goto Exit;
+ }
+ isCaptureDevice = false;
+ }
+
+ // get default device name (console-role default endpoint for the matching direction)
+ if ( isCaptureDevice ) {
+ hr = deviceEnumerator_->GetDefaultAudioEndpoint( eCapture, eConsole, &defaultDevicePtr );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve default capture device handle.";
+ goto Exit;
+ }
+ }
+ else {
+ hr = deviceEnumerator_->GetDefaultAudioEndpoint( eRender, eConsole, &defaultDevicePtr );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve default render device handle.";
+ goto Exit;
+ }
+ }
+
+ hr = defaultDevicePtr->OpenPropertyStore( STGM_READ, &defaultDevicePropStore );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::getDeviceInfo: Unable to open default device property store.";
+ goto Exit;
+ }
+ PropVariantInit( &defaultDeviceNameProp );
+
+ hr = defaultDevicePropStore->GetValue( PKEY_Device_FriendlyName, &defaultDeviceNameProp );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve default device property: PKEY_Device_FriendlyName.";
+ goto Exit;
+ }
+
+ defaultDeviceName = convertCharPointerToStdString(defaultDeviceNameProp.pwszVal);
+
+ // device friendly name
+ hr = devicePtr->OpenPropertyStore( STGM_READ, &devicePropStore );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::getDeviceInfo: Unable to open device property store.";
+ goto Exit;
+ }
+
+ PropVariantInit( &deviceNameProp );
+
+ hr = devicePropStore->GetValue( PKEY_Device_FriendlyName, &deviceNameProp );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve device property: PKEY_Device_FriendlyName.";
+ goto Exit;
+ }
+
+ info.name =convertCharPointerToStdString(deviceNameProp.pwszVal);
+
+ // default flags: matched by friendly name against the console default endpoint
+ if ( isCaptureDevice ) {
+ info.isDefaultInput = info.name == defaultDeviceName;
+ info.isDefaultOutput = false;
+ }
+ else {
+ info.isDefaultInput = false;
+ info.isDefaultOutput = info.name == defaultDeviceName;
+ }
+
+ // channel count (taken from the shared-mode mix format)
+ hr = devicePtr->Activate( __uuidof( IAudioClient ), CLSCTX_ALL, NULL, ( void** ) &audioClient );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve device audio client.";
+ goto Exit;
+ }
+
+ hr = audioClient->GetMixFormat( &deviceFormat );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve device mix format.";
+ goto Exit;
+ }
+
+ if ( isCaptureDevice ) {
+ info.inputChannels = deviceFormat->nChannels;
+ info.outputChannels = 0;
+ info.duplexChannels = 0;
+ }
+ else {
+ info.inputChannels = 0;
+ info.outputChannels = deviceFormat->nChannels;
+ info.duplexChannels = 0;
+ }
+
+ // sample rates
+ info.sampleRates.clear();
+
+ // allow support for all sample rates as we have a built-in sample rate converter
+ for ( unsigned int i = 0; i < MAX_SAMPLE_RATES; i++ ) {
+ info.sampleRates.push_back( SAMPLE_RATES[i] );
+ }
+ info.preferredSampleRate = deviceFormat->nSamplesPerSec;
+
+ // native format: derived from the mix format's tag / extensible subformat and bit depth
+ info.nativeFormats = 0;
+
+ if ( deviceFormat->wFormatTag == WAVE_FORMAT_IEEE_FLOAT ||
+ ( deviceFormat->wFormatTag == WAVE_FORMAT_EXTENSIBLE &&
+ ( ( WAVEFORMATEXTENSIBLE* ) deviceFormat )->SubFormat == KSDATAFORMAT_SUBTYPE_IEEE_FLOAT ) )
+ {
+ if ( deviceFormat->wBitsPerSample == 32 ) {
+ info.nativeFormats |= RTAUDIO_FLOAT32;
+ }
+ else if ( deviceFormat->wBitsPerSample == 64 ) {
+ info.nativeFormats |= RTAUDIO_FLOAT64;
+ }
+ }
+ else if ( deviceFormat->wFormatTag == WAVE_FORMAT_PCM ||
+ ( deviceFormat->wFormatTag == WAVE_FORMAT_EXTENSIBLE &&
+ ( ( WAVEFORMATEXTENSIBLE* ) deviceFormat )->SubFormat == KSDATAFORMAT_SUBTYPE_PCM ) )
+ {
+ if ( deviceFormat->wBitsPerSample == 8 ) {
+ info.nativeFormats |= RTAUDIO_SINT8;
+ }
+ else if ( deviceFormat->wBitsPerSample == 16 ) {
+ info.nativeFormats |= RTAUDIO_SINT16;
+ }
+ else if ( deviceFormat->wBitsPerSample == 24 ) {
+ info.nativeFormats |= RTAUDIO_SINT24;
+ }
+ else if ( deviceFormat->wBitsPerSample == 32 ) {
+ info.nativeFormats |= RTAUDIO_SINT32;
+ }
+ }
+
+ // probed: all queries succeeded
+ info.probed = true;
+
+Exit:
+ // release all references (reached on both success and failure paths)
+ PropVariantClear( &deviceNameProp );
+ PropVariantClear( &defaultDeviceNameProp );
+
+ SAFE_RELEASE( captureDevices );
+ SAFE_RELEASE( renderDevices );
+ SAFE_RELEASE( devicePtr );
+ SAFE_RELEASE( defaultDevicePtr );
+ SAFE_RELEASE( audioClient );
+ SAFE_RELEASE( devicePropStore );
+ SAFE_RELEASE( defaultDevicePropStore );
+
+ CoTaskMemFree( deviceFormat );
+ CoTaskMemFree( closestMatchFormat );
+
+ if ( !errorText_.empty() )
+ error( errorType );
+ return info;
+}
+
+//-----------------------------------------------------------------------------
+
+unsigned int RtApiWasapi::getDefaultOutputDevice( void )
+{
+ for ( unsigned int i = 0; i < getDeviceCount(); i++ ) { // linear scan; each iteration re-probes the device
+ if ( getDeviceInfo( i ).isDefaultOutput ) {
+ return i;
+ }
+ }
+
+ return 0; // no device flagged as default output: fall back to device 0
+}
+
+//-----------------------------------------------------------------------------
+
+unsigned int RtApiWasapi::getDefaultInputDevice( void )
+{
+ for ( unsigned int i = 0; i < getDeviceCount(); i++ ) { // linear scan; each iteration re-probes the device
+ if ( getDeviceInfo( i ).isDefaultInput ) {
+ return i;
+ }
+ }
+
+ return 0; // no device flagged as default input: fall back to device 0
+}
+
+//-----------------------------------------------------------------------------
+
+void RtApiWasapi::closeStream( void )
+{
+ if ( stream_.state == STREAM_CLOSED ) {
+ errorText_ = "RtApiWasapi::closeStream: No open stream to close.";
+ error( RtAudioError::WARNING );
+ return;
+ }
+
+ if ( stream_.state != STREAM_STOPPED )
+ stopStream();
+
+ // clean up stream memory: release the WASAPI COM interfaces held by the api handle
+ SAFE_RELEASE( ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient )
+ SAFE_RELEASE( ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient )
+
+ SAFE_RELEASE( ( ( WasapiHandle* ) stream_.apiHandle )->captureClient )
+ SAFE_RELEASE( ( ( WasapiHandle* ) stream_.apiHandle )->renderClient )
+
+ if ( ( ( WasapiHandle* ) stream_.apiHandle )->captureEvent )
+ CloseHandle( ( ( WasapiHandle* ) stream_.apiHandle )->captureEvent );
+
+ if ( ( ( WasapiHandle* ) stream_.apiHandle )->renderEvent )
+ CloseHandle( ( ( WasapiHandle* ) stream_.apiHandle )->renderEvent );
+
+ delete ( WasapiHandle* ) stream_.apiHandle;
+ stream_.apiHandle = NULL;
+
+ for ( int i = 0; i < 2; i++ ) { // free both input- and output-side user buffers
+ if ( stream_.userBuffer[i] ) {
+ free( stream_.userBuffer[i] );
+ stream_.userBuffer[i] = 0;
+ }
+ }
+
+ if ( stream_.deviceBuffer ) {
+ free( stream_.deviceBuffer );
+ stream_.deviceBuffer = 0;
+ }
+
+ // update stream state
+ stream_.state = STREAM_CLOSED;
+}
+
+//-----------------------------------------------------------------------------
+
+void RtApiWasapi::startStream( void )
+{
+ verifyStream();
+
+ if ( stream_.state == STREAM_RUNNING ) {
+ errorText_ = "RtApiWasapi::startStream: The stream is already running.";
+ error( RtAudioError::WARNING );
+ return;
+ }
+
+ #if defined( HAVE_GETTIMEOFDAY )
+ gettimeofday( &stream_.lastTickTimestamp, NULL );
+ #endif
+
+ // update stream state
+ stream_.state = STREAM_RUNNING;
+
+ // create WASAPI stream thread (suspended so its priority can be set before it runs)
+ stream_.callbackInfo.thread = ( ThreadHandle ) CreateThread( NULL, 0, runWasapiThread, this, CREATE_SUSPENDED, NULL );
+
+ if ( !stream_.callbackInfo.thread ) {
+ errorText_ = "RtApiWasapi::startStream: Unable to instantiate callback thread.";
+ error( RtAudioError::THREAD_ERROR );
+ }
+ else {
+ SetThreadPriority( ( void* ) stream_.callbackInfo.thread, stream_.callbackInfo.priority );
+ ResumeThread( ( void* ) stream_.callbackInfo.thread );
+ }
+}
+
+//-----------------------------------------------------------------------------
+
+void RtApiWasapi::stopStream( void )
+{
+ verifyStream();
+
+ if ( stream_.state == STREAM_STOPPED ) {
+ errorText_ = "RtApiWasapi::stopStream: The stream is already stopped.";
+ error( RtAudioError::WARNING );
+ return;
+ }
+
+ // inform stream thread by setting stream state to STREAM_STOPPING
+ stream_.state = STREAM_STOPPING;
+
+ // busy-wait until the stream thread marks the state STREAM_STOPPED
+ while( stream_.state != STREAM_STOPPED ) {
+ Sleep( 1 );
+ }
+
+ // Wait for the last buffer to play before stopping.
+ Sleep( 1000 * stream_.bufferSize / stream_.sampleRate ); // buffer duration in milliseconds
+
+ // close thread handle
+ if ( stream_.callbackInfo.thread && !CloseHandle( ( void* ) stream_.callbackInfo.thread ) ) {
+ errorText_ = "RtApiWasapi::stopStream: Unable to close callback thread.";
+ error( RtAudioError::THREAD_ERROR );
+ return;
+ }
+
+ stream_.callbackInfo.thread = (ThreadHandle) NULL;
+}
+
+//-----------------------------------------------------------------------------
+
+void RtApiWasapi::abortStream( void )
+{
+ verifyStream();
+
+ if ( stream_.state == STREAM_STOPPED ) {
+ errorText_ = "RtApiWasapi::abortStream: The stream is already stopped.";
+ error( RtAudioError::WARNING );
+ return;
+ }
+
+ // inform stream thread by setting stream state to STREAM_STOPPING
+ stream_.state = STREAM_STOPPING;
+
+ // busy-wait until the stream thread marks the state STREAM_STOPPED (unlike stopStream, no drain delay)
+ while ( stream_.state != STREAM_STOPPED ) {
+ Sleep( 1 );
+ }
+
+ // close thread handle
+ if ( stream_.callbackInfo.thread && !CloseHandle( ( void* ) stream_.callbackInfo.thread ) ) {
+ errorText_ = "RtApiWasapi::abortStream: Unable to close callback thread.";
+ error( RtAudioError::THREAD_ERROR );
+ return;
+ }
+
+ stream_.callbackInfo.thread = (ThreadHandle) NULL;
+}
+
+//-----------------------------------------------------------------------------
+
+bool RtApiWasapi::probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
+                                   unsigned int firstChannel, unsigned int sampleRate,
+                                   RtAudioFormat format, unsigned int* bufferSize,
+                                   RtAudio::StreamOptions* options )
+{
+ bool methodResult = FAILURE;
+ unsigned int captureDeviceCount = 0;
+ unsigned int renderDeviceCount = 0;
+
+ IMMDeviceCollection* captureDevices = NULL;
+ IMMDeviceCollection* renderDevices = NULL;
+ IMMDevice* devicePtr = NULL;
+ WAVEFORMATEX* deviceFormat = NULL;
+ unsigned int bufferBytes;
+ stream_.state = STREAM_STOPPED;
+
+ // create API Handle if not already created
+ if ( !stream_.apiHandle )
+ stream_.apiHandle = ( void* ) new WasapiHandle();
+
+ // Count capture devices
+ errorText_.clear();
+ RtAudioError::Type errorType = RtAudioError::DRIVER_ERROR;
+ HRESULT hr = deviceEnumerator_->EnumAudioEndpoints( eCapture, DEVICE_STATE_ACTIVE, &captureDevices );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve capture device collection.";
+ goto Exit;
+ }
+
+ hr = captureDevices->GetCount( &captureDeviceCount );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve capture device count.";
+ goto Exit;
+ }
+
+ // Count render devices
+ hr = deviceEnumerator_->EnumAudioEndpoints( eRender, DEVICE_STATE_ACTIVE, &renderDevices );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve render device collection.";
+ goto Exit;
+ }
+
+ hr = renderDevices->GetCount( &renderDeviceCount );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve render device count.";
+ goto Exit;
+ }
+
+ // validate device index
+ if ( device >= captureDeviceCount + renderDeviceCount ) {
+ errorType = RtAudioError::INVALID_USE;
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Invalid device index.";
+ goto Exit;
+ }
+
+ // if device index falls within capture devices (render devices occupy the lower indices)
+ if ( device >= renderDeviceCount ) {
+ if ( mode != INPUT ) {
+ errorType = RtAudioError::INVALID_USE;
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Capture device selected as output device.";
+ goto Exit;
+ }
+
+ // retrieve captureAudioClient from devicePtr
+ IAudioClient*& captureAudioClient = ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient;
+
+ hr = captureDevices->Item( device - renderDeviceCount, &devicePtr );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve capture device handle.";
+ goto Exit;
+ }
+
+ hr = devicePtr->Activate( __uuidof( IAudioClient ), CLSCTX_ALL,
+ NULL, ( void** ) &captureAudioClient );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve capture device audio client.";
+ goto Exit;
+ }
+
+ hr = captureAudioClient->GetMixFormat( &deviceFormat );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve capture device mix format.";
+ goto Exit;
+ }
+
+ stream_.nDeviceChannels[mode] = deviceFormat->nChannels;
+ captureAudioClient->GetStreamLatency( ( long long* ) &stream_.latency[mode] );
+ }
+
+ // if device index falls within render devices and is configured for loopback
+ if ( device < renderDeviceCount && mode == INPUT )
+ {
+ // if renderAudioClient is not initialised, initialise it now
+ IAudioClient*& renderAudioClient = ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient;
+ if ( !renderAudioClient )
+ {
+ probeDeviceOpen( device, OUTPUT, channels, firstChannel, sampleRate, format, bufferSize, options ); // NOTE(review): result ignored — confirm a failed render-side open is surfaced to the caller
+ }
+
+ // retrieve captureAudioClient from devicePtr
+ IAudioClient*& captureAudioClient = ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient;
+
+ hr = renderDevices->Item( device, &devicePtr );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve render device handle.";
+ goto Exit;
+ }
+
+ hr = devicePtr->Activate( __uuidof( IAudioClient ), CLSCTX_ALL,
+ NULL, ( void** ) &captureAudioClient );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve render device audio client.";
+ goto Exit;
+ }
+
+ hr = captureAudioClient->GetMixFormat( &deviceFormat );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve render device mix format.";
+ goto Exit;
+ }
+
+ stream_.nDeviceChannels[mode] = deviceFormat->nChannels;
+ captureAudioClient->GetStreamLatency( ( long long* ) &stream_.latency[mode] );
+ }
+
+ // if device index falls within render devices and is configured for output
+ if ( device < renderDeviceCount && mode == OUTPUT )
+ {
+ // if renderAudioClient is already initialised, don't initialise it again
+ IAudioClient*& renderAudioClient = ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient;
+ if ( renderAudioClient )
+ {
+ methodResult = SUCCESS;
+ goto Exit;
+ }
+
+ hr = renderDevices->Item( device, &devicePtr );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve render device handle.";
+ goto Exit;
+ }
+
+ hr = devicePtr->Activate( __uuidof( IAudioClient ), CLSCTX_ALL,
+ NULL, ( void** ) &renderAudioClient );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve render device audio client.";
+ goto Exit;
+ }
+
+ hr = renderAudioClient->GetMixFormat( &deviceFormat );
+ if ( FAILED( hr ) ) {
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve render device mix format.";
+ goto Exit;
+ }
+
+ stream_.nDeviceChannels[mode] = deviceFormat->nChannels;
+ renderAudioClient->GetStreamLatency( ( long long* ) &stream_.latency[mode] );
+ }
+
+ // fill stream data
+ if ( ( stream_.mode == OUTPUT && mode == INPUT ) ||
+ ( stream_.mode == INPUT && mode == OUTPUT ) ) {
+ stream_.mode = DUPLEX;
+ }
+ else {
+ stream_.mode = mode;
+ }
+
+ stream_.device[mode] = device;
+ stream_.doByteSwap[mode] = false;
+ stream_.sampleRate = sampleRate;
+ stream_.bufferSize = *bufferSize;
+ stream_.nBuffers = 1;
+ stream_.nUserChannels[mode] = channels;
+ stream_.channelOffset[mode] = firstChannel;
+ stream_.userFormat = format;
+ stream_.deviceFormat[mode] = getDeviceInfo( device ).nativeFormats; // NOTE(review): nativeFormats is a bitmask; storing it as a single format assumes exactly one bit is set — confirm
+
+ if ( options && options->flags & RTAUDIO_NONINTERLEAVED )
+ stream_.userInterleaved = false;
+ else
+ stream_.userInterleaved = true;
+ stream_.deviceInterleaved[mode] = true;
+
+ // Set flags for buffer conversion.
+ stream_.doConvertBuffer[mode] = false;
+ if ( stream_.userFormat != stream_.deviceFormat[mode] ||
+ stream_.nUserChannels[0] != stream_.nDeviceChannels[0] ||
+ stream_.nUserChannels[1] != stream_.nDeviceChannels[1] )
+ stream_.doConvertBuffer[mode] = true;
+ else if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
+ stream_.nUserChannels[mode] > 1 )
+ stream_.doConvertBuffer[mode] = true;
+
+ if ( stream_.doConvertBuffer[mode] )
+ setConvertInfo( mode, firstChannel );
+
+ // Allocate necessary internal buffers
+ bufferBytes = stream_.nUserChannels[mode] * stream_.bufferSize * formatBytes( stream_.userFormat );
+
+ stream_.userBuffer[mode] = ( char* ) calloc( bufferBytes, 1 );
+ if ( !stream_.userBuffer[mode] ) {
+ errorType = RtAudioError::MEMORY_ERROR;
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Error allocating user buffer memory.";
+ goto Exit;
+ }
+
+ if ( options && options->flags & RTAUDIO_SCHEDULE_REALTIME )
+ stream_.callbackInfo.priority = 15;
+ else
+ stream_.callbackInfo.priority = 0;
+
+ ///! TODO: RTAUDIO_MINIMIZE_LATENCY // Provide stream buffers directly to callback
+ ///! TODO: RTAUDIO_HOG_DEVICE // Exclusive mode
+
+ methodResult = SUCCESS;
+
+Exit:
+ // clean up: release enumeration references on all paths
+ SAFE_RELEASE( captureDevices );
+ SAFE_RELEASE( renderDevices );
+ SAFE_RELEASE( devicePtr );
+ CoTaskMemFree( deviceFormat );
+
+ // if method failed, close the stream
+ if ( methodResult == FAILURE )
+ closeStream();
+
+ if ( !errorText_.empty() )
+ error( errorType );
+ return methodResult;
+}
+
+//=============================================================================
+
+DWORD WINAPI RtApiWasapi::runWasapiThread( void* wasapiPtr ) // thread entry (created in startStream): forwards to the member wasapiThread()
+{
+ if ( wasapiPtr )
+ ( ( RtApiWasapi* ) wasapiPtr )->wasapiThread();
+
+ return 0;
+}
+
+DWORD WINAPI RtApiWasapi::stopWasapiThread( void* wasapiPtr ) // thread-proc wrapper around the member stopStream()
+{
+ if ( wasapiPtr )
+ ( ( RtApiWasapi* ) wasapiPtr )->stopStream();
+
+ return 0;
+}
+
+DWORD WINAPI RtApiWasapi::abortWasapiThread( void* wasapiPtr ) // thread-proc wrapper around the member abortStream()
+{
+ if ( wasapiPtr )
+ ( ( RtApiWasapi* ) wasapiPtr )->abortStream();
+
+ return 0;
+}
+
+//-----------------------------------------------------------------------------
+
+void RtApiWasapi::wasapiThread()
+{
+ // as this is a new thread, we must CoInitialize it
+ CoInitialize( NULL );
+
+ HRESULT hr;
+
+ IAudioClient* captureAudioClient = ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient;
+ IAudioClient* renderAudioClient = ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient;
+ IAudioCaptureClient* captureClient = ( ( WasapiHandle* ) stream_.apiHandle )->captureClient;
+ IAudioRenderClient* renderClient = ( ( WasapiHandle* ) stream_.apiHandle )->renderClient;
+ HANDLE captureEvent = ( ( WasapiHandle* ) stream_.apiHandle )->captureEvent;
+ HANDLE renderEvent = ( ( WasapiHandle* ) stream_.apiHandle )->renderEvent;
+
+ WAVEFORMATEX* captureFormat = NULL;
+ WAVEFORMATEX* renderFormat = NULL;
+ float captureSrRatio = 0.0f;
+ float renderSrRatio = 0.0f;
+ WasapiBuffer captureBuffer;
+ WasapiBuffer renderBuffer;
+ WasapiResampler* captureResampler = NULL;
+ WasapiResampler* renderResampler = NULL;
+
+ // declare local stream variables
+ RtAudioCallback callback = ( RtAudioCallback ) stream_.callbackInfo.callback;
+ BYTE* streamBuffer = NULL;
+ DWORD captureFlags = 0;
+ unsigned int bufferFrameCount = 0;
+ unsigned int numFramesPadding = 0;
+ unsigned int convBufferSize = 0;
+ bool loopbackEnabled = stream_.device[INPUT] == stream_.device[OUTPUT];
+ bool callbackPushed = true;
+ bool callbackPulled = false;
+ bool callbackStopped = false;
+ int callbackResult = 0;
+
+ // convBuffer is used to store converted buffers between WASAPI and the user
+ char* convBuffer = NULL;
+ unsigned int convBuffSize = 0;
+ unsigned int deviceBuffSize = 0;
+
+ std::string errorText;
+ RtAudioError::Type errorType = RtAudioError::DRIVER_ERROR;
+
+ // Attempt to assign "Pro Audio" characteristic to thread
+ HMODULE AvrtDll = LoadLibrary( (LPCTSTR) "AVRT.dll" );
+ if ( AvrtDll ) {
+ DWORD taskIndex = 0;
+ TAvSetMmThreadCharacteristicsPtr AvSetMmThreadCharacteristicsPtr =
+ ( TAvSetMmThreadCharacteristicsPtr ) (void(*)()) GetProcAddress( AvrtDll, "AvSetMmThreadCharacteristicsW" );
+ AvSetMmThreadCharacteristicsPtr( L"Pro Audio", &taskIndex );
+ FreeLibrary( AvrtDll );
+ }
+
+ // start capture stream if applicable
+ if ( captureAudioClient ) {
+ hr = captureAudioClient->GetMixFormat( &captureFormat );
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to retrieve device mix format.";
+ goto Exit;
+ }
+
+ // init captureResampler
+ captureResampler = new WasapiResampler( stream_.deviceFormat[INPUT] == RTAUDIO_FLOAT32 || stream_.deviceFormat[INPUT] == RTAUDIO_FLOAT64,
+ formatBytes( stream_.deviceFormat[INPUT] ) * 8, stream_.nDeviceChannels[INPUT],
+ captureFormat->nSamplesPerSec, stream_.sampleRate );
+
+ captureSrRatio = ( ( float ) captureFormat->nSamplesPerSec / stream_.sampleRate );
+
+ if ( !captureClient ) {
+ hr = captureAudioClient->Initialize( AUDCLNT_SHAREMODE_SHARED,
+ loopbackEnabled ? AUDCLNT_STREAMFLAGS_LOOPBACK : AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
+ 0,
+ 0,
+ captureFormat,
+ NULL );
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to initialize capture audio client.";
+ goto Exit;
+ }
+
+ hr = captureAudioClient->GetService( __uuidof( IAudioCaptureClient ),
+ ( void** ) &captureClient );
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to retrieve capture client handle.";
+ goto Exit;
+ }
+
+ // don't configure captureEvent if in loopback mode
+ if ( !loopbackEnabled )
+ {
+ // configure captureEvent to trigger on every available capture buffer
+ captureEvent = CreateEvent( NULL, FALSE, FALSE, NULL );
+ if ( !captureEvent ) {
+ errorType = RtAudioError::SYSTEM_ERROR;
+ errorText = "RtApiWasapi::wasapiThread: Unable to create capture event.";
+ goto Exit;
+ }
+
+ hr = captureAudioClient->SetEventHandle( captureEvent );
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to set capture event handle.";
+ goto Exit;
+ }
+
+ ( ( WasapiHandle* ) stream_.apiHandle )->captureEvent = captureEvent;
+ }
+
+ ( ( WasapiHandle* ) stream_.apiHandle )->captureClient = captureClient;
+
+ // reset the capture stream
+ hr = captureAudioClient->Reset();
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to reset capture stream.";
+ goto Exit;
+ }
+
+ // start the capture stream
+ hr = captureAudioClient->Start();
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to start capture stream.";
+ goto Exit;
+ }
+ }
+
+ unsigned int inBufferSize = 0;
+ hr = captureAudioClient->GetBufferSize( &inBufferSize );
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to get capture buffer size.";
+ goto Exit;
+ }
+
+ // scale outBufferSize according to stream->user sample rate ratio
+ unsigned int outBufferSize = ( unsigned int ) ceilf( stream_.bufferSize * captureSrRatio ) * stream_.nDeviceChannels[INPUT];
+ inBufferSize *= stream_.nDeviceChannels[INPUT];
+
+ // set captureBuffer size
+ captureBuffer.setBufferSize( inBufferSize + outBufferSize, formatBytes( stream_.deviceFormat[INPUT] ) );
+ }
+
+ // start render stream if applicable
+ if ( renderAudioClient ) {
+ hr = renderAudioClient->GetMixFormat( &renderFormat );
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to retrieve device mix format.";
+ goto Exit;
+ }
+
+ // init renderResampler
+ renderResampler = new WasapiResampler( stream_.deviceFormat[OUTPUT] == RTAUDIO_FLOAT32 || stream_.deviceFormat[OUTPUT] == RTAUDIO_FLOAT64,
+ formatBytes( stream_.deviceFormat[OUTPUT] ) * 8, stream_.nDeviceChannels[OUTPUT],
+ stream_.sampleRate, renderFormat->nSamplesPerSec );
+
+ renderSrRatio = ( ( float ) renderFormat->nSamplesPerSec / stream_.sampleRate );
+
+ if ( !renderClient ) {
+ hr = renderAudioClient->Initialize( AUDCLNT_SHAREMODE_SHARED,
+ AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
+ 0,
+ 0,
+ renderFormat,
+ NULL );
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to initialize render audio client.";
+ goto Exit;
+ }
+
+ hr = renderAudioClient->GetService( __uuidof( IAudioRenderClient ),
+ ( void** ) &renderClient );
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to retrieve render client handle.";
+ goto Exit;
+ }
+
+ // configure renderEvent to trigger on every available render buffer
+ renderEvent = CreateEvent( NULL, FALSE, FALSE, NULL );
+ if ( !renderEvent ) {
+ errorType = RtAudioError::SYSTEM_ERROR;
+ errorText = "RtApiWasapi::wasapiThread: Unable to create render event.";
+ goto Exit;
+ }
+
+ hr = renderAudioClient->SetEventHandle( renderEvent );
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to set render event handle.";
+ goto Exit;
+ }
+
+ ( ( WasapiHandle* ) stream_.apiHandle )->renderClient = renderClient;
+ ( ( WasapiHandle* ) stream_.apiHandle )->renderEvent = renderEvent;
+
+ // reset the render stream
+ hr = renderAudioClient->Reset();
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to reset render stream.";
+ goto Exit;
+ }
+
+ // start the render stream
+ hr = renderAudioClient->Start();
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to start render stream.";
+ goto Exit;
+ }
+ }
+
+ unsigned int outBufferSize = 0;
+ hr = renderAudioClient->GetBufferSize( &outBufferSize );
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to get render buffer size.";
+ goto Exit;
+ }
+
+ // scale inBufferSize according to user->stream sample rate ratio
+ unsigned int inBufferSize = ( unsigned int ) ceilf( stream_.bufferSize * renderSrRatio ) * stream_.nDeviceChannels[OUTPUT];
+ outBufferSize *= stream_.nDeviceChannels[OUTPUT];
+
+ // set renderBuffer size
+ renderBuffer.setBufferSize( inBufferSize + outBufferSize, formatBytes( stream_.deviceFormat[OUTPUT] ) );
+ }
+
+ // malloc buffer memory
+ if ( stream_.mode == INPUT )
+ {
+ using namespace std; // for ceilf
+ convBuffSize = ( size_t ) ( ceilf( stream_.bufferSize * captureSrRatio ) ) * stream_.nDeviceChannels[INPUT] * formatBytes( stream_.deviceFormat[INPUT] );
+ deviceBuffSize = stream_.bufferSize * stream_.nDeviceChannels[INPUT] * formatBytes( stream_.deviceFormat[INPUT] );
+ }
+ else if ( stream_.mode == OUTPUT )
+ {
+ convBuffSize = ( size_t ) ( ceilf( stream_.bufferSize * renderSrRatio ) ) * stream_.nDeviceChannels[OUTPUT] * formatBytes( stream_.deviceFormat[OUTPUT] );
+ deviceBuffSize = stream_.bufferSize * stream_.nDeviceChannels[OUTPUT] * formatBytes( stream_.deviceFormat[OUTPUT] );
+ }
+ else if ( stream_.mode == DUPLEX )
+ {
+ convBuffSize = std::max( ( size_t ) ( ceilf( stream_.bufferSize * captureSrRatio ) ) * stream_.nDeviceChannels[INPUT] * formatBytes( stream_.deviceFormat[INPUT] ),
+ ( size_t ) ( ceilf( stream_.bufferSize * renderSrRatio ) ) * stream_.nDeviceChannels[OUTPUT] * formatBytes( stream_.deviceFormat[OUTPUT] ) );
+ deviceBuffSize = std::max( stream_.bufferSize * stream_.nDeviceChannels[INPUT] * formatBytes( stream_.deviceFormat[INPUT] ),
+ stream_.bufferSize * stream_.nDeviceChannels[OUTPUT] * formatBytes( stream_.deviceFormat[OUTPUT] ) );
+ }
+
+ convBuffSize *= 2; // allow overflow for *SrRatio remainders
+ convBuffer = ( char* ) calloc( convBuffSize, 1 );
+ stream_.deviceBuffer = ( char* ) calloc( deviceBuffSize, 1 );
+ if ( !convBuffer || !stream_.deviceBuffer ) {
+ errorType = RtAudioError::MEMORY_ERROR;
+ errorText = "RtApiWasapi::wasapiThread: Error allocating device buffer memory.";
+ goto Exit;
+ }
+
+ // stream process loop
+ while ( stream_.state != STREAM_STOPPING ) {
+ if ( !callbackPulled ) {
+ // Callback Input
+ // ==============
+ // 1. Pull callback buffer from inputBuffer
+ // 2. If 1. was successful: Convert callback buffer to user sample rate and channel count
+ // Convert callback buffer to user format
+
+ if ( captureAudioClient )
+ {
+ int samplesToPull = ( unsigned int ) floorf( stream_.bufferSize * captureSrRatio );
+
+ convBufferSize = 0;
+ while ( convBufferSize < stream_.bufferSize )
+ {
+ // Pull callback buffer from inputBuffer
+ callbackPulled = captureBuffer.pullBuffer( convBuffer,
+ samplesToPull * stream_.nDeviceChannels[INPUT],
+ stream_.deviceFormat[INPUT] );
+
+ if ( !callbackPulled )
+ {
+ break;
+ }
+
+ // Convert callback buffer to user sample rate
+ unsigned int deviceBufferOffset = convBufferSize * stream_.nDeviceChannels[INPUT] * formatBytes( stream_.deviceFormat[INPUT] );
+ unsigned int convSamples = 0;
+
+ captureResampler->Convert( stream_.deviceBuffer + deviceBufferOffset,
+ convBuffer,
+ samplesToPull,
+ convSamples,
+ convBufferSize == 0 ? -1 : stream_.bufferSize - convBufferSize );
+
+ convBufferSize += convSamples;
+ samplesToPull = 1; // now pull one sample at a time until we have stream_.bufferSize samples
+ }
+
+ if ( callbackPulled )
+ {
+ if ( stream_.doConvertBuffer[INPUT] ) {
+ // Convert callback buffer to user format
+ convertBuffer( stream_.userBuffer[INPUT],
+ stream_.deviceBuffer,
+ stream_.convertInfo[INPUT] );
+ }
+ else {
+ // no further conversion, simple copy deviceBuffer to userBuffer
+ memcpy( stream_.userBuffer[INPUT],
+ stream_.deviceBuffer,
+ stream_.bufferSize * stream_.nUserChannels[INPUT] * formatBytes( stream_.userFormat ) );
+ }
+ }
+ }
+ else {
+ // if there is no capture stream, set callbackPulled flag
+ callbackPulled = true;
+ }
+
+ // Execute Callback
+ // ================
+ // 1. Execute user callback method
+ // 2. Handle return value from callback
+
+ // if callback has not requested the stream to stop
+ if ( callbackPulled && !callbackStopped ) {
+ // Execute user callback method
+ callbackResult = callback( stream_.userBuffer[OUTPUT],
+ stream_.userBuffer[INPUT],
+ stream_.bufferSize,
+ getStreamTime(),
+ captureFlags & AUDCLNT_BUFFERFLAGS_DATA_DISCONTINUITY ? RTAUDIO_INPUT_OVERFLOW : 0,
+ stream_.callbackInfo.userData );
+
+ // tick stream time
+ RtApi::tickStreamTime();
+
+ // Handle return value from callback
+ if ( callbackResult == 1 ) {
+ // instantiate a thread to stop this thread
+ HANDLE threadHandle = CreateThread( NULL, 0, stopWasapiThread, this, 0, NULL );
+ if ( !threadHandle ) {
+ errorType = RtAudioError::THREAD_ERROR;
+ errorText = "RtApiWasapi::wasapiThread: Unable to instantiate stream stop thread.";
+ goto Exit;
+ }
+ else if ( !CloseHandle( threadHandle ) ) {
+ errorType = RtAudioError::THREAD_ERROR;
+ errorText = "RtApiWasapi::wasapiThread: Unable to close stream stop thread handle.";
+ goto Exit;
+ }
+
+ callbackStopped = true;
+ }
+ else if ( callbackResult == 2 ) {
+ // instantiate a thread to stop this thread
+ HANDLE threadHandle = CreateThread( NULL, 0, abortWasapiThread, this, 0, NULL );
+ if ( !threadHandle ) {
+ errorType = RtAudioError::THREAD_ERROR;
+ errorText = "RtApiWasapi::wasapiThread: Unable to instantiate stream abort thread.";
+ goto Exit;
+ }
+ else if ( !CloseHandle( threadHandle ) ) {
+ errorType = RtAudioError::THREAD_ERROR;
+ errorText = "RtApiWasapi::wasapiThread: Unable to close stream abort thread handle.";
+ goto Exit;
+ }
+
+ callbackStopped = true;
+ }
+ }
+ }
+
+ // Callback Output
+ // ===============
+ // 1. Convert callback buffer to stream format
+ // 2. Convert callback buffer to stream sample rate and channel count
+ // 3. Push callback buffer into outputBuffer
+
+ if ( renderAudioClient && callbackPulled )
+ {
+      // if the last call to renderBuffer.pushBuffer() was successful
+ if ( callbackPushed || convBufferSize == 0 )
+ {
+ if ( stream_.doConvertBuffer[OUTPUT] )
+ {
+ // Convert callback buffer to stream format
+ convertBuffer( stream_.deviceBuffer,
+ stream_.userBuffer[OUTPUT],
+ stream_.convertInfo[OUTPUT] );
+
+ }
+ else {
+ // no further conversion, simple copy userBuffer to deviceBuffer
+ memcpy( stream_.deviceBuffer,
+ stream_.userBuffer[OUTPUT],
+ stream_.bufferSize * stream_.nUserChannels[OUTPUT] * formatBytes( stream_.userFormat ) );
+ }
+
+ // Convert callback buffer to stream sample rate
+ renderResampler->Convert( convBuffer,
+ stream_.deviceBuffer,
+ stream_.bufferSize,
+ convBufferSize );
+ }
+
+ // Push callback buffer into outputBuffer
+ callbackPushed = renderBuffer.pushBuffer( convBuffer,
+ convBufferSize * stream_.nDeviceChannels[OUTPUT],
+ stream_.deviceFormat[OUTPUT] );
+ }
+ else {
+ // if there is no render stream, set callbackPushed flag
+ callbackPushed = true;
+ }
+
+ // Stream Capture
+ // ==============
+ // 1. Get capture buffer from stream
+ // 2. Push capture buffer into inputBuffer
+ // 3. If 2. was successful: Release capture buffer
+
+ if ( captureAudioClient ) {
+ // if the callback input buffer was not pulled from captureBuffer, wait for next capture event
+ if ( !callbackPulled ) {
+ WaitForSingleObject( loopbackEnabled ? renderEvent : captureEvent, INFINITE );
+ }
+
+ // Get capture buffer from stream
+ hr = captureClient->GetBuffer( &streamBuffer,
+ &bufferFrameCount,
+ &captureFlags, NULL, NULL );
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to retrieve capture buffer.";
+ goto Exit;
+ }
+
+ if ( bufferFrameCount != 0 ) {
+ // Push capture buffer into inputBuffer
+ if ( captureBuffer.pushBuffer( ( char* ) streamBuffer,
+ bufferFrameCount * stream_.nDeviceChannels[INPUT],
+ stream_.deviceFormat[INPUT] ) )
+ {
+ // Release capture buffer
+ hr = captureClient->ReleaseBuffer( bufferFrameCount );
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to release capture buffer.";
+ goto Exit;
+ }
+ }
+ else
+ {
+ // Inform WASAPI that capture was unsuccessful
+ hr = captureClient->ReleaseBuffer( 0 );
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to release capture buffer.";
+ goto Exit;
+ }
+ }
+ }
+ else
+ {
+ // Inform WASAPI that capture was unsuccessful
+ hr = captureClient->ReleaseBuffer( 0 );
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to release capture buffer.";
+ goto Exit;
+ }
+ }
+ }
+
+ // Stream Render
+ // =============
+ // 1. Get render buffer from stream
+ // 2. Pull next buffer from outputBuffer
+ // 3. If 2. was successful: Fill render buffer with next buffer
+ // Release render buffer
+
+ if ( renderAudioClient ) {
+ // if the callback output buffer was not pushed to renderBuffer, wait for next render event
+ if ( callbackPulled && !callbackPushed ) {
+ WaitForSingleObject( renderEvent, INFINITE );
+ }
+
+ // Get render buffer from stream
+ hr = renderAudioClient->GetBufferSize( &bufferFrameCount );
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to retrieve render buffer size.";
+ goto Exit;
+ }
+
+ hr = renderAudioClient->GetCurrentPadding( &numFramesPadding );
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to retrieve render buffer padding.";
+ goto Exit;
+ }
+
+ bufferFrameCount -= numFramesPadding;
+
+ if ( bufferFrameCount != 0 ) {
+ hr = renderClient->GetBuffer( bufferFrameCount, &streamBuffer );
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to retrieve render buffer.";
+ goto Exit;
+ }
+
+ // Pull next buffer from outputBuffer
+ // Fill render buffer with next buffer
+ if ( renderBuffer.pullBuffer( ( char* ) streamBuffer,
+ bufferFrameCount * stream_.nDeviceChannels[OUTPUT],
+ stream_.deviceFormat[OUTPUT] ) )
+ {
+ // Release render buffer
+ hr = renderClient->ReleaseBuffer( bufferFrameCount, 0 );
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to release render buffer.";
+ goto Exit;
+ }
+ }
+ else
+ {
+ // Inform WASAPI that render was unsuccessful
+ hr = renderClient->ReleaseBuffer( 0, 0 );
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to release render buffer.";
+ goto Exit;
+ }
+ }
+ }
+ else
+ {
+ // Inform WASAPI that render was unsuccessful
+ hr = renderClient->ReleaseBuffer( 0, 0 );
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to release render buffer.";
+ goto Exit;
+ }
+ }
+ }
+
+    // if the callback buffer was pushed to renderBuffer, reset the callbackPulled flag
+ if ( callbackPushed ) {
+ // unsetting the callbackPulled flag lets the stream know that
+ // the audio device is ready for another callback output buffer.
+ callbackPulled = false;
+ }
+
+ }
+
+Exit:
+ // clean up
+ CoTaskMemFree( captureFormat );
+ CoTaskMemFree( renderFormat );
+
+ free ( convBuffer );
+ delete renderResampler;
+ delete captureResampler;
+
+ CoUninitialize();
+
+ // update stream state
+ stream_.state = STREAM_STOPPED;
+
+ if ( !errorText.empty() )
+ {
+ errorText_ = errorText;
+ error( errorType );
+ }
+}
+
+//******************** End of __WINDOWS_WASAPI__ *********************//
+#endif
+
+
+#if defined(__WINDOWS_DS__) // Windows DirectSound API
+
+// Modified by Robin Davies, October 2005
+// - Improvements to DirectX pointer chasing.
+// - Bug fix for non-power-of-two Asio granularity used by Edirol PCR-A30.
+// - Auto-call CoInitialize for DSOUND and ASIO platforms.
+// Various revisions for RtAudio 4.0 by Gary Scavone, April 2007
+// Changed device query structure for RtAudio 4.0.7, January 2010
+
+#include <windows.h>
+#include <process.h>
+#include <mmsystem.h>
+#include <mmreg.h>
+#include <dsound.h>
+#include <assert.h>
+#include <algorithm>
+
+#if defined(__MINGW32__)
+ // missing from latest mingw winapi
+#define WAVE_FORMAT_96M08 0x00010000 /* 96 kHz, Mono, 8-bit */
+#define WAVE_FORMAT_96S08 0x00020000 /* 96 kHz, Stereo, 8-bit */
+#define WAVE_FORMAT_96M16 0x00040000 /* 96 kHz, Mono, 16-bit */
+#define WAVE_FORMAT_96S16 0x00080000 /* 96 kHz, Stereo, 16-bit */
+#endif
+
+#define MINIMUM_DEVICE_BUFFER_SIZE 32768
+
+#ifdef _MSC_VER // if Microsoft Visual C++
+#pragma comment( lib, "winmm.lib" ) // then, auto-link winmm.lib. Otherwise, it has to be added manually.
+#endif
+
+static inline DWORD dsPointerBetween( DWORD pointer, DWORD laterPointer, DWORD earlierPointer, DWORD bufferSize ) // nonzero iff 'pointer' lies in the circular byte range [earlierPointer, laterPointer)
+{
+  if ( pointer > bufferSize ) pointer -= bufferSize;               // wrap the pointer back into the buffer
+  if ( laterPointer < earlierPointer ) laterPointer += bufferSize; // unwrap a range that crosses the end of the buffer
+  if ( pointer < earlierPointer ) pointer += bufferSize;           // unwrap the pointer relative to the range start
+  return pointer >= earlierPointer && pointer < laterPointer;
+}
+
+// A structure to hold various information related to the DirectSound
+// API implementation.
+struct DsHandle {
+  unsigned int drainCounter;  // Tracks callback counts when draining
+  bool internalDrain;         // Indicates if stop is initiated from callback or not.
+  void *id[2];                // DirectSound buffer interfaces; presumably [0] = playback, [1] = capture (matches DsDevice convention) — confirm in probeDeviceOpen
+  void *buffer[2];            // intermediate device buffers, one per direction
+  bool xrun[2];               // under/overrun flags, one per direction
+  UINT bufferPointer[2];      // current byte offsets into the DS buffers, one per direction
+  DWORD dsBufferSize[2];      // total DirectSound buffer size in bytes, one per direction
+  DWORD dsPointerLeadTime[2]; // the number of bytes ahead of the safe pointer to lead by.
+  HANDLE condition;           // NOTE(review): presumably signals the callback thread — confirm against callbackHandler
+
+  DsHandle()
+    :drainCounter(0), internalDrain(false) { id[0] = 0; id[1] = 0; buffer[0] = 0; buffer[1] = 0; xrun[0] = false; xrun[1] = false; bufferPointer[0] = 0; bufferPointer[1] = 0; }
+};
+
+// Declarations for utility functions, callbacks, and structures
+// specific to the DirectSound implementation.
+static BOOL CALLBACK deviceQueryCallback( LPGUID lpguid,
+                                          LPCTSTR description,
+                                          LPCTSTR module,
+                                          LPVOID lpContext ); // DirectSound(Capture)Enumerate callback; lpContext is a DsProbeData* whose device list it fills
+
+static const char* getErrorString( int code ); // maps a DirectSound HRESULT to a human-readable message for errorStream_
+
+static unsigned __stdcall callbackHandler( void *ptr ); // presumably the stream callback thread entry (signature matches _beginthreadex) — confirm where the thread is spawned
+
+struct DsDevice {
+  LPGUID id[2];     // device GUIDs: [0] = output (DirectSoundCreate), [1] = input (DirectSoundCaptureCreate)
+  bool validId[2];  // whether the corresponding GUID in id[] is valid for that direction
+  bool found;       // set during enumeration; still false afterwards means the device disappeared
+  std::string name; // device description reported by the enumeration callback
+
+  DsDevice()
+    : found(false) { validId[0] = false; validId[1] = false; }
+};
+
+struct DsProbeData {
+  bool isInput;                            // true while enumerating capture devices, false for output devices
+  std::vector<struct DsDevice>* dsDevices; // device list updated in place by deviceQueryCallback
+};
+
+RtApiDs :: RtApiDs()
+{
+  // DirectSound runs under either COM threading model. If CoInitialize
+  // fails, just accept whatever the mainline chose for a threading model.
+  coInitialized_ = false;
+  HRESULT hr = CoInitialize( NULL );
+  if ( !FAILED( hr ) ) coInitialized_ = true; // remember success so the destructor can balance the call
+}
+
+RtApiDs :: ~RtApiDs()
+{
+  if ( stream_.state != STREAM_CLOSED ) closeStream(); // tear down any open stream before releasing COM
+  if ( coInitialized_ ) CoUninitialize(); // balanced call: only if our CoInitialize succeeded.
+}
+
+// The DirectSound default output is always the first device.
+unsigned int RtApiDs :: getDefaultOutputDevice( void )
+{
+  return 0; // DirectSound enumeration always lists the default output device first
+}
+
+// The DirectSound default input is always the first input device,
+// which is the first capture device enumerated.
+unsigned int RtApiDs :: getDefaultInputDevice( void )
+{
+  return 0; // the first capture device enumerated is the system default input
+}
+
+unsigned int RtApiDs :: getDeviceCount( void ) // re-enumerates all DirectSound devices and returns the current count
+{
+  // Set query flag for previously found devices to false, so that we
+  // can check for any devices that have disappeared.
+  for ( unsigned int i=0; i<dsDevices.size(); i++ )
+    dsDevices[i].found = false;
+
+  // Query DirectSound devices.
+  struct DsProbeData probeInfo;
+  probeInfo.isInput = false;
+  probeInfo.dsDevices = &dsDevices; // deviceQueryCallback fills/refreshes this list in place
+  HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &probeInfo );
+  if ( FAILED( result ) ) { // enumeration failure is non-fatal: report a warning and continue
+    errorStream_ << "RtApiDs::getDeviceCount: error (" << getErrorString( result ) << ") enumerating output devices!";
+    errorText_ = errorStream_.str();
+    error( RtAudioError::WARNING );
+  }
+
+  // Query DirectSoundCapture devices.
+  probeInfo.isInput = true;
+  result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &probeInfo );
+  if ( FAILED( result ) ) {
+    errorStream_ << "RtApiDs::getDeviceCount: error (" << getErrorString( result ) << ") enumerating input devices!";
+    errorText_ = errorStream_.str();
+    error( RtAudioError::WARNING );
+  }
+
+  // Clean out any devices that may have disappeared (code update submitted by Eli Zehngut).
+  for ( unsigned int i=0; i<dsDevices.size(); ) { // note: i advances only when nothing was erased
+    if ( dsDevices[i].found == false ) dsDevices.erase( dsDevices.begin() + i );
+    else i++;
+  }
+
+  return static_cast<unsigned int>(dsDevices.size());
+}
+
+RtAudio::DeviceInfo RtApiDs :: getDeviceInfo( unsigned int device ) // probes output capabilities, then input capabilities, of the given device index
+{
+  RtAudio::DeviceInfo info;
+  info.probed = false; // remains false if probing fails before completion
+
+  if ( dsDevices.size() == 0 ) {
+    // Force a query of all devices
+    getDeviceCount();
+    if ( dsDevices.size() == 0 ) {
+      errorText_ = "RtApiDs::getDeviceInfo: no devices found!";
+      error( RtAudioError::INVALID_USE );
+      return info;
+    }
+  }
+
+  if ( device >= dsDevices.size() ) {
+    errorText_ = "RtApiDs::getDeviceInfo: device ID is invalid!";
+    error( RtAudioError::INVALID_USE );
+    return info;
+  }
+
+  HRESULT result;
+  if ( dsDevices[ device ].validId[0] == false ) goto probeInput; // no output GUID: skip straight to the capture probe
+
+  LPDIRECTSOUND output;
+  DSCAPS outCaps;
+  result = DirectSoundCreate( dsDevices[ device ].id[0], &output, NULL );
+  if ( FAILED( result ) ) { // output probe failures are warnings; the input side may still succeed
+    errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") opening output device (" << dsDevices[ device ].name << ")!";
+    errorText_ = errorStream_.str();
+    error( RtAudioError::WARNING );
+    goto probeInput;
+  }
+
+  outCaps.dwSize = sizeof( outCaps ); // dwSize must be set before GetCaps()
+  result = output->GetCaps( &outCaps );
+  if ( FAILED( result ) ) {
+    output->Release();
+    errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") getting capabilities!";
+    errorText_ = errorStream_.str();
+    error( RtAudioError::WARNING );
+    goto probeInput;
+  }
+
+  // Get output channel information.
+  info.outputChannels = ( outCaps.dwFlags & DSCAPS_PRIMARYSTEREO ) ? 2 : 1;
+
+  // Get sample rate information.
+  info.sampleRates.clear();
+  for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
+    if ( SAMPLE_RATES[k] >= (unsigned int) outCaps.dwMinSecondarySampleRate &&
+         SAMPLE_RATES[k] <= (unsigned int) outCaps.dwMaxSecondarySampleRate ) {
+      info.sampleRates.push_back( SAMPLE_RATES[k] );
+
+      if ( !info.preferredSampleRate || ( SAMPLE_RATES[k] <= 48000 && SAMPLE_RATES[k] > info.preferredSampleRate ) )
+        info.preferredSampleRate = SAMPLE_RATES[k]; // prefer the highest supported rate that does not exceed 48 kHz
+    }
+  }
+
+  // Get format information.
+  if ( outCaps.dwFlags & DSCAPS_PRIMARY16BIT ) info.nativeFormats |= RTAUDIO_SINT16;
+  if ( outCaps.dwFlags & DSCAPS_PRIMARY8BIT ) info.nativeFormats |= RTAUDIO_SINT8;
+
+  output->Release();
+
+  if ( getDefaultOutputDevice() == device )
+    info.isDefaultOutput = true;
+
+  if ( dsDevices[ device ].validId[1] == false ) { // output-only device: we are done
+    info.name = dsDevices[ device ].name;
+    info.probed = true;
+    return info;
+  }
+
+ probeInput:
+
+  LPDIRECTSOUNDCAPTURE input;
+  result = DirectSoundCaptureCreate( dsDevices[ device ].id[1], &input, NULL );
+  if ( FAILED( result ) ) {
+    errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") opening input device (" << dsDevices[ device ].name << ")!";
+    errorText_ = errorStream_.str();
+    error( RtAudioError::WARNING );
+    return info;
+  }
+
+  DSCCAPS inCaps;
+  inCaps.dwSize = sizeof( inCaps ); // dwSize must be set before GetCaps()
+  result = input->GetCaps( &inCaps );
+  if ( FAILED( result ) ) {
+    input->Release();
+    errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") getting object capabilities (" << dsDevices[ device ].name << ")!";
+    errorText_ = errorStream_.str();
+    error( RtAudioError::WARNING );
+    return info;
+  }
+
+  // Get input channel information.
+  info.inputChannels = inCaps.dwChannels;
+
+  // Get sample rate and format information.
+  std::vector<unsigned int> rates;
+  if ( inCaps.dwChannels >= 2 ) { // stereo (or more): check the WAVE_FORMAT_*S* stereo capability bits
+    if ( inCaps.dwFormats & WAVE_FORMAT_1S16 ) info.nativeFormats |= RTAUDIO_SINT16;
+    if ( inCaps.dwFormats & WAVE_FORMAT_2S16 ) info.nativeFormats |= RTAUDIO_SINT16;
+    if ( inCaps.dwFormats & WAVE_FORMAT_4S16 ) info.nativeFormats |= RTAUDIO_SINT16;
+    if ( inCaps.dwFormats & WAVE_FORMAT_96S16 ) info.nativeFormats |= RTAUDIO_SINT16;
+    if ( inCaps.dwFormats & WAVE_FORMAT_1S08 ) info.nativeFormats |= RTAUDIO_SINT8;
+    if ( inCaps.dwFormats & WAVE_FORMAT_2S08 ) info.nativeFormats |= RTAUDIO_SINT8;
+    if ( inCaps.dwFormats & WAVE_FORMAT_4S08 ) info.nativeFormats |= RTAUDIO_SINT8;
+    if ( inCaps.dwFormats & WAVE_FORMAT_96S08 ) info.nativeFormats |= RTAUDIO_SINT8;
+
+    if ( info.nativeFormats & RTAUDIO_SINT16 ) { // 16-bit support wins; collect its rates
+      if ( inCaps.dwFormats & WAVE_FORMAT_1S16 ) rates.push_back( 11025 );
+      if ( inCaps.dwFormats & WAVE_FORMAT_2S16 ) rates.push_back( 22050 );
+      if ( inCaps.dwFormats & WAVE_FORMAT_4S16 ) rates.push_back( 44100 );
+      if ( inCaps.dwFormats & WAVE_FORMAT_96S16 ) rates.push_back( 96000 );
+    }
+    else if ( info.nativeFormats & RTAUDIO_SINT8 ) {
+      if ( inCaps.dwFormats & WAVE_FORMAT_1S08 ) rates.push_back( 11025 );
+      if ( inCaps.dwFormats & WAVE_FORMAT_2S08 ) rates.push_back( 22050 );
+      if ( inCaps.dwFormats & WAVE_FORMAT_4S08 ) rates.push_back( 44100 );
+      if ( inCaps.dwFormats & WAVE_FORMAT_96S08 ) rates.push_back( 96000 );
+    }
+  }
+  else if ( inCaps.dwChannels == 1 ) { // mono: check the WAVE_FORMAT_*M* capability bits instead
+    if ( inCaps.dwFormats & WAVE_FORMAT_1M16 ) info.nativeFormats |= RTAUDIO_SINT16;
+    if ( inCaps.dwFormats & WAVE_FORMAT_2M16 ) info.nativeFormats |= RTAUDIO_SINT16;
+    if ( inCaps.dwFormats & WAVE_FORMAT_4M16 ) info.nativeFormats |= RTAUDIO_SINT16;
+    if ( inCaps.dwFormats & WAVE_FORMAT_96M16 ) info.nativeFormats |= RTAUDIO_SINT16;
+    if ( inCaps.dwFormats & WAVE_FORMAT_1M08 ) info.nativeFormats |= RTAUDIO_SINT8;
+    if ( inCaps.dwFormats & WAVE_FORMAT_2M08 ) info.nativeFormats |= RTAUDIO_SINT8;
+    if ( inCaps.dwFormats & WAVE_FORMAT_4M08 ) info.nativeFormats |= RTAUDIO_SINT8;
+    if ( inCaps.dwFormats & WAVE_FORMAT_96M08 ) info.nativeFormats |= RTAUDIO_SINT8;
+
+    if ( info.nativeFormats & RTAUDIO_SINT16 ) {
+      if ( inCaps.dwFormats & WAVE_FORMAT_1M16 ) rates.push_back( 11025 );
+      if ( inCaps.dwFormats & WAVE_FORMAT_2M16 ) rates.push_back( 22050 );
+      if ( inCaps.dwFormats & WAVE_FORMAT_4M16 ) rates.push_back( 44100 );
+      if ( inCaps.dwFormats & WAVE_FORMAT_96M16 ) rates.push_back( 96000 );
+    }
+    else if ( info.nativeFormats & RTAUDIO_SINT8 ) {
+      if ( inCaps.dwFormats & WAVE_FORMAT_1M08 ) rates.push_back( 11025 );
+      if ( inCaps.dwFormats & WAVE_FORMAT_2M08 ) rates.push_back( 22050 );
+      if ( inCaps.dwFormats & WAVE_FORMAT_4M08 ) rates.push_back( 44100 );
+      if ( inCaps.dwFormats & WAVE_FORMAT_96M08 ) rates.push_back( 96000 );
+    }
+  }
+  else info.inputChannels = 0; // technically, this would be an error
+
+  input->Release();
+
+  if ( info.inputChannels == 0 ) return info;
+
+  // Copy the supported rates to the info structure but avoid duplication.
+  bool found;
+  for ( unsigned int i=0; i<rates.size(); i++ ) {
+    found = false;
+    for ( unsigned int j=0; j<info.sampleRates.size(); j++ ) {
+      if ( rates[i] == info.sampleRates[j] ) {
+        found = true;
+        break;
+      }
+    }
+    if ( found == false ) info.sampleRates.push_back( rates[i] );
+  }
+  std::sort( info.sampleRates.begin(), info.sampleRates.end() );
+
+  // If device opens for both playback and capture, we determine the channels.
+  if ( info.outputChannels > 0 && info.inputChannels > 0 )
+    info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
+
+  if ( device == 0 ) info.isDefaultInput = true; // first device is the DirectSound default input
+
+  // Copy name and return.
+  info.name = dsDevices[ device ].name;
+  info.probed = true;
+  return info;
+}
+
+bool RtApiDs :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
+ unsigned int firstChannel, unsigned int sampleRate,
+ RtAudioFormat format, unsigned int *bufferSize,
+ RtAudio::StreamOptions *options )
+{
+ if ( channels + firstChannel > 2 ) {
+ errorText_ = "RtApiDs::probeDeviceOpen: DirectSound does not support more than 2 channels per device.";
+ return FAILURE;
+ }
+
+ size_t nDevices = dsDevices.size();
+ if ( nDevices == 0 ) {
+ // This should not happen because a check is made before this function is called.
+ errorText_ = "RtApiDs::probeDeviceOpen: no devices found!";
+ return FAILURE;
+ }
+
+ if ( device >= nDevices ) {
+ // This should not happen because a check is made before this function is called.
+ errorText_ = "RtApiDs::probeDeviceOpen: device ID is invalid!";
+ return FAILURE;
+ }
+
+ if ( mode == OUTPUT ) {
+ if ( dsDevices[ device ].validId[0] == false ) {
+ errorStream_ << "RtApiDs::probeDeviceOpen: device (" << device << ") does not support output!";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+ }
+ else { // mode == INPUT
+ if ( dsDevices[ device ].validId[1] == false ) {
+ errorStream_ << "RtApiDs::probeDeviceOpen: device (" << device << ") does not support input!";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+ }
+
+ // According to a note in PortAudio, using GetDesktopWindow()
+ // instead of GetForegroundWindow() is supposed to avoid problems
+ // that occur when the application's window is not the foreground
+ // window. Also, if the application window closes before the
+ // DirectSound buffer, DirectSound can crash. In the past, I had
+ // problems when using GetDesktopWindow() but it seems fine now
+ // (January 2010). I'll leave it commented here.
+ // HWND hWnd = GetForegroundWindow();
+ HWND hWnd = GetDesktopWindow();
+
+ // Check the numberOfBuffers parameter and limit the lowest value to
+ // two. This is a judgement call and a value of two is probably too
+ // low for capture, but it should work for playback.
+ int nBuffers = 0;
+ if ( options ) nBuffers = options->numberOfBuffers;
+ if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) nBuffers = 2;
+ if ( nBuffers < 2 ) nBuffers = 3;
+
+ // Check the lower range of the user-specified buffer size and set
+ // (arbitrarily) to a lower bound of 32.
+ if ( *bufferSize < 32 ) *bufferSize = 32;
+
+ // Create the wave format structure. The data format setting will
+ // be determined later.
+ WAVEFORMATEX waveFormat;
+ ZeroMemory( &waveFormat, sizeof(WAVEFORMATEX) );
+ waveFormat.wFormatTag = WAVE_FORMAT_PCM;
+ waveFormat.nChannels = channels + firstChannel;
+ waveFormat.nSamplesPerSec = (unsigned long) sampleRate;
+
+ // Determine the device buffer size. By default, we'll use the value
+ // defined above (32K), but we will grow it to make allowances for
+ // very large software buffer sizes.
+ DWORD dsBufferSize = MINIMUM_DEVICE_BUFFER_SIZE;
+ DWORD dsPointerLeadTime = 0;
+
+ void *ohandle = 0, *bhandle = 0;
+ HRESULT result;
+ if ( mode == OUTPUT ) {
+
+ LPDIRECTSOUND output;
+ result = DirectSoundCreate( dsDevices[ device ].id[0], &output, NULL );
+ if ( FAILED( result ) ) {
+ errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") opening output device (" << dsDevices[ device ].name << ")!";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ DSCAPS outCaps;
+ outCaps.dwSize = sizeof( outCaps );
+ result = output->GetCaps( &outCaps );
+ if ( FAILED( result ) ) {
+ output->Release();
+ errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting capabilities (" << dsDevices[ device ].name << ")!";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // Check channel information.
+ if ( channels + firstChannel == 2 && !( outCaps.dwFlags & DSCAPS_PRIMARYSTEREO ) ) {
+ errorStream_ << "RtApiDs::getDeviceInfo: the output device (" << dsDevices[ device ].name << ") does not support stereo playback.";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // Check format information. Use 16-bit format unless not
+ // supported or user requests 8-bit.
+ if ( outCaps.dwFlags & DSCAPS_PRIMARY16BIT &&
+ !( format == RTAUDIO_SINT8 && outCaps.dwFlags & DSCAPS_PRIMARY8BIT ) ) {
+ waveFormat.wBitsPerSample = 16;
+ stream_.deviceFormat[mode] = RTAUDIO_SINT16;
+ }
+ else {
+ waveFormat.wBitsPerSample = 8;
+ stream_.deviceFormat[mode] = RTAUDIO_SINT8;
+ }
+ stream_.userFormat = format;
+
+ // Update wave format structure and buffer information.
+ waveFormat.nBlockAlign = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
+ waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
+ dsPointerLeadTime = nBuffers * (*bufferSize) * (waveFormat.wBitsPerSample / 8) * channels;
+
+ // If the user wants an even bigger buffer, increase the device buffer size accordingly.
+ while ( dsPointerLeadTime * 2U > dsBufferSize )
+ dsBufferSize *= 2;
+
+ // Set cooperative level to DSSCL_EXCLUSIVE ... sound stops when window focus changes.
+ // result = output->SetCooperativeLevel( hWnd, DSSCL_EXCLUSIVE );
+ // Set cooperative level to DSSCL_PRIORITY ... sound remains when window focus changes.
+ result = output->SetCooperativeLevel( hWnd, DSSCL_PRIORITY );
+ if ( FAILED( result ) ) {
+ output->Release();
+ errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") setting cooperative level (" << dsDevices[ device ].name << ")!";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // Even though we will write to the secondary buffer, we need to
+ // access the primary buffer to set the correct output format
+ // (since the default is 8-bit, 22 kHz!). Setup the DS primary
+ // buffer description.
+ DSBUFFERDESC bufferDescription;
+ ZeroMemory( &bufferDescription, sizeof( DSBUFFERDESC ) );
+ bufferDescription.dwSize = sizeof( DSBUFFERDESC );
+ bufferDescription.dwFlags = DSBCAPS_PRIMARYBUFFER;
+
+ // Obtain the primary buffer
+ LPDIRECTSOUNDBUFFER buffer;
+ result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
+ if ( FAILED( result ) ) {
+ output->Release();
+ errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") accessing primary buffer (" << dsDevices[ device ].name << ")!";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // Set the primary DS buffer sound format.
+ result = buffer->SetFormat( &waveFormat );
+ if ( FAILED( result ) ) {
+ output->Release();
+ errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") setting primary buffer format (" << dsDevices[ device ].name << ")!";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // Setup the secondary DS buffer description.
+ ZeroMemory( &bufferDescription, sizeof( DSBUFFERDESC ) );
+ bufferDescription.dwSize = sizeof( DSBUFFERDESC );
+ bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
+ DSBCAPS_GLOBALFOCUS |
+ DSBCAPS_GETCURRENTPOSITION2 |
+ DSBCAPS_LOCHARDWARE ); // Force hardware mixing
+ bufferDescription.dwBufferBytes = dsBufferSize;
+ bufferDescription.lpwfxFormat = &waveFormat;
+
+ // Try to create the secondary DS buffer. If that doesn't work,
+ // try to use software mixing. Otherwise, there's a problem.
+ result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
+ if ( FAILED( result ) ) {
+ bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
+ DSBCAPS_GLOBALFOCUS |
+ DSBCAPS_GETCURRENTPOSITION2 |
+ DSBCAPS_LOCSOFTWARE ); // Force software mixing
+ result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
+ if ( FAILED( result ) ) {
+ output->Release();
+ errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") creating secondary buffer (" << dsDevices[ device ].name << ")!";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+ }
+
+ // Get the buffer size ... might be different from what we specified.
+ DSBCAPS dsbcaps;
+ dsbcaps.dwSize = sizeof( DSBCAPS );
+ result = buffer->GetCaps( &dsbcaps );
+ if ( FAILED( result ) ) {
+ output->Release();
+ buffer->Release();
+ errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting buffer settings (" << dsDevices[ device ].name << ")!";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ dsBufferSize = dsbcaps.dwBufferBytes;
+
+ // Lock the DS buffer
+ LPVOID audioPtr;
+ DWORD dataLen;
+ result = buffer->Lock( 0, dsBufferSize, &audioPtr, &dataLen, NULL, NULL, 0 );
+ if ( FAILED( result ) ) {
+ output->Release();
+ buffer->Release();
+ errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") locking buffer (" << dsDevices[ device ].name << ")!";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // Zero the DS buffer
+ ZeroMemory( audioPtr, dataLen );
+
+ // Unlock the DS buffer
+ result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
+ if ( FAILED( result ) ) {
+ output->Release();
+ buffer->Release();
+ errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") unlocking buffer (" << dsDevices[ device ].name << ")!";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ ohandle = (void *) output;
+ bhandle = (void *) buffer;
+ }
+
+ if ( mode == INPUT ) {
+
+ LPDIRECTSOUNDCAPTURE input;
+ result = DirectSoundCaptureCreate( dsDevices[ device ].id[1], &input, NULL );
+ if ( FAILED( result ) ) {
+ errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") opening input device (" << dsDevices[ device ].name << ")!";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ DSCCAPS inCaps;
+ inCaps.dwSize = sizeof( inCaps );
+ result = input->GetCaps( &inCaps );
+ if ( FAILED( result ) ) {
+ input->Release();
+ errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting input capabilities (" << dsDevices[ device ].name << ")!";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // Check channel information.
+ if ( inCaps.dwChannels < channels + firstChannel ) {
+ errorText_ = "RtApiDs::getDeviceInfo: the input device does not support requested input channels.";
+ return FAILURE;
+ }
+
+ // Check format information. Use 16-bit format unless user
+ // requests 8-bit.
+ DWORD deviceFormats;
+ if ( channels + firstChannel == 2 ) {
+ deviceFormats = WAVE_FORMAT_1S08 | WAVE_FORMAT_2S08 | WAVE_FORMAT_4S08 | WAVE_FORMAT_96S08;
+ if ( format == RTAUDIO_SINT8 && inCaps.dwFormats & deviceFormats ) {
+ waveFormat.wBitsPerSample = 8;
+ stream_.deviceFormat[mode] = RTAUDIO_SINT8;
+ }
+ else { // assume 16-bit is supported
+ waveFormat.wBitsPerSample = 16;
+ stream_.deviceFormat[mode] = RTAUDIO_SINT16;
+ }
+ }
+ else { // channel == 1
+ deviceFormats = WAVE_FORMAT_1M08 | WAVE_FORMAT_2M08 | WAVE_FORMAT_4M08 | WAVE_FORMAT_96M08;
+ if ( format == RTAUDIO_SINT8 && inCaps.dwFormats & deviceFormats ) {
+ waveFormat.wBitsPerSample = 8;
+ stream_.deviceFormat[mode] = RTAUDIO_SINT8;
+ }
+ else { // assume 16-bit is supported
+ waveFormat.wBitsPerSample = 16;
+ stream_.deviceFormat[mode] = RTAUDIO_SINT16;
+ }
+ }
+ stream_.userFormat = format;
+
+ // Update wave format structure and buffer information.
+ waveFormat.nBlockAlign = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
+ waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
+ dsPointerLeadTime = nBuffers * (*bufferSize) * (waveFormat.wBitsPerSample / 8) * channels;
+
+ // If the user wants an even bigger buffer, increase the device buffer size accordingly.
+ while ( dsPointerLeadTime * 2U > dsBufferSize )
+ dsBufferSize *= 2;
+
+ // Setup the secondary DS buffer description.
+ DSCBUFFERDESC bufferDescription;
+ ZeroMemory( &bufferDescription, sizeof( DSCBUFFERDESC ) );
+ bufferDescription.dwSize = sizeof( DSCBUFFERDESC );
+ bufferDescription.dwFlags = 0;
+ bufferDescription.dwReserved = 0;
+ bufferDescription.dwBufferBytes = dsBufferSize;
+ bufferDescription.lpwfxFormat = &waveFormat;
+
+ // Create the capture buffer.
+ LPDIRECTSOUNDCAPTUREBUFFER buffer;
+ result = input->CreateCaptureBuffer( &bufferDescription, &buffer, NULL );
+ if ( FAILED( result ) ) {
+ input->Release();
+ errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") creating input buffer (" << dsDevices[ device ].name << ")!";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // Get the buffer size ... might be different from what we specified.
+ DSCBCAPS dscbcaps;
+ dscbcaps.dwSize = sizeof( DSCBCAPS );
+ result = buffer->GetCaps( &dscbcaps );
+ if ( FAILED( result ) ) {
+ input->Release();
+ buffer->Release();
+ errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting buffer settings (" << dsDevices[ device ].name << ")!";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ dsBufferSize = dscbcaps.dwBufferBytes;
+
+ // NOTE: We could have a problem here if this is a duplex stream
+ // and the play and capture hardware buffer sizes are different
+ // (I'm actually not sure if that is a problem or not).
+ // Currently, we are not verifying that.
+
+ // Lock the capture buffer
+ LPVOID audioPtr;
+ DWORD dataLen;
+ result = buffer->Lock( 0, dsBufferSize, &audioPtr, &dataLen, NULL, NULL, 0 );
+ if ( FAILED( result ) ) {
+ input->Release();
+ buffer->Release();
+ errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") locking input buffer (" << dsDevices[ device ].name << ")!";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // Zero the buffer
+ ZeroMemory( audioPtr, dataLen );
+
+ // Unlock the buffer
+ result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
+ if ( FAILED( result ) ) {
+ input->Release();
+ buffer->Release();
+ errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") unlocking input buffer (" << dsDevices[ device ].name << ")!";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ ohandle = (void *) input;
+ bhandle = (void *) buffer;
+ }
+
+ // Set various stream parameters
+ DsHandle *handle = 0;
+ stream_.nDeviceChannels[mode] = channels + firstChannel;
+ stream_.nUserChannels[mode] = channels;
+ stream_.bufferSize = *bufferSize;
+ stream_.channelOffset[mode] = firstChannel;
+ stream_.deviceInterleaved[mode] = true;
+ if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
+ else stream_.userInterleaved = true;
+
+ // Set flag for buffer conversion
+ stream_.doConvertBuffer[mode] = false;
+ if (stream_.nUserChannels[mode] != stream_.nDeviceChannels[mode])
+ stream_.doConvertBuffer[mode] = true;
+ if (stream_.userFormat != stream_.deviceFormat[mode])
+ stream_.doConvertBuffer[mode] = true;
+ if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
+ stream_.nUserChannels[mode] > 1 )
+ stream_.doConvertBuffer[mode] = true;
+
+ // Allocate necessary internal buffers
+ long bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
+ stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
+ if ( stream_.userBuffer[mode] == NULL ) {
+ errorText_ = "RtApiDs::probeDeviceOpen: error allocating user buffer memory.";
+ goto error;
+ }
+
+ if ( stream_.doConvertBuffer[mode] ) {
+
+ bool makeBuffer = true;
+ bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
+ if ( mode == INPUT ) {
+ if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
+ unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
+ if ( bufferBytes <= (long) bytesOut ) makeBuffer = false;
+ }
+ }
+
+ if ( makeBuffer ) {
+ bufferBytes *= *bufferSize;
+ if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
+ stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
+ if ( stream_.deviceBuffer == NULL ) {
+ errorText_ = "RtApiDs::probeDeviceOpen: error allocating device buffer memory.";
+ goto error;
+ }
+ }
+ }
+
+ // Allocate our DsHandle structures for the stream.
+ if ( stream_.apiHandle == 0 ) {
+ try {
+ handle = new DsHandle;
+ }
+ catch ( std::bad_alloc& ) {
+ errorText_ = "RtApiDs::probeDeviceOpen: error allocating AsioHandle memory.";
+ goto error;
+ }
+
+ // Create a manual-reset event.
+ handle->condition = CreateEvent( NULL, // no security
+ TRUE, // manual-reset
+ FALSE, // non-signaled initially
+ NULL ); // unnamed
+ stream_.apiHandle = (void *) handle;
+ }
+ else
+ handle = (DsHandle *) stream_.apiHandle;
+ handle->id[mode] = ohandle;
+ handle->buffer[mode] = bhandle;
+ handle->dsBufferSize[mode] = dsBufferSize;
+ handle->dsPointerLeadTime[mode] = dsPointerLeadTime;
+
+ stream_.device[mode] = device;
+ stream_.state = STREAM_STOPPED;
+ if ( stream_.mode == OUTPUT && mode == INPUT )
+ // We had already set up an output stream.
+ stream_.mode = DUPLEX;
+ else
+ stream_.mode = mode;
+ stream_.nBuffers = nBuffers;
+ stream_.sampleRate = sampleRate;
+
+ // Setup the buffer conversion information structure.
+ if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
+
+ // Setup the callback thread.
+ if ( stream_.callbackInfo.isRunning == false ) {
+ unsigned threadId;
+ stream_.callbackInfo.isRunning = true;
+ stream_.callbackInfo.object = (void *) this;
+ stream_.callbackInfo.thread = _beginthreadex( NULL, 0, &callbackHandler,
+ &stream_.callbackInfo, 0, &threadId );
+ if ( stream_.callbackInfo.thread == 0 ) {
+ errorText_ = "RtApiDs::probeDeviceOpen: error creating callback thread!";
+ goto error;
+ }
+
+ // Boost DS thread priority
+ SetThreadPriority( (HANDLE) stream_.callbackInfo.thread, THREAD_PRIORITY_HIGHEST );
+ }
+ return SUCCESS;
+
+ error:
+ if ( handle ) {
+ if ( handle->buffer[0] ) { // the object pointer can be NULL and valid
+ LPDIRECTSOUND object = (LPDIRECTSOUND) handle->id[0];
+ LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
+ if ( buffer ) buffer->Release();
+ object->Release();
+ }
+ if ( handle->buffer[1] ) {
+ LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handle->id[1];
+ LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
+ if ( buffer ) buffer->Release();
+ object->Release();
+ }
+ CloseHandle( handle->condition );
+ delete handle;
+ stream_.apiHandle = 0;
+ }
+
+ for ( int i=0; i<2; i++ ) {
+ if ( stream_.userBuffer[i] ) {
+ free( stream_.userBuffer[i] );
+ stream_.userBuffer[i] = 0;
+ }
+ }
+
+ if ( stream_.deviceBuffer ) {
+ free( stream_.deviceBuffer );
+ stream_.deviceBuffer = 0;
+ }
+
+ stream_.state = STREAM_CLOSED;
+ return FAILURE;
+}
+
+// Tear down an open DirectSound stream.
+// Sequence matters: (1) ask the callback thread to exit and join it,
+// (2) Stop() and Release() the playback/capture buffers and their parent
+// DirectSound objects, (3) free the user and conversion buffers allocated
+// in probeDeviceOpen(), (4) reset stream mode/state.
+// Calling with no open stream only emits a WARNING and returns.
+void RtApiDs :: closeStream()
+{
+ if ( stream_.state == STREAM_CLOSED ) {
+ errorText_ = "RtApiDs::closeStream(): no open stream to close!";
+ error( RtAudioError::WARNING );
+ return;
+ }
+
+ // Stop the callback thread.
+ // Clear the run flag, then block until the thread observes it and exits,
+ // so nothing touches the buffers we are about to release.
+ stream_.callbackInfo.isRunning = false;
+ WaitForSingleObject( (HANDLE) stream_.callbackInfo.thread, INFINITE );
+ CloseHandle( (HANDLE) stream_.callbackInfo.thread );
+
+ DsHandle *handle = (DsHandle *) stream_.apiHandle;
+ if ( handle ) {
+ // Slot 0 = playback (LPDIRECTSOUND + secondary buffer).
+ if ( handle->buffer[0] ) { // the object pointer can be NULL and valid
+ LPDIRECTSOUND object = (LPDIRECTSOUND) handle->id[0];
+ LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
+ if ( buffer ) {
+ buffer->Stop();
+ buffer->Release();
+ }
+ object->Release();
+ }
+ // Slot 1 = capture (LPDIRECTSOUNDCAPTURE + capture buffer).
+ if ( handle->buffer[1] ) {
+ LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handle->id[1];
+ LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
+ if ( buffer ) {
+ buffer->Stop();
+ buffer->Release();
+ }
+ object->Release();
+ }
+ // Release the manual-reset event used for the drain handshake.
+ CloseHandle( handle->condition );
+ delete handle;
+ stream_.apiHandle = 0;
+ }
+
+ // Free the per-mode user buffers (index 0 = output, 1 = input).
+ for ( int i=0; i<2; i++ ) {
+ if ( stream_.userBuffer[i] ) {
+ free( stream_.userBuffer[i] );
+ stream_.userBuffer[i] = 0;
+ }
+ }
+
+ // Free the shared format/channel conversion buffer, if one was allocated.
+ if ( stream_.deviceBuffer ) {
+ free( stream_.deviceBuffer );
+ stream_.deviceBuffer = 0;
+ }
+
+ stream_.mode = UNINITIALIZED;
+ stream_.state = STREAM_CLOSED;
+}
+
+// Start a stopped stream: begin looping playback and/or capture on the
+// DirectSound buffers and mark the stream RUNNING.
+// On any DirectSound failure, falls through the 'unlock' label and reports
+// a SYSTEM_ERROR (the stream is left in its prior state).
+// Calling on an already-running stream only emits a WARNING.
+void RtApiDs :: startStream()
+{
+ verifyStream();
+ if ( stream_.state == STREAM_RUNNING ) {
+ errorText_ = "RtApiDs::startStream(): the stream is already running!";
+ error( RtAudioError::WARNING );
+ return;
+ }
+
+ // Record the start timestamp used for stream-time bookkeeping.
+ #if defined( HAVE_GETTIMEOFDAY )
+ gettimeofday( &stream_.lastTickTimestamp, NULL );
+ #endif
+
+ DsHandle *handle = (DsHandle *) stream_.apiHandle;
+
+ // Increase scheduler frequency on lesser windows (a side-effect of
+ // increasing timer accuracy). On greater windows (Win2K or later),
+ // this is already in effect. Reverted by timeEndPeriod() in stopStream().
+ timeBeginPeriod( 1 );
+
+ // Reset the startup-synchronization state consumed by callbackEvent().
+ buffersRolling = false;
+ duplexPrerollBytes = 0;
+
+ if ( stream_.mode == DUPLEX ) {
+ // 0.5 seconds of silence in DUPLEX mode while the devices spin up and synchronize.
+ duplexPrerollBytes = (int) ( 0.5 * stream_.sampleRate * formatBytes( stream_.deviceFormat[1] ) * stream_.nDeviceChannels[1] );
+ }
+
+ HRESULT result = 0;
+ if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
+
+ // Start looping playback on the secondary (output) buffer.
+ LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
+ result = buffer->Play( 0, 0, DSBPLAY_LOOPING );
+ if ( FAILED( result ) ) {
+ errorStream_ << "RtApiDs::startStream: error (" << getErrorString( result ) << ") starting output buffer!";
+ errorText_ = errorStream_.str();
+ goto unlock;
+ }
+ }
+
+ if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
+
+ // Start looping capture on the input buffer.
+ LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
+ result = buffer->Start( DSCBSTART_LOOPING );
+ if ( FAILED( result ) ) {
+ errorStream_ << "RtApiDs::startStream: error (" << getErrorString( result ) << ") starting input buffer!";
+ errorText_ = errorStream_.str();
+ goto unlock;
+ }
+ }
+
+ // Clear drain state and the drain-completion event, then go live.
+ handle->drainCounter = 0;
+ handle->internalDrain = false;
+ ResetEvent( handle->condition );
+ stream_.state = STREAM_RUNNING;
+
+ unlock:
+ if ( FAILED( result ) ) error( RtAudioError::SYSTEM_ERROR );
+}
+
+// Stop a running stream, draining pending output first.
+// If drainCounter is 0 (normal stop), it is set to 2 and we block on
+// handle->condition until callbackEvent() signals that the output has
+// drained; abortStream() pre-sets drainCounter to 2 so this wait is
+// skipped and the stream halts immediately.
+// Both device buffers are stopped and zeroed so a later startStream()
+// does not replay stale audio. Any DirectSound failure jumps to 'unlock',
+// which always reverts the timer period and releases the stream mutex.
+void RtApiDs :: stopStream()
+{
+ verifyStream();
+ if ( stream_.state == STREAM_STOPPED ) {
+ errorText_ = "RtApiDs::stopStream(): the stream is already stopped!";
+ error( RtAudioError::WARNING );
+ return;
+ }
+
+ HRESULT result = 0;
+ LPVOID audioPtr;
+ DWORD dataLen;
+ DsHandle *handle = (DsHandle *) stream_.apiHandle;
+ if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
+ if ( handle->drainCounter == 0 ) {
+ // Request a drain and wait for callbackEvent() to signal completion.
+ handle->drainCounter = 2;
+ WaitForSingleObject( handle->condition, INFINITE ); // block until signaled
+ }
+
+ stream_.state = STREAM_STOPPED;
+
+ MUTEX_LOCK( &stream_.mutex );
+
+ // Stop the buffer and clear memory
+ LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
+ result = buffer->Stop();
+ if ( FAILED( result ) ) {
+ errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") stopping output buffer!";
+ errorText_ = errorStream_.str();
+ goto unlock;
+ }
+
+ // Lock the buffer and clear it so that if we start to play again,
+ // we won't have old data playing.
+ result = buffer->Lock( 0, handle->dsBufferSize[0], &audioPtr, &dataLen, NULL, NULL, 0 );
+ if ( FAILED( result ) ) {
+ errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") locking output buffer!";
+ errorText_ = errorStream_.str();
+ goto unlock;
+ }
+
+ // Zero the DS buffer
+ ZeroMemory( audioPtr, dataLen );
+
+ // Unlock the DS buffer
+ result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
+ if ( FAILED( result ) ) {
+ errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") unlocking output buffer!";
+ errorText_ = errorStream_.str();
+ goto unlock;
+ }
+
+ // If we start playing again, we must begin at beginning of buffer.
+ handle->bufferPointer[0] = 0;
+ }
+
+ if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
+ LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
+ audioPtr = NULL;
+ dataLen = 0;
+
+ stream_.state = STREAM_STOPPED;
+
+ // In DUPLEX mode the mutex was already taken in the output branch above;
+ // lock it here only for INPUT-only streams.
+ if ( stream_.mode != DUPLEX )
+ MUTEX_LOCK( &stream_.mutex );
+
+ result = buffer->Stop();
+ if ( FAILED( result ) ) {
+ errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") stopping input buffer!";
+ errorText_ = errorStream_.str();
+ goto unlock;
+ }
+
+ // Lock the buffer and clear it so that if we start to play again,
+ // we won't have old data playing.
+ result = buffer->Lock( 0, handle->dsBufferSize[1], &audioPtr, &dataLen, NULL, NULL, 0 );
+ if ( FAILED( result ) ) {
+ errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") locking input buffer!";
+ errorText_ = errorStream_.str();
+ goto unlock;
+ }
+
+ // Zero the DS buffer
+ ZeroMemory( audioPtr, dataLen );
+
+ // Unlock the DS buffer
+ result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
+ if ( FAILED( result ) ) {
+ errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") unlocking input buffer!";
+ errorText_ = errorStream_.str();
+ goto unlock;
+ }
+
+ // If we start recording again, we must begin at beginning of buffer.
+ handle->bufferPointer[1] = 0;
+ }
+
+ unlock:
+ timeEndPeriod( 1 ); // revert to normal scheduler frequency on lesser windows.
+ MUTEX_UNLOCK( &stream_.mutex );
+
+ if ( FAILED( result ) ) error( RtAudioError::SYSTEM_ERROR );
+}
+
+// Stop the stream immediately, discarding pending output.
+// Pre-setting drainCounter to 2 makes stopStream() skip its
+// wait-for-drain handshake (it only waits when drainCounter == 0),
+// so playback halts without letting buffered audio finish.
+void RtApiDs :: abortStream()
+{
+ verifyStream();
+ if ( stream_.state == STREAM_STOPPED ) {
+ errorText_ = "RtApiDs::abortStream(): the stream is already stopped!";
+ error( RtAudioError::WARNING );
+ return;
+ }
+
+ DsHandle *handle = (DsHandle *) stream_.apiHandle;
+ handle->drainCounter = 2;
+
+ stopStream();
+}
+
+void RtApiDs :: callbackEvent()
+{
+ if ( stream_.state == STREAM_STOPPED || stream_.state == STREAM_STOPPING ) {
+ Sleep( 50 ); // sleep 50 milliseconds
+ return;
+ }
+
+ if ( stream_.state == STREAM_CLOSED ) {
+ errorText_ = "RtApiDs::callbackEvent(): the stream is closed ... this shouldn't happen!";
+ error( RtAudioError::WARNING );
+ return;
+ }
+
+ CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
+ DsHandle *handle = (DsHandle *) stream_.apiHandle;
+
+ // Check if we were draining the stream and signal is finished.
+ if ( handle->drainCounter > stream_.nBuffers + 2 ) {
+
+ stream_.state = STREAM_STOPPING;
+ if ( handle->internalDrain == false )
+ SetEvent( handle->condition );
+ else
+ stopStream();
+ return;
+ }
+
+ // Invoke user callback to get fresh output data UNLESS we are
+ // draining stream.
+ if ( handle->drainCounter == 0 ) {
+ RtAudioCallback callback = (RtAudioCallback) info->callback;
+ double streamTime = getStreamTime();
+ RtAudioStreamStatus status = 0;
+ if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
+ status |= RTAUDIO_OUTPUT_UNDERFLOW;
+ handle->xrun[0] = false;
+ }
+ if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
+ status |= RTAUDIO_INPUT_OVERFLOW;
+ handle->xrun[1] = false;
+ }
+ int cbReturnValue = callback( stream_.userBuffer[0], stream_.userBuffer[1],
+ stream_.bufferSize, streamTime, status, info->userData );
+ if ( cbReturnValue == 2 ) {
+ stream_.state = STREAM_STOPPING;
+ handle->drainCounter = 2;
+ abortStream();
+ return;
+ }
+ else if ( cbReturnValue == 1 ) {
+ handle->drainCounter = 1;
+ handle->internalDrain = true;
+ }
+ }
+
+ HRESULT result;
+ DWORD currentWritePointer, safeWritePointer;
+ DWORD currentReadPointer, safeReadPointer;
+ UINT nextWritePointer;
+
+ LPVOID buffer1 = NULL;
+ LPVOID buffer2 = NULL;
+ DWORD bufferSize1 = 0;
+ DWORD bufferSize2 = 0;
+
+ char *buffer;
+ long bufferBytes;
+
+ MUTEX_LOCK( &stream_.mutex );
+ if ( stream_.state == STREAM_STOPPED ) {
+ MUTEX_UNLOCK( &stream_.mutex );
+ return;
+ }
+
+ if ( buffersRolling == false ) {
+ if ( stream_.mode == DUPLEX ) {
+ //assert( handle->dsBufferSize[0] == handle->dsBufferSize[1] );
+
+ // It takes a while for the devices to get rolling. As a result,
+ // there's no guarantee that the capture and write device pointers
+ // will move in lockstep. Wait here for both devices to start
+ // rolling, and then set our buffer pointers accordingly.
+ // e.g. Crystal Drivers: the capture buffer starts up 5700 to 9600
+ // bytes later than the write buffer.
+
+ // Stub: a serious risk of having a pre-emptive scheduling round
+ // take place between the two GetCurrentPosition calls... but I'm
+ // really not sure how to solve the problem. Temporarily boost to
+ // Realtime priority, maybe; but I'm not sure what priority the
+ // DirectSound service threads run at. We *should* be roughly
+ // within a ms or so of correct.
+
+ LPDIRECTSOUNDBUFFER dsWriteBuffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
+ LPDIRECTSOUNDCAPTUREBUFFER dsCaptureBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
+
+ DWORD startSafeWritePointer, startSafeReadPointer;
+
+ result = dsWriteBuffer->GetCurrentPosition( NULL, &startSafeWritePointer );
+ if ( FAILED( result ) ) {
+ errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
+ errorText_ = errorStream_.str();
+ MUTEX_UNLOCK( &stream_.mutex );
+ error( RtAudioError::SYSTEM_ERROR );
+ return;
+ }
+ result = dsCaptureBuffer->GetCurrentPosition( NULL, &startSafeReadPointer );
+ if ( FAILED( result ) ) {
+ errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
+ errorText_ = errorStream_.str();
+ MUTEX_UNLOCK( &stream_.mutex );
+ error( RtAudioError::SYSTEM_ERROR );
+ return;
+ }
+ while ( true ) {
+ result = dsWriteBuffer->GetCurrentPosition( NULL, &safeWritePointer );
+ if ( FAILED( result ) ) {
+ errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
+ errorText_ = errorStream_.str();
+ MUTEX_UNLOCK( &stream_.mutex );
+ error( RtAudioError::SYSTEM_ERROR );
+ return;
+ }
+ result = dsCaptureBuffer->GetCurrentPosition( NULL, &safeReadPointer );
+ if ( FAILED( result ) ) {
+ errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
+ errorText_ = errorStream_.str();
+ MUTEX_UNLOCK( &stream_.mutex );
+ error( RtAudioError::SYSTEM_ERROR );
+ return;
+ }
+ if ( safeWritePointer != startSafeWritePointer && safeReadPointer != startSafeReadPointer ) break;
+ Sleep( 1 );
+ }
+
+ //assert( handle->dsBufferSize[0] == handle->dsBufferSize[1] );
+
+ handle->bufferPointer[0] = safeWritePointer + handle->dsPointerLeadTime[0];
+ if ( handle->bufferPointer[0] >= handle->dsBufferSize[0] ) handle->bufferPointer[0] -= handle->dsBufferSize[0];
+ handle->bufferPointer[1] = safeReadPointer;
+ }
+ else if ( stream_.mode == OUTPUT ) {
+
+ // Set the proper nextWritePosition after initial startup.
+ LPDIRECTSOUNDBUFFER dsWriteBuffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
+ result = dsWriteBuffer->GetCurrentPosition( &currentWritePointer, &safeWritePointer );
+ if ( FAILED( result ) ) {
+ errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
+ errorText_ = errorStream_.str();
+ MUTEX_UNLOCK( &stream_.mutex );
+ error( RtAudioError::SYSTEM_ERROR );
+ return;
+ }
+ handle->bufferPointer[0] = safeWritePointer + handle->dsPointerLeadTime[0];
+ if ( handle->bufferPointer[0] >= handle->dsBufferSize[0] ) handle->bufferPointer[0] -= handle->dsBufferSize[0];
+ }
+
+ buffersRolling = true;
+ }
+
+ if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
+
+ LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
+
+ if ( handle->drainCounter > 1 ) { // write zeros to the output stream
+ bufferBytes = stream_.bufferSize * stream_.nUserChannels[0];
+ bufferBytes *= formatBytes( stream_.userFormat );
+ memset( stream_.userBuffer[0], 0, bufferBytes );
+ }
+
+ // Setup parameters and do buffer conversion if necessary.
+ if ( stream_.doConvertBuffer[0] ) {
+ buffer = stream_.deviceBuffer;
+ convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
+ bufferBytes = stream_.bufferSize * stream_.nDeviceChannels[0];
+ bufferBytes *= formatBytes( stream_.deviceFormat[0] );
+ }
+ else {
+ buffer = stream_.userBuffer[0];
+ bufferBytes = stream_.bufferSize * stream_.nUserChannels[0];
+ bufferBytes *= formatBytes( stream_.userFormat );
+ }
+
+ // No byte swapping necessary in DirectSound implementation.
+
+ // Ahhh ... windoze. 16-bit data is signed but 8-bit data is
+ // unsigned. So, we need to convert our signed 8-bit data here to
+ // unsigned.
+ if ( stream_.deviceFormat[0] == RTAUDIO_SINT8 )
+ for ( int i=0; i<bufferBytes; i++ ) buffer[i] = (unsigned char) ( buffer[i] + 128 );
+
+ DWORD dsBufferSize = handle->dsBufferSize[0];
+ nextWritePointer = handle->bufferPointer[0];
+
+ DWORD endWrite, leadPointer;
+ while ( true ) {
+ // Find out where the read and "safe write" pointers are.
+ result = dsBuffer->GetCurrentPosition( &currentWritePointer, &safeWritePointer );
+ if ( FAILED( result ) ) {
+ errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
+ errorText_ = errorStream_.str();
+ MUTEX_UNLOCK( &stream_.mutex );
+ error( RtAudioError::SYSTEM_ERROR );
+ return;
+ }
+
+ // We will copy our output buffer into the region between
+ // safeWritePointer and leadPointer. If leadPointer is not
+ // beyond the next endWrite position, wait until it is.
+ leadPointer = safeWritePointer + handle->dsPointerLeadTime[0];
+ //std::cout << "safeWritePointer = " << safeWritePointer << ", leadPointer = " << leadPointer << ", nextWritePointer = " << nextWritePointer << std::endl;
+ if ( leadPointer > dsBufferSize ) leadPointer -= dsBufferSize;
+ if ( leadPointer < nextWritePointer ) leadPointer += dsBufferSize; // unwrap offset
+ endWrite = nextWritePointer + bufferBytes;
+
+ // Check whether the entire write region is behind the play pointer.
+ if ( leadPointer >= endWrite ) break;
+
+ // If we are here, then we must wait until the leadPointer advances
+ // beyond the end of our next write region. We use the
+ // Sleep() function to suspend operation until that happens.
+ double millis = ( endWrite - leadPointer ) * 1000.0;
+ millis /= ( formatBytes( stream_.deviceFormat[0]) * stream_.nDeviceChannels[0] * stream_.sampleRate);
+ if ( millis < 1.0 ) millis = 1.0;
+ Sleep( (DWORD) millis );
+ }
+
+ if ( dsPointerBetween( nextWritePointer, safeWritePointer, currentWritePointer, dsBufferSize )
+ || dsPointerBetween( endWrite, safeWritePointer, currentWritePointer, dsBufferSize ) ) {
+ // We've strayed into the forbidden zone ... resync the read pointer.
+ handle->xrun[0] = true;
+ nextWritePointer = safeWritePointer + handle->dsPointerLeadTime[0] - bufferBytes;
+ if ( nextWritePointer >= dsBufferSize ) nextWritePointer -= dsBufferSize;
+ handle->bufferPointer[0] = nextWritePointer;
+ endWrite = nextWritePointer + bufferBytes;
+ }
+
+ // Lock free space in the buffer
+ result = dsBuffer->Lock( nextWritePointer, bufferBytes, &buffer1,
+ &bufferSize1, &buffer2, &bufferSize2, 0 );
+ if ( FAILED( result ) ) {
+ errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") locking buffer during playback!";
+ errorText_ = errorStream_.str();
+ MUTEX_UNLOCK( &stream_.mutex );
+ error( RtAudioError::SYSTEM_ERROR );
+ return;
+ }
+
+ // Copy our buffer into the DS buffer
+ CopyMemory( buffer1, buffer, bufferSize1 );
+ if ( buffer2 != NULL ) CopyMemory( buffer2, buffer+bufferSize1, bufferSize2 );
+
+ // Update our buffer offset and unlock sound buffer
+ dsBuffer->Unlock( buffer1, bufferSize1, buffer2, bufferSize2 );
+ if ( FAILED( result ) ) {
+ errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") unlocking buffer during playback!";
+ errorText_ = errorStream_.str();
+ MUTEX_UNLOCK( &stream_.mutex );
+ error( RtAudioError::SYSTEM_ERROR );
+ return;
+ }
+ nextWritePointer = ( nextWritePointer + bufferSize1 + bufferSize2 ) % dsBufferSize;
+ handle->bufferPointer[0] = nextWritePointer;
+ }
+
+ // Don't bother draining input
+ if ( handle->drainCounter ) {
+ handle->drainCounter++;
+ goto unlock;
+ }
+
+ if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
+
+ // Setup parameters.
+ if ( stream_.doConvertBuffer[1] ) {
+ buffer = stream_.deviceBuffer;
+ bufferBytes = stream_.bufferSize * stream_.nDeviceChannels[1];
+ bufferBytes *= formatBytes( stream_.deviceFormat[1] );
+ }
+ else {
+ buffer = stream_.userBuffer[1];
+ bufferBytes = stream_.bufferSize * stream_.nUserChannels[1];
+ bufferBytes *= formatBytes( stream_.userFormat );
+ }
+
+ LPDIRECTSOUNDCAPTUREBUFFER dsBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
+ long nextReadPointer = handle->bufferPointer[1];
+ DWORD dsBufferSize = handle->dsBufferSize[1];
+
+ // Find out where the write and "safe read" pointers are.
+ result = dsBuffer->GetCurrentPosition( &currentReadPointer, &safeReadPointer );
+ if ( FAILED( result ) ) {
+ errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
+ errorText_ = errorStream_.str();
+ MUTEX_UNLOCK( &stream_.mutex );
+ error( RtAudioError::SYSTEM_ERROR );
+ return;
+ }
+
+ if ( safeReadPointer < (DWORD)nextReadPointer ) safeReadPointer += dsBufferSize; // unwrap offset
+ DWORD endRead = nextReadPointer + bufferBytes;
+
+ // Handling depends on whether we are INPUT or DUPLEX.
+ // If we're in INPUT mode then waiting is a good thing. If we're in DUPLEX mode,
+ // then a wait here will drag the write pointers into the forbidden zone.
+ //
+ // In DUPLEX mode, rather than wait, we will back off the read pointer until
+ // it's in a safe position. This causes dropouts, but it seems to be the only
+ // practical way to sync up the read and write pointers reliably, given the
+ // the very complex relationship between phase and increment of the read and write
+ // pointers.
+ //
+ // In order to minimize audible dropouts in DUPLEX mode, we will
+ // provide a pre-roll period of 0.5 seconds in which we return
+ // zeros from the read buffer while the pointers sync up.
+
+ if ( stream_.mode == DUPLEX ) {
+ if ( safeReadPointer < endRead ) {
+ if ( duplexPrerollBytes <= 0 ) {
+ // Pre-roll time over. Be more agressive.
+ int adjustment = endRead-safeReadPointer;
+
+ handle->xrun[1] = true;
+ // Two cases:
+ // - large adjustments: we've probably run out of CPU cycles, so just resync exactly,
+ // and perform fine adjustments later.
+ // - small adjustments: back off by twice as much.
+ if ( adjustment >= 2*bufferBytes )
+ nextReadPointer = safeReadPointer-2*bufferBytes;
+ else
+ nextReadPointer = safeReadPointer-bufferBytes-adjustment;
+
+ if ( nextReadPointer < 0 ) nextReadPointer += dsBufferSize;
+
+ }
+ else {
+ // In pre=roll time. Just do it.
+ nextReadPointer = safeReadPointer - bufferBytes;
+ while ( nextReadPointer < 0 ) nextReadPointer += dsBufferSize;
+ }
+ endRead = nextReadPointer + bufferBytes;
+ }
+ }
+ else { // mode == INPUT
+ while ( safeReadPointer < endRead && stream_.callbackInfo.isRunning ) {
+ // See comments for playback.
+ double millis = (endRead - safeReadPointer) * 1000.0;
+ millis /= ( formatBytes(stream_.deviceFormat[1]) * stream_.nDeviceChannels[1] * stream_.sampleRate);
+ if ( millis < 1.0 ) millis = 1.0;
+ Sleep( (DWORD) millis );
+
+ // Wake up and find out where we are now.
+        result = dsBuffer->GetCurrentPosition( &currentReadPointer, &safeReadPointer );
+ if ( FAILED( result ) ) {
+ errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
+ errorText_ = errorStream_.str();
+ MUTEX_UNLOCK( &stream_.mutex );
+ error( RtAudioError::SYSTEM_ERROR );
+ return;
+ }
+
+ if ( safeReadPointer < (DWORD)nextReadPointer ) safeReadPointer += dsBufferSize; // unwrap offset
+ }
+ }
+
+ // Lock free space in the buffer
+ result = dsBuffer->Lock( nextReadPointer, bufferBytes, &buffer1,
+ &bufferSize1, &buffer2, &bufferSize2, 0 );
+ if ( FAILED( result ) ) {
+ errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") locking capture buffer!";
+ errorText_ = errorStream_.str();
+ MUTEX_UNLOCK( &stream_.mutex );
+ error( RtAudioError::SYSTEM_ERROR );
+ return;
+ }
+
+ if ( duplexPrerollBytes <= 0 ) {
+ // Copy our buffer into the DS buffer
+ CopyMemory( buffer, buffer1, bufferSize1 );
+ if ( buffer2 != NULL ) CopyMemory( buffer+bufferSize1, buffer2, bufferSize2 );
+ }
+ else {
+ memset( buffer, 0, bufferSize1 );
+ if ( buffer2 != NULL ) memset( buffer + bufferSize1, 0, bufferSize2 );
+ duplexPrerollBytes -= bufferSize1 + bufferSize2;
+ }
+
+ // Update our buffer offset and unlock sound buffer
+ nextReadPointer = ( nextReadPointer + bufferSize1 + bufferSize2 ) % dsBufferSize;
+ dsBuffer->Unlock( buffer1, bufferSize1, buffer2, bufferSize2 );
+ if ( FAILED( result ) ) {
+ errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") unlocking capture buffer!";
+ errorText_ = errorStream_.str();
+ MUTEX_UNLOCK( &stream_.mutex );
+ error( RtAudioError::SYSTEM_ERROR );
+ return;
+ }
+ handle->bufferPointer[1] = nextReadPointer;
+
+ // No byte swapping necessary in DirectSound implementation.
+
+ // If necessary, convert 8-bit data from unsigned to signed.
+ if ( stream_.deviceFormat[1] == RTAUDIO_SINT8 )
+ for ( int j=0; j<bufferBytes; j++ ) buffer[j] = (signed char) ( buffer[j] - 128 );
+
+ // Do buffer conversion if necessary.
+ if ( stream_.doConvertBuffer[1] )
+ convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
+ }
+
+ unlock:
+ MUTEX_UNLOCK( &stream_.mutex );
+ RtApi::tickStreamTime();
+}
+
+// Definitions for utility functions and callbacks
+// specific to the DirectSound implementation.
+
+// Thread entry point for the DirectSound callback loop. It simply pumps
+// callbackEvent() on the RtApiDs object until the stream clears the
+// isRunning flag, then terminates the worker thread.
+static unsigned __stdcall callbackHandler( void *ptr )
+{
+  CallbackInfo *callbackInfo = (CallbackInfo *) ptr;
+  RtApiDs *dsApi = (RtApiDs *) callbackInfo->object;
+
+  while ( callbackInfo->isRunning == true )
+    dsApi->callbackEvent();
+
+  _endthreadex( 0 );
+  return 0;
+}
+
+// Enumeration callback used with DirectSound(Capture)Enumerate. For each
+// reported device it opens the device, checks its capabilities and, when the
+// device is usable, records its name and GUID in the DsDevice vector passed
+// through lpContext (wrapped in a DsProbeData). Always returns TRUE so that
+// enumeration continues to the next device.
+static BOOL CALLBACK deviceQueryCallback( LPGUID lpguid,
+                                          LPCTSTR description,
+                                          LPCTSTR /*module*/,
+                                          LPVOID lpContext )
+{
+  struct DsProbeData& probeInfo = *(struct DsProbeData*) lpContext;
+  std::vector<struct DsDevice>& dsDevices = *probeInfo.dsDevices;
+
+  HRESULT hr;
+  bool validDevice = false;
+  if ( probeInfo.isInput == true ) {
+    DSCCAPS caps;
+    LPDIRECTSOUNDCAPTURE object;
+
+    hr = DirectSoundCaptureCreate( lpguid, &object, NULL );
+    if ( hr != DS_OK ) return TRUE;
+
+    caps.dwSize = sizeof(caps);
+    hr = object->GetCaps( &caps );
+    if ( hr == DS_OK ) {
+      // A capture device is valid if it reports at least one channel and
+      // at least one supported format.
+      if ( caps.dwChannels > 0 && caps.dwFormats > 0 )
+        validDevice = true;
+    }
+    object->Release();
+  }
+  else {
+    DSCAPS caps;
+    LPDIRECTSOUND object;
+    hr = DirectSoundCreate( lpguid, &object, NULL );
+    if ( hr != DS_OK ) return TRUE;
+
+    caps.dwSize = sizeof(caps);
+    hr = object->GetCaps( &caps );
+    if ( hr == DS_OK ) {
+      // An output device is valid if its primary buffer is mono or stereo.
+      if ( caps.dwFlags & DSCAPS_PRIMARYMONO || caps.dwFlags & DSCAPS_PRIMARYSTEREO )
+        validDevice = true;
+    }
+    object->Release();
+  }
+
+  // If good device, then save its name and guid.
+  std::string name = convertCharPointerToStdString( description );
+  //if ( name == "Primary Sound Driver" || name == "Primary Sound Capture Driver" )
+  if ( lpguid == NULL )
+    name = "Default Device";
+  if ( validDevice ) {
+    // A device may be enumerated for both render and capture: merge the two
+    // passes into one DsDevice entry, matched by name, filling id[1]/validId[1]
+    // for input and id[0]/validId[0] for output.
+    for ( unsigned int i=0; i<dsDevices.size(); i++ ) {
+      if ( dsDevices[i].name == name ) {
+        dsDevices[i].found = true;
+        if ( probeInfo.isInput ) {
+          dsDevices[i].id[1] = lpguid;
+          dsDevices[i].validId[1] = true;
+        }
+        else {
+          dsDevices[i].id[0] = lpguid;
+          dsDevices[i].validId[0] = true;
+        }
+        return TRUE;
+      }
+    }
+
+    // First time this device name is seen: append a new entry.
+    DsDevice device;
+    device.name = name;
+    device.found = true;
+    if ( probeInfo.isInput ) {
+      device.id[1] = lpguid;
+      device.validId[1] = true;
+    }
+    else {
+      device.id[0] = lpguid;
+      device.validId[0] = true;
+    }
+    dsDevices.push_back( device );
+  }
+
+  return TRUE;
+}
+
+static const char* getErrorString( int code )
+{
+ switch ( code ) {
+
+ case DSERR_ALLOCATED:
+ return "Already allocated";
+
+ case DSERR_CONTROLUNAVAIL:
+ return "Control unavailable";
+
+ case DSERR_INVALIDPARAM:
+ return "Invalid parameter";
+
+ case DSERR_INVALIDCALL:
+ return "Invalid call";
+
+ case DSERR_GENERIC:
+ return "Generic error";
+
+ case DSERR_PRIOLEVELNEEDED:
+ return "Priority level needed";
+
+ case DSERR_OUTOFMEMORY:
+ return "Out of memory";
+
+ case DSERR_BADFORMAT:
+ return "The sample rate or the channel format is not supported";
+
+ case DSERR_UNSUPPORTED:
+ return "Not supported";
+
+ case DSERR_NODRIVER:
+ return "No driver";
+
+ case DSERR_ALREADYINITIALIZED:
+ return "Already initialized";
+
+ case DSERR_NOAGGREGATION:
+ return "No aggregation";
+
+ case DSERR_BUFFERLOST:
+ return "Buffer lost";
+
+ case DSERR_OTHERAPPHASPRIO:
+ return "Another application already has priority";
+
+ case DSERR_UNINITIALIZED:
+ return "Uninitialized";
+
+ default:
+ return "DirectSound unknown error";
+ }
+}
+//******************** End of __WINDOWS_DS__ *********************//
+#endif
+
+
+#if defined(__LINUX_ALSA__)
+
+#include <alsa/asoundlib.h>
+#include <unistd.h>
+
+// A structure to hold various information related to the ALSA API
+// implementation.
+struct AlsaHandle {
+  snd_pcm_t *handles[2];       // PCM handles, indexed by stream mode (set by the caller)
+  bool synchronized;           // whether the two PCM handles are kept in sync
+  bool xrun[2];                // per-direction under/overrun flags
+  pthread_cond_t runnable_cv;  // condition variable paired with 'runnable'
+  bool runnable;
+
+  AlsaHandle() {
+    synchronized = false;
+    runnable = false;
+    xrun[0] = xrun[1] = false;
+  }
+};
+
+static void *alsaCallbackHandler( void * ptr );
+
+// Default constructor: no API-specific setup is required for ALSA.
+RtApiAlsa :: RtApiAlsa()
+{
+  // Nothing to do here.
+}
+
+RtApiAlsa :: ~RtApiAlsa()
+{
+  // Make sure any stream that is still open gets shut down cleanly
+  // before this API object is destroyed.
+  if ( stream_.state != STREAM_CLOSED )
+    closeStream();
+}
+
+// Count the ALSA PCM devices available to RtAudio: every PCM subdevice on
+// every sound card, plus one extra entry for the "default" device when it
+// can be opened. Cards that fail to open are reported as warnings and
+// skipped.
+unsigned int RtApiAlsa :: getDeviceCount( void )
+{
+  unsigned nDevices = 0;
+  int result, subdevice, card;
+  char name[64];
+  snd_ctl_t *handle = 0;
+
+  // Count cards and devices
+  card = -1;
+  snd_card_next( &card );
+  while ( card >= 0 ) {
+    sprintf( name, "hw:%d", card );
+    result = snd_ctl_open( &handle, name, 0 );
+    if ( result < 0 ) {
+      handle = 0;
+      errorStream_ << "RtApiAlsa::getDeviceCount: control open, card = " << card << ", " << snd_strerror( result ) << ".";
+      errorText_ = errorStream_.str();
+      error( RtAudioError::WARNING );
+      goto nextcard;
+    }
+    subdevice = -1;
+    // Walk all PCM subdevices of this card; snd_ctl_pcm_next_device sets
+    // subdevice to -1 when the list is exhausted.
+    while( 1 ) {
+      result = snd_ctl_pcm_next_device( handle, &subdevice );
+      if ( result < 0 ) {
+        errorStream_ << "RtApiAlsa::getDeviceCount: control next device, card = " << card << ", " << snd_strerror( result ) << ".";
+        errorText_ = errorStream_.str();
+        error( RtAudioError::WARNING );
+        break;
+      }
+      if ( subdevice < 0 )
+        break;
+      nDevices++;
+    }
+ nextcard:
+    if ( handle )
+      snd_ctl_close( handle );
+    snd_card_next( &card );
+  }
+
+  // The virtual "default" device counts as one more entry if it opens.
+  result = snd_ctl_open( &handle, "default", 0 );
+  if (result == 0) {
+    nDevices++;
+    snd_ctl_close( handle );
+  }
+
+  return nDevices;
+}
+
+// Probe the ALSA device with the given RtAudio index and fill a DeviceInfo
+// structure with its channel counts, supported sample rates and native data
+// formats. On any failure a DeviceInfo with probed == false is returned and
+// a warning (or INVALID_USE error for a bad index) is raised.
+// NOTE: fixes a character-encoding corruption on the hw-params alloca call,
+// which previously read "¶ms" (a mangled "&params") and could not compile.
+RtAudio::DeviceInfo RtApiAlsa :: getDeviceInfo( unsigned int device )
+{
+  RtAudio::DeviceInfo info;
+  info.probed = false;
+
+  unsigned nDevices = 0;
+  int result, subdevice, card;
+  char name[64];
+  snd_ctl_t *chandle = 0;
+
+  // Count cards and devices
+  card = -1;
+  subdevice = -1;
+  snd_card_next( &card );
+  while ( card >= 0 ) {
+    sprintf( name, "hw:%d", card );
+    result = snd_ctl_open( &chandle, name, SND_CTL_NONBLOCK );
+    if ( result < 0 ) {
+      chandle = 0;
+      errorStream_ << "RtApiAlsa::getDeviceInfo: control open, card = " << card << ", " << snd_strerror( result ) << ".";
+      errorText_ = errorStream_.str();
+      error( RtAudioError::WARNING );
+      goto nextcard;
+    }
+    subdevice = -1;
+    while( 1 ) {
+      result = snd_ctl_pcm_next_device( chandle, &subdevice );
+      if ( result < 0 ) {
+        errorStream_ << "RtApiAlsa::getDeviceInfo: control next device, card = " << card << ", " << snd_strerror( result ) << ".";
+        errorText_ = errorStream_.str();
+        error( RtAudioError::WARNING );
+        break;
+      }
+      if ( subdevice < 0 ) break;
+      if ( nDevices == device ) {
+        sprintf( name, "hw:%d,%d", card, subdevice );
+        goto foundDevice;
+      }
+      nDevices++;
+    }
+ nextcard:
+    if ( chandle )
+      snd_ctl_close( chandle );
+    snd_card_next( &card );
+  }
+
+  // The "default" device is the last index, after all hw:card,subdevice entries.
+  result = snd_ctl_open( &chandle, "default", SND_CTL_NONBLOCK );
+  if ( result == 0 ) {
+    if ( nDevices == device ) {
+      strcpy( name, "default" );
+      goto foundDevice;
+    }
+    nDevices++;
+  }
+
+  if ( nDevices == 0 ) {
+    errorText_ = "RtApiAlsa::getDeviceInfo: no devices found!";
+    error( RtAudioError::INVALID_USE );
+    return info;
+  }
+
+  if ( device >= nDevices ) {
+    errorText_ = "RtApiAlsa::getDeviceInfo: device ID is invalid!";
+    error( RtAudioError::INVALID_USE );
+    return info;
+  }
+
+  foundDevice:
+
+  // If a stream is already open, we cannot probe the stream devices.
+  // Thus, use the saved results.
+  if ( stream_.state != STREAM_CLOSED &&
+       ( stream_.device[0] == device || stream_.device[1] == device ) ) {
+    snd_ctl_close( chandle );
+    if ( device >= devices_.size() ) {
+      errorText_ = "RtApiAlsa::getDeviceInfo: device ID was not present before stream was opened.";
+      error( RtAudioError::WARNING );
+      return info;
+    }
+    return devices_[ device ];
+  }
+
+  int openMode = SND_PCM_ASYNC;
+  snd_pcm_stream_t stream;
+  snd_pcm_info_t *pcminfo;
+  snd_pcm_info_alloca( &pcminfo );
+  snd_pcm_t *phandle;
+  snd_pcm_hw_params_t *params;
+  snd_pcm_hw_params_alloca( &params );
+
+  // First try for playback unless default device (which has subdev -1)
+  stream = SND_PCM_STREAM_PLAYBACK;
+  snd_pcm_info_set_stream( pcminfo, stream );
+  if ( subdevice != -1 ) {
+    snd_pcm_info_set_device( pcminfo, subdevice );
+    snd_pcm_info_set_subdevice( pcminfo, 0 );
+
+    result = snd_ctl_pcm_info( chandle, pcminfo );
+    if ( result < 0 ) {
+      // Device probably doesn't support playback.
+      goto captureProbe;
+    }
+  }
+
+  result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK );
+  if ( result < 0 ) {
+    errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
+    errorText_ = errorStream_.str();
+    error( RtAudioError::WARNING );
+    goto captureProbe;
+  }
+
+  // The device is open ... fill the parameter structure.
+  result = snd_pcm_hw_params_any( phandle, params );
+  if ( result < 0 ) {
+    snd_pcm_close( phandle );
+    errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
+    errorText_ = errorStream_.str();
+    error( RtAudioError::WARNING );
+    goto captureProbe;
+  }
+
+  // Get output channel information.
+  unsigned int value;
+  result = snd_pcm_hw_params_get_channels_max( params, &value );
+  if ( result < 0 ) {
+    snd_pcm_close( phandle );
+    errorStream_ << "RtApiAlsa::getDeviceInfo: error getting device (" << name << ") output channels, " << snd_strerror( result ) << ".";
+    errorText_ = errorStream_.str();
+    error( RtAudioError::WARNING );
+    goto captureProbe;
+  }
+  info.outputChannels = value;
+  snd_pcm_close( phandle );
+
+  captureProbe:
+  stream = SND_PCM_STREAM_CAPTURE;
+  snd_pcm_info_set_stream( pcminfo, stream );
+
+  // Now try for capture unless default device (with subdev = -1)
+  if ( subdevice != -1 ) {
+    result = snd_ctl_pcm_info( chandle, pcminfo );
+    snd_ctl_close( chandle );
+    if ( result < 0 ) {
+      // Device probably doesn't support capture.
+      if ( info.outputChannels == 0 ) return info;
+      goto probeParameters;
+    }
+  }
+  else
+    snd_ctl_close( chandle );
+
+  result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK);
+  if ( result < 0 ) {
+    errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
+    errorText_ = errorStream_.str();
+    error( RtAudioError::WARNING );
+    if ( info.outputChannels == 0 ) return info;
+    goto probeParameters;
+  }
+
+  // The device is open ... fill the parameter structure.
+  result = snd_pcm_hw_params_any( phandle, params );
+  if ( result < 0 ) {
+    snd_pcm_close( phandle );
+    errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
+    errorText_ = errorStream_.str();
+    error( RtAudioError::WARNING );
+    if ( info.outputChannels == 0 ) return info;
+    goto probeParameters;
+  }
+
+  result = snd_pcm_hw_params_get_channels_max( params, &value );
+  if ( result < 0 ) {
+    snd_pcm_close( phandle );
+    errorStream_ << "RtApiAlsa::getDeviceInfo: error getting device (" << name << ") input channels, " << snd_strerror( result ) << ".";
+    errorText_ = errorStream_.str();
+    error( RtAudioError::WARNING );
+    if ( info.outputChannels == 0 ) return info;
+    goto probeParameters;
+  }
+  info.inputChannels = value;
+  snd_pcm_close( phandle );
+
+  // If device opens for both playback and capture, we determine the channels.
+  if ( info.outputChannels > 0 && info.inputChannels > 0 )
+    info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
+
+  // ALSA doesn't provide default devices so we'll use the first available one.
+  if ( device == 0 && info.outputChannels > 0 )
+    info.isDefaultOutput = true;
+  if ( device == 0 && info.inputChannels > 0 )
+    info.isDefaultInput = true;
+
+ probeParameters:
+  // At this point, we just need to figure out the supported data
+  // formats and sample rates. We'll proceed by opening the device in
+  // the direction with the maximum number of channels, or playback if
+  // they are equal. This might limit our sample rate options, but so
+  // be it.
+
+  if ( info.outputChannels >= info.inputChannels )
+    stream = SND_PCM_STREAM_PLAYBACK;
+  else
+    stream = SND_PCM_STREAM_CAPTURE;
+  snd_pcm_info_set_stream( pcminfo, stream );
+
+  result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK);
+  if ( result < 0 ) {
+    errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
+    errorText_ = errorStream_.str();
+    error( RtAudioError::WARNING );
+    return info;
+  }
+
+  // The device is open ... fill the parameter structure.
+  result = snd_pcm_hw_params_any( phandle, params );
+  if ( result < 0 ) {
+    snd_pcm_close( phandle );
+    errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
+    errorText_ = errorStream_.str();
+    error( RtAudioError::WARNING );
+    return info;
+  }
+
+  // Test our discrete set of sample rate values.
+  info.sampleRates.clear();
+  for ( unsigned int i=0; i<MAX_SAMPLE_RATES; i++ ) {
+    if ( snd_pcm_hw_params_test_rate( phandle, params, SAMPLE_RATES[i], 0 ) == 0 ) {
+      info.sampleRates.push_back( SAMPLE_RATES[i] );
+
+      // Prefer the highest supported rate that does not exceed 48 kHz.
+      if ( !info.preferredSampleRate || ( SAMPLE_RATES[i] <= 48000 && SAMPLE_RATES[i] > info.preferredSampleRate ) )
+        info.preferredSampleRate = SAMPLE_RATES[i];
+    }
+  }
+  if ( info.sampleRates.size() == 0 ) {
+    snd_pcm_close( phandle );
+    errorStream_ << "RtApiAlsa::getDeviceInfo: no supported sample rates found for device (" << name << ").";
+    errorText_ = errorStream_.str();
+    error( RtAudioError::WARNING );
+    return info;
+  }
+
+  // Probe the supported data formats ... we don't care about endian-ness just yet
+  snd_pcm_format_t format;
+  info.nativeFormats = 0;
+  format = SND_PCM_FORMAT_S8;
+  if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
+    info.nativeFormats |= RTAUDIO_SINT8;
+  format = SND_PCM_FORMAT_S16;
+  if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
+    info.nativeFormats |= RTAUDIO_SINT16;
+  format = SND_PCM_FORMAT_S24;
+  if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
+    info.nativeFormats |= RTAUDIO_SINT24;
+  format = SND_PCM_FORMAT_S32;
+  if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
+    info.nativeFormats |= RTAUDIO_SINT32;
+  format = SND_PCM_FORMAT_FLOAT;
+  if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
+    info.nativeFormats |= RTAUDIO_FLOAT32;
+  format = SND_PCM_FORMAT_FLOAT64;
+  if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
+    info.nativeFormats |= RTAUDIO_FLOAT64;
+
+  // Check that we have at least one supported format
+  if ( info.nativeFormats == 0 ) {
+    snd_pcm_close( phandle );
+    errorStream_ << "RtApiAlsa::getDeviceInfo: pcm device (" << name << ") data format not supported by RtAudio.";
+    errorText_ = errorStream_.str();
+    error( RtAudioError::WARNING );
+    return info;
+  }
+
+  // Get the device name
+  char *cardname;
+  result = snd_card_get_name( card, &cardname );
+  if ( result >= 0 ) {
+    sprintf( name, "hw:%s,%d", cardname, subdevice );
+    free( cardname );
+  }
+  info.name = name;
+
+  // That's all ... close the device and return
+  snd_pcm_close( phandle );
+  info.probed = true;
+  return info;
+}
+
+// Snapshot the current device list so getDeviceInfo() can answer from the
+// cache while a stream is open (probing an open device is not possible).
+void RtApiAlsa :: saveDeviceInfo( void )
+{
+  devices_.clear();
+
+  unsigned int total = getDeviceCount();
+  devices_.reserve( total );
+  for ( unsigned int idx = 0; idx < total; idx++ )
+    devices_.push_back( getDeviceInfo( idx ) );
+}
+
+bool RtApiAlsa :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
+ unsigned int firstChannel, unsigned int sampleRate,
+ RtAudioFormat format, unsigned int *bufferSize,
+ RtAudio::StreamOptions *options )
+
+{
+#if defined(__RTAUDIO_DEBUG__)
+ snd_output_t *out;
+ snd_output_stdio_attach(&out, stderr, 0);
+#endif
+
+ // I'm not using the "plug" interface ... too much inconsistent behavior.
+
+ unsigned nDevices = 0;
+ int result, subdevice, card;
+ char name[64];
+ snd_ctl_t *chandle;
+
+ if ( options && options->flags & RTAUDIO_ALSA_USE_DEFAULT )
+ snprintf(name, sizeof(name), "%s", "default");
+ else {
+ // Count cards and devices
+ card = -1;
+ snd_card_next( &card );
+ while ( card >= 0 ) {
+ sprintf( name, "hw:%d", card );
+ result = snd_ctl_open( &chandle, name, SND_CTL_NONBLOCK );
+ if ( result < 0 ) {
+ errorStream_ << "RtApiAlsa::probeDeviceOpen: control open, card = " << card << ", " << snd_strerror( result ) << ".";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+ subdevice = -1;
+ while( 1 ) {
+ result = snd_ctl_pcm_next_device( chandle, &subdevice );
+ if ( result < 0 ) break;
+ if ( subdevice < 0 ) break;
+ if ( nDevices == device ) {
+ sprintf( name, "hw:%d,%d", card, subdevice );
+ snd_ctl_close( chandle );
+ goto foundDevice;
+ }
+ nDevices++;
+ }
+ snd_ctl_close( chandle );
+ snd_card_next( &card );
+ }
+
+ result = snd_ctl_open( &chandle, "default", SND_CTL_NONBLOCK );
+ if ( result == 0 ) {
+ if ( nDevices == device ) {
+ strcpy( name, "default" );
+ snd_ctl_close( chandle );
+ goto foundDevice;
+ }
+ nDevices++;
+ }
+ snd_ctl_close( chandle );
+
+ if ( nDevices == 0 ) {
+ // This should not happen because a check is made before this function is called.
+ errorText_ = "RtApiAlsa::probeDeviceOpen: no devices found!";
+ return FAILURE;
+ }
+
+ if ( device >= nDevices ) {
+ // This should not happen because a check is made before this function is called.
+ errorText_ = "RtApiAlsa::probeDeviceOpen: device ID is invalid!";
+ return FAILURE;
+ }
+ }
+
+ foundDevice:
+
+ // The getDeviceInfo() function will not work for a device that is
+ // already open. Thus, we'll probe the system before opening a
+ // stream and save the results for use by getDeviceInfo().
+ if ( mode == OUTPUT || ( mode == INPUT && stream_.mode != OUTPUT ) ) // only do once
+ this->saveDeviceInfo();
+
+ snd_pcm_stream_t stream;
+ if ( mode == OUTPUT )
+ stream = SND_PCM_STREAM_PLAYBACK;
+ else
+ stream = SND_PCM_STREAM_CAPTURE;
+
+ snd_pcm_t *phandle;
+ int openMode = SND_PCM_ASYNC;
+ result = snd_pcm_open( &phandle, name, stream, openMode );
+ if ( result < 0 ) {
+ if ( mode == OUTPUT )
+ errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device (" << name << ") won't open for output.";
+ else
+ errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device (" << name << ") won't open for input.";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // Fill the parameter structure.
+ snd_pcm_hw_params_t *hw_params;
+ snd_pcm_hw_params_alloca( &hw_params );
+ result = snd_pcm_hw_params_any( phandle, hw_params );
+ if ( result < 0 ) {
+ snd_pcm_close( phandle );
+ errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting pcm device (" << name << ") parameters, " << snd_strerror( result ) << ".";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+#if defined(__RTAUDIO_DEBUG__)
+ fprintf( stderr, "\nRtApiAlsa: dump hardware params just after device open:\n\n" );
+ snd_pcm_hw_params_dump( hw_params, out );
+#endif
+
+ // Set access ... check user preference.
+ if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) {
+ stream_.userInterleaved = false;
+ result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED );
+ if ( result < 0 ) {
+ result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED );
+ stream_.deviceInterleaved[mode] = true;
+ }
+ else
+ stream_.deviceInterleaved[mode] = false;
+ }
+ else {
+ stream_.userInterleaved = true;
+ result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED );
+ if ( result < 0 ) {
+ result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED );
+ stream_.deviceInterleaved[mode] = false;
+ }
+ else
+ stream_.deviceInterleaved[mode] = true;
+ }
+
+ if ( result < 0 ) {
+ snd_pcm_close( phandle );
+ errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting pcm device (" << name << ") access, " << snd_strerror( result ) << ".";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // Determine how to set the device format.
+ stream_.userFormat = format;
+ snd_pcm_format_t deviceFormat = SND_PCM_FORMAT_UNKNOWN;
+
+ if ( format == RTAUDIO_SINT8 )
+ deviceFormat = SND_PCM_FORMAT_S8;
+ else if ( format == RTAUDIO_SINT16 )
+ deviceFormat = SND_PCM_FORMAT_S16;
+ else if ( format == RTAUDIO_SINT24 )
+ deviceFormat = SND_PCM_FORMAT_S24;
+ else if ( format == RTAUDIO_SINT32 )
+ deviceFormat = SND_PCM_FORMAT_S32;
+ else if ( format == RTAUDIO_FLOAT32 )
+ deviceFormat = SND_PCM_FORMAT_FLOAT;
+ else if ( format == RTAUDIO_FLOAT64 )
+ deviceFormat = SND_PCM_FORMAT_FLOAT64;
+
+ if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat) == 0) {
+ stream_.deviceFormat[mode] = format;
+ goto setFormat;
+ }
+
+ // The user requested format is not natively supported by the device.
+ deviceFormat = SND_PCM_FORMAT_FLOAT64;
+ if ( snd_pcm_hw_params_test_format( phandle, hw_params, deviceFormat ) == 0 ) {
+ stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
+ goto setFormat;
+ }
+
+ deviceFormat = SND_PCM_FORMAT_FLOAT;
+ if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
+ stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
+ goto setFormat;
+ }
+
+ deviceFormat = SND_PCM_FORMAT_S32;
+ if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
+ stream_.deviceFormat[mode] = RTAUDIO_SINT32;
+ goto setFormat;
+ }
+
+ deviceFormat = SND_PCM_FORMAT_S24;
+ if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
+ stream_.deviceFormat[mode] = RTAUDIO_SINT24;
+ goto setFormat;
+ }
+
+ deviceFormat = SND_PCM_FORMAT_S16;
+ if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
+ stream_.deviceFormat[mode] = RTAUDIO_SINT16;
+ goto setFormat;
+ }
+
+ deviceFormat = SND_PCM_FORMAT_S8;
+ if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
+ stream_.deviceFormat[mode] = RTAUDIO_SINT8;
+ goto setFormat;
+ }
+
+ // If we get here, no supported format was found.
+ snd_pcm_close( phandle );
+ errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device " << device << " data format not supported by RtAudio.";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+
+ setFormat:
+ result = snd_pcm_hw_params_set_format( phandle, hw_params, deviceFormat );
+ if ( result < 0 ) {
+ snd_pcm_close( phandle );
+ errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting pcm device (" << name << ") data format, " << snd_strerror( result ) << ".";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // Determine whether byte-swaping is necessary.
+ stream_.doByteSwap[mode] = false;
+ if ( deviceFormat != SND_PCM_FORMAT_S8 ) {
+ result = snd_pcm_format_cpu_endian( deviceFormat );
+ if ( result == 0 )
+ stream_.doByteSwap[mode] = true;
+ else if (result < 0) {
+ snd_pcm_close( phandle );
+ errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting pcm device (" << name << ") endian-ness, " << snd_strerror( result ) << ".";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+ }
+
+ // Set the sample rate.
+ result = snd_pcm_hw_params_set_rate_near( phandle, hw_params, (unsigned int*) &sampleRate, 0 );
+ if ( result < 0 ) {
+ snd_pcm_close( phandle );
+ errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting sample rate on device (" << name << "), " << snd_strerror( result ) << ".";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // Determine the number of channels for this device. We support a possible
+ // minimum device channel number > than the value requested by the user.
+ stream_.nUserChannels[mode] = channels;
+ unsigned int value;
+ result = snd_pcm_hw_params_get_channels_max( hw_params, &value );
+ unsigned int deviceChannels = value;
+ if ( result < 0 || deviceChannels < channels + firstChannel ) {
+ snd_pcm_close( phandle );
+ errorStream_ << "RtApiAlsa::probeDeviceOpen: requested channel parameters not supported by device (" << name << "), " << snd_strerror( result ) << ".";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ result = snd_pcm_hw_params_get_channels_min( hw_params, &value );
+ if ( result < 0 ) {
+ snd_pcm_close( phandle );
+ errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting minimum channels for device (" << name << "), " << snd_strerror( result ) << ".";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+ deviceChannels = value;
+ if ( deviceChannels < channels + firstChannel ) deviceChannels = channels + firstChannel;
+ stream_.nDeviceChannels[mode] = deviceChannels;
+
+ // Set the device channels.
+ result = snd_pcm_hw_params_set_channels( phandle, hw_params, deviceChannels );
+ if ( result < 0 ) {
+ snd_pcm_close( phandle );
+ errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting channels for device (" << name << "), " << snd_strerror( result ) << ".";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // Set the buffer (or period) size.
+ int dir = 0;
+ snd_pcm_uframes_t periodSize = *bufferSize;
+ result = snd_pcm_hw_params_set_period_size_near( phandle, hw_params, &periodSize, &dir );
+ if ( result < 0 ) {
+ snd_pcm_close( phandle );
+ errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting period size for device (" << name << "), " << snd_strerror( result ) << ".";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+ *bufferSize = periodSize;
+
+ // Set the buffer number, which in ALSA is referred to as the "period".
+ unsigned int periods = 0;
+ if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) periods = 2;
+ if ( options && options->numberOfBuffers > 0 ) periods = options->numberOfBuffers;
+ if ( periods < 2 ) periods = 4; // a fairly safe default value
+ result = snd_pcm_hw_params_set_periods_near( phandle, hw_params, &periods, &dir );
+ if ( result < 0 ) {
+ snd_pcm_close( phandle );
+ errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting periods for device (" << name << "), " << snd_strerror( result ) << ".";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // If attempting to setup a duplex stream, the bufferSize parameter
+ // MUST be the same in both directions!
+ if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
+ snd_pcm_close( phandle );
+ errorStream_ << "RtApiAlsa::probeDeviceOpen: system error setting buffer size for duplex stream on device (" << name << ").";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ stream_.bufferSize = *bufferSize;
+
+ // Install the hardware configuration
+ result = snd_pcm_hw_params( phandle, hw_params );
+ if ( result < 0 ) {
+ snd_pcm_close( phandle );
+ errorStream_ << "RtApiAlsa::probeDeviceOpen: error installing hardware configuration on device (" << name << "), " << snd_strerror( result ) << ".";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+#if defined(__RTAUDIO_DEBUG__)
+ fprintf(stderr, "\nRtApiAlsa: dump hardware params after installation:\n\n");
+ snd_pcm_hw_params_dump( hw_params, out );
+#endif
+
+ // Set the software configuration to fill buffers with zeros and prevent device stopping on xruns.
+ snd_pcm_sw_params_t *sw_params = NULL;
+ snd_pcm_sw_params_alloca( &sw_params );
+ snd_pcm_sw_params_current( phandle, sw_params );
+ snd_pcm_sw_params_set_start_threshold( phandle, sw_params, *bufferSize );
+ snd_pcm_sw_params_set_stop_threshold( phandle, sw_params, ULONG_MAX );
+ snd_pcm_sw_params_set_silence_threshold( phandle, sw_params, 0 );
+
+ // The following two settings were suggested by Theo Veenker
+ //snd_pcm_sw_params_set_avail_min( phandle, sw_params, *bufferSize );
+ //snd_pcm_sw_params_set_xfer_align( phandle, sw_params, 1 );
+
+ // here are two options for a fix
+ //snd_pcm_sw_params_set_silence_size( phandle, sw_params, ULONG_MAX );
+ snd_pcm_uframes_t val;
+ snd_pcm_sw_params_get_boundary( sw_params, &val );
+ snd_pcm_sw_params_set_silence_size( phandle, sw_params, val );
+
+ result = snd_pcm_sw_params( phandle, sw_params );
+ if ( result < 0 ) {
+ snd_pcm_close( phandle );
+ errorStream_ << "RtApiAlsa::probeDeviceOpen: error installing software configuration on device (" << name << "), " << snd_strerror( result ) << ".";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+#if defined(__RTAUDIO_DEBUG__)
+ fprintf(stderr, "\nRtApiAlsa: dump software params after installation:\n\n");
+ snd_pcm_sw_params_dump( sw_params, out );
+#endif
+
+ // Set flags for buffer conversion
+ stream_.doConvertBuffer[mode] = false;
+ if ( stream_.userFormat != stream_.deviceFormat[mode] )
+ stream_.doConvertBuffer[mode] = true;
+ if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
+ stream_.doConvertBuffer[mode] = true;
+ if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
+ stream_.nUserChannels[mode] > 1 )
+ stream_.doConvertBuffer[mode] = true;
+
+ // Allocate the ApiHandle if necessary and then save.
+ AlsaHandle *apiInfo = 0;
+ if ( stream_.apiHandle == 0 ) {
+ try {
+ apiInfo = (AlsaHandle *) new AlsaHandle;
+ }
+ catch ( std::bad_alloc& ) {
+ errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating AlsaHandle memory.";
+ goto error;
+ }
+
+ if ( pthread_cond_init( &apiInfo->runnable_cv, NULL ) ) {
+ errorText_ = "RtApiAlsa::probeDeviceOpen: error initializing pthread condition variable.";
+ goto error;
+ }
+
+ stream_.apiHandle = (void *) apiInfo;
+ apiInfo->handles[0] = 0;
+ apiInfo->handles[1] = 0;
+ }
+ else {
+ apiInfo = (AlsaHandle *) stream_.apiHandle;
+ }
+ apiInfo->handles[mode] = phandle;
+ phandle = 0;
+
+ // Allocate necessary internal buffers.
+ unsigned long bufferBytes;
+ bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
+ stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
+ if ( stream_.userBuffer[mode] == NULL ) {
+ errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating user buffer memory.";
+ goto error;
+ }
+
+ if ( stream_.doConvertBuffer[mode] ) {
+
+ bool makeBuffer = true;
+ bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
+ if ( mode == INPUT ) {
+ if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
+ unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
+ if ( bufferBytes <= bytesOut ) makeBuffer = false;
+ }
+ }
+
+ if ( makeBuffer ) {
+ bufferBytes *= *bufferSize;
+ if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
+ stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
+ if ( stream_.deviceBuffer == NULL ) {
+ errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating device buffer memory.";
+ goto error;
+ }
+ }
+ }
+
+ stream_.sampleRate = sampleRate;
+ stream_.nBuffers = periods;
+ stream_.device[mode] = device;
+ stream_.state = STREAM_STOPPED;
+
+ // Setup the buffer conversion information structure.
+ if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
+
+ // Setup thread if necessary.
+ if ( stream_.mode == OUTPUT && mode == INPUT ) {
+ // We had already set up an output stream.
+ stream_.mode = DUPLEX;
+ // Link the streams if possible.
+ apiInfo->synchronized = false;
+ if ( snd_pcm_link( apiInfo->handles[0], apiInfo->handles[1] ) == 0 )
+ apiInfo->synchronized = true;
+ else {
+ errorText_ = "RtApiAlsa::probeDeviceOpen: unable to synchronize input and output devices.";
+ error( RtAudioError::WARNING );
+ }
+ }
+ else {
+ stream_.mode = mode;
+
+ // Setup callback thread.
+ stream_.callbackInfo.object = (void *) this;
+
+ // Set the thread attributes for joinable and realtime scheduling
+ // priority (optional). The higher priority will only take affect
+ // if the program is run as root or suid. Note, under Linux
+ // processes with CAP_SYS_NICE privilege, a user can change
+ // scheduling policy and priority (thus need not be root). See
+ // POSIX "capabilities".
+ pthread_attr_t attr;
+ pthread_attr_init( &attr );
+ pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );
+#ifdef SCHED_RR // Undefined with some OSes (e.g. NetBSD 1.6.x with GNU Pthread)
+ if ( options && options->flags & RTAUDIO_SCHEDULE_REALTIME ) {
+ stream_.callbackInfo.doRealtime = true;
+ struct sched_param param;
+ int priority = options->priority;
+ int min = sched_get_priority_min( SCHED_RR );
+ int max = sched_get_priority_max( SCHED_RR );
+ if ( priority < min ) priority = min;
+ else if ( priority > max ) priority = max;
+ param.sched_priority = priority;
+
+ // Set the policy BEFORE the priority. Otherwise it fails.
+ pthread_attr_setschedpolicy(&attr, SCHED_RR);
+ pthread_attr_setscope (&attr, PTHREAD_SCOPE_SYSTEM);
+ // This is definitely required. Otherwise it fails.
+ pthread_attr_setinheritsched(&attr, PTHREAD_EXPLICIT_SCHED);
+ pthread_attr_setschedparam(&attr, ¶m);
+ }
+ else
+ pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
+#else
+ pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
+#endif
+
+ stream_.callbackInfo.isRunning = true;
+ result = pthread_create( &stream_.callbackInfo.thread, &attr, alsaCallbackHandler, &stream_.callbackInfo );
+ pthread_attr_destroy( &attr );
+ if ( result ) {
+ // Failed. Try instead with default attributes.
+ result = pthread_create( &stream_.callbackInfo.thread, NULL, alsaCallbackHandler, &stream_.callbackInfo );
+ if ( result ) {
+ stream_.callbackInfo.isRunning = false;
+ errorText_ = "RtApiAlsa::error creating callback thread!";
+ goto error;
+ }
+ }
+ }
+
+ return SUCCESS;
+
+ error:
+ if ( apiInfo ) {
+ pthread_cond_destroy( &apiInfo->runnable_cv );
+ if ( apiInfo->handles[0] ) snd_pcm_close( apiInfo->handles[0] );
+ if ( apiInfo->handles[1] ) snd_pcm_close( apiInfo->handles[1] );
+ delete apiInfo;
+ stream_.apiHandle = 0;
+ }
+
+ if ( phandle) snd_pcm_close( phandle );
+
+ for ( int i=0; i<2; i++ ) {
+ if ( stream_.userBuffer[i] ) {
+ free( stream_.userBuffer[i] );
+ stream_.userBuffer[i] = 0;
+ }
+ }
+
+ if ( stream_.deviceBuffer ) {
+ free( stream_.deviceBuffer );
+ stream_.deviceBuffer = 0;
+ }
+
+ stream_.state = STREAM_CLOSED;
+ return FAILURE;
+}
+
+// Close the ALSA stream: stop and join the callback thread, drop any audio
+// still queued on the PCM devices, then release the ALSA handles and all
+// internally allocated buffers.
+void RtApiAlsa :: closeStream()
+{
+  if ( stream_.state == STREAM_CLOSED ) {
+    errorText_ = "RtApiAlsa::closeStream(): no open stream to close!";
+    error( RtAudioError::WARNING );
+    return;
+  }
+
+  AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
+  stream_.callbackInfo.isRunning = false;
+  MUTEX_LOCK( &stream_.mutex );
+  // A stopped stream leaves the callback thread parked on runnable_cv inside
+  // callbackEvent(); wake it so it can observe isRunning == false and exit,
+  // otherwise the pthread_join below would deadlock.
+  if ( stream_.state == STREAM_STOPPED ) {
+    apiInfo->runnable = true;
+    pthread_cond_signal( &apiInfo->runnable_cv );
+  }
+  MUTEX_UNLOCK( &stream_.mutex );
+  pthread_join( stream_.callbackInfo.thread, NULL );
+
+  // If still running, discard pending frames on whichever directions are open.
+  if ( stream_.state == STREAM_RUNNING ) {
+    stream_.state = STREAM_STOPPED;
+    if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
+      snd_pcm_drop( apiInfo->handles[0] );
+    if ( stream_.mode == INPUT || stream_.mode == DUPLEX )
+      snd_pcm_drop( apiInfo->handles[1] );
+  }
+
+  // Release the per-stream ALSA state (handles[0] = output, handles[1] = input).
+  if ( apiInfo ) {
+    pthread_cond_destroy( &apiInfo->runnable_cv );
+    if ( apiInfo->handles[0] ) snd_pcm_close( apiInfo->handles[0] );
+    if ( apiInfo->handles[1] ) snd_pcm_close( apiInfo->handles[1] );
+    delete apiInfo;
+    stream_.apiHandle = 0;
+  }
+
+  // Free user-side buffers for both directions.
+  for ( int i=0; i<2; i++ ) {
+    if ( stream_.userBuffer[i] ) {
+      free( stream_.userBuffer[i] );
+      stream_.userBuffer[i] = 0;
+    }
+  }
+
+  // Free the shared device-format conversion buffer, if one was allocated.
+  if ( stream_.deviceBuffer ) {
+    free( stream_.deviceBuffer );
+    stream_.deviceBuffer = 0;
+  }
+
+  stream_.mode = UNINITIALIZED;
+  stream_.state = STREAM_CLOSED;
+}
+
+// Start the ALSA stream: prepare the output/input PCM devices if needed,
+// mark the stream running, and wake the parked callback thread.
+void RtApiAlsa :: startStream()
+{
+  // This method calls snd_pcm_prepare if the device isn't already in that state.
+
+  verifyStream();
+  if ( stream_.state == STREAM_RUNNING ) {
+    errorText_ = "RtApiAlsa::startStream(): the stream is already running!";
+    error( RtAudioError::WARNING );
+    return;
+  }
+
+  MUTEX_LOCK( &stream_.mutex );
+
+  // Reset the stream-time reference so getStreamTime() restarts from now.
+  #if defined( HAVE_GETTIMEOFDAY )
+  gettimeofday( &stream_.lastTickTimestamp, NULL );
+  #endif
+
+  int result = 0;
+  snd_pcm_state_t state;
+  AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
+  snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
+  if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
+    state = snd_pcm_state( handle[0] );
+    if ( state != SND_PCM_STATE_PREPARED ) {
+      result = snd_pcm_prepare( handle[0] );
+      if ( result < 0 ) {
+        errorStream_ << "RtApiAlsa::startStream: error preparing output pcm device, " << snd_strerror( result ) << ".";
+        errorText_ = errorStream_.str();
+        goto unlock;
+      }
+    }
+  }
+
+  // Input only needs explicit preparation when it is not linked to the output
+  // (linked/synchronized devices are driven together by the output side).
+  if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
+    result = snd_pcm_drop(handle[1]); // fix to remove stale data received since device has been open
+    state = snd_pcm_state( handle[1] );
+    if ( state != SND_PCM_STATE_PREPARED ) {
+      result = snd_pcm_prepare( handle[1] );
+      if ( result < 0 ) {
+        errorStream_ << "RtApiAlsa::startStream: error preparing input pcm device, " << snd_strerror( result ) << ".";
+        errorText_ = errorStream_.str();
+        goto unlock;
+      }
+    }
+  }
+
+  stream_.state = STREAM_RUNNING;
+
+ unlock:
+  // NOTE(review): runnable is set and signalled even when preparation failed
+  // above; the woken callback thread then sees state != STREAM_RUNNING and
+  // may spin until the stream is stopped or closed — confirm this is intended.
+  apiInfo->runnable = true;
+  pthread_cond_signal( &apiInfo->runnable_cv );
+  MUTEX_UNLOCK( &stream_.mutex );
+
+  if ( result >= 0 ) return;
+  error( RtAudioError::SYSTEM_ERROR );
+}
+
+// Stop the ALSA stream gracefully: drain queued output samples (so the tail
+// of the audio is played) and drop pending input, then park the callback
+// thread by clearing the runnable flag.
+void RtApiAlsa :: stopStream()
+{
+  verifyStream();
+  if ( stream_.state == STREAM_STOPPED ) {
+    errorText_ = "RtApiAlsa::stopStream(): the stream is already stopped!";
+    error( RtAudioError::WARNING );
+    return;
+  }
+
+  // State is flipped before taking the mutex so callbackEvent() bails out of
+  // its next cycle as early as possible.
+  stream_.state = STREAM_STOPPED;
+  MUTEX_LOCK( &stream_.mutex );
+
+  int result = 0;
+  AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
+  snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
+  if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
+    // Linked (synchronized) duplex devices cannot be drained independently,
+    // so drop instead of drain in that case.
+    if ( apiInfo->synchronized )
+      result = snd_pcm_drop( handle[0] );
+    else
+      result = snd_pcm_drain( handle[0] );
+    if ( result < 0 ) {
+      errorStream_ << "RtApiAlsa::stopStream: error draining output pcm device, " << snd_strerror( result ) << ".";
+      errorText_ = errorStream_.str();
+      goto unlock;
+    }
+  }
+
+  if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
+    result = snd_pcm_drop( handle[1] );
+    if ( result < 0 ) {
+      errorStream_ << "RtApiAlsa::stopStream: error stopping input pcm device, " << snd_strerror( result ) << ".";
+      errorText_ = errorStream_.str();
+      goto unlock;
+    }
+  }
+
+ unlock:
+  apiInfo->runnable = false; // fixes high CPU usage when stopped
+  MUTEX_UNLOCK( &stream_.mutex );
+
+  if ( result >= 0 ) return;
+  error( RtAudioError::SYSTEM_ERROR );
+}
+
+// Abort the ALSA stream immediately: unlike stopStream(), output is dropped
+// (discarded) rather than drained, so any queued audio is not played out.
+void RtApiAlsa :: abortStream()
+{
+  verifyStream();
+  if ( stream_.state == STREAM_STOPPED ) {
+    errorText_ = "RtApiAlsa::abortStream(): the stream is already stopped!";
+    error( RtAudioError::WARNING );
+    return;
+  }
+
+  // Flip state before locking so callbackEvent() exits its next cycle early.
+  stream_.state = STREAM_STOPPED;
+  MUTEX_LOCK( &stream_.mutex );
+
+  int result = 0;
+  AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
+  snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
+  if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
+    result = snd_pcm_drop( handle[0] );
+    if ( result < 0 ) {
+      errorStream_ << "RtApiAlsa::abortStream: error aborting output pcm device, " << snd_strerror( result ) << ".";
+      errorText_ = errorStream_.str();
+      goto unlock;
+    }
+  }
+
+  // A linked (synchronized) input is dropped together with the output above.
+  if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
+    result = snd_pcm_drop( handle[1] );
+    if ( result < 0 ) {
+      errorStream_ << "RtApiAlsa::abortStream: error aborting input pcm device, " << snd_strerror( result ) << ".";
+      errorText_ = errorStream_.str();
+      goto unlock;
+    }
+  }
+
+ unlock:
+  apiInfo->runnable = false; // fixes high CPU usage when stopped
+  MUTEX_UNLOCK( &stream_.mutex );
+
+  if ( result >= 0 ) return;
+  error( RtAudioError::SYSTEM_ERROR );
+}
+
+// One cycle of the ALSA callback thread: wait while stopped, invoke the user
+// callback, then read one buffer of input and/or write one buffer of output,
+// handling xruns (EPIPE) by re-preparing the device and flagging the status.
+void RtApiAlsa :: callbackEvent()
+{
+  AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
+  // Park here while the stream is stopped; startStream()/closeStream() set
+  // runnable and signal the condition variable to release us.
+  if ( stream_.state == STREAM_STOPPED ) {
+    MUTEX_LOCK( &stream_.mutex );
+    while ( !apiInfo->runnable )
+      pthread_cond_wait( &apiInfo->runnable_cv, &stream_.mutex );
+
+    if ( stream_.state != STREAM_RUNNING ) {
+      MUTEX_UNLOCK( &stream_.mutex );
+      return;
+    }
+    MUTEX_UNLOCK( &stream_.mutex );
+  }
+
+  if ( stream_.state == STREAM_CLOSED ) {
+    errorText_ = "RtApiAlsa::callbackEvent(): the stream is closed ... this shouldn't happen!";
+    error( RtAudioError::WARNING );
+    return;
+  }
+
+  // Report any under/overflow recorded by the previous cycle to the user
+  // callback via the status flags, then clear the latch.
+  int doStopStream = 0;
+  RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
+  double streamTime = getStreamTime();
+  RtAudioStreamStatus status = 0;
+  if ( stream_.mode != INPUT && apiInfo->xrun[0] == true ) {
+    status |= RTAUDIO_OUTPUT_UNDERFLOW;
+    apiInfo->xrun[0] = false;
+  }
+  if ( stream_.mode != OUTPUT && apiInfo->xrun[1] == true ) {
+    status |= RTAUDIO_INPUT_OVERFLOW;
+    apiInfo->xrun[1] = false;
+  }
+  doStopStream = callback( stream_.userBuffer[0], stream_.userBuffer[1],
+                           stream_.bufferSize, streamTime, status, stream_.callbackInfo.userData );
+
+  // Callback return value 2 requests an immediate abort (no drain).
+  if ( doStopStream == 2 ) {
+    abortStream();
+    return;
+  }
+
+  MUTEX_LOCK( &stream_.mutex );
+
+  // The state might change while waiting on a mutex.
+  if ( stream_.state == STREAM_STOPPED ) goto unlock;
+
+  int result;
+  char *buffer;
+  int channels;
+  snd_pcm_t **handle;
+  snd_pcm_sframes_t frames;
+  RtAudioFormat format;
+  handle = (snd_pcm_t **) apiInfo->handles;
+
+  if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
+
+    // Setup parameters: read into the device buffer when format/channel
+    // conversion is required, otherwise directly into the user buffer.
+    if ( stream_.doConvertBuffer[1] ) {
+      buffer = stream_.deviceBuffer;
+      channels = stream_.nDeviceChannels[1];
+      format = stream_.deviceFormat[1];
+    }
+    else {
+      buffer = stream_.userBuffer[1];
+      channels = stream_.nUserChannels[1];
+      format = stream_.userFormat;
+    }
+
+    // Read samples from device in interleaved/non-interleaved format.
+    if ( stream_.deviceInterleaved[1] )
+      result = snd_pcm_readi( handle[1], buffer, stream_.bufferSize );
+    else {
+      // NOTE(review): variable-length array — a compiler extension in C++,
+      // not standard; fine on gcc/clang, would not compile on MSVC.
+      void *bufs[channels];
+      size_t offset = stream_.bufferSize * formatBytes( format );
+      for ( int i=0; i<channels; i++ )
+        bufs[i] = (void *) (buffer + (i * offset));
+      result = snd_pcm_readn( handle[1], bufs, stream_.bufferSize );
+    }
+
+    if ( result < (int) stream_.bufferSize ) {
+      // Either an error or overrun occured.
+      if ( result == -EPIPE ) {
+        snd_pcm_state_t state = snd_pcm_state( handle[1] );
+        if ( state == SND_PCM_STATE_XRUN ) {
+          // Overrun: latch the flag for next cycle's status and re-prepare.
+          apiInfo->xrun[1] = true;
+          result = snd_pcm_prepare( handle[1] );
+          if ( result < 0 ) {
+            errorStream_ << "RtApiAlsa::callbackEvent: error preparing device after overrun, " << snd_strerror( result ) << ".";
+            errorText_ = errorStream_.str();
+          }
+        }
+        else {
+          errorStream_ << "RtApiAlsa::callbackEvent: error, current state is " << snd_pcm_state_name( state ) << ", " << snd_strerror( result ) << ".";
+          errorText_ = errorStream_.str();
+        }
+      }
+      else {
+        errorStream_ << "RtApiAlsa::callbackEvent: audio read error, " << snd_strerror( result ) << ".";
+        errorText_ = errorStream_.str();
+      }
+      error( RtAudioError::WARNING );
+      // A failed read must not skip the output half of a duplex stream.
+      goto tryOutput;
+    }
+
+    // Do byte swapping if necessary.
+    if ( stream_.doByteSwap[1] )
+      byteSwapBuffer( buffer, stream_.bufferSize * channels, format );
+
+    // Do buffer conversion if necessary.
+    if ( stream_.doConvertBuffer[1] )
+      convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
+
+    // Check stream latency
+    result = snd_pcm_delay( handle[1], &frames );
+    if ( result == 0 && frames > 0 ) stream_.latency[1] = frames;
+  }
+
+ tryOutput:
+
+  if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
+
+    // Setup parameters and do buffer conversion if necessary.
+    if ( stream_.doConvertBuffer[0] ) {
+      buffer = stream_.deviceBuffer;
+      convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
+      channels = stream_.nDeviceChannels[0];
+      format = stream_.deviceFormat[0];
+    }
+    else {
+      buffer = stream_.userBuffer[0];
+      channels = stream_.nUserChannels[0];
+      format = stream_.userFormat;
+    }
+
+    // Do byte swapping if necessary.
+    if ( stream_.doByteSwap[0] )
+      byteSwapBuffer(buffer, stream_.bufferSize * channels, format);
+
+    // Write samples to device in interleaved/non-interleaved format.
+    if ( stream_.deviceInterleaved[0] )
+      result = snd_pcm_writei( handle[0], buffer, stream_.bufferSize );
+    else {
+      void *bufs[channels];
+      size_t offset = stream_.bufferSize * formatBytes( format );
+      for ( int i=0; i<channels; i++ )
+        bufs[i] = (void *) (buffer + (i * offset));
+      result = snd_pcm_writen( handle[0], bufs, stream_.bufferSize );
+    }
+
+    if ( result < (int) stream_.bufferSize ) {
+      // Either an error or underrun occured.
+      if ( result == -EPIPE ) {
+        snd_pcm_state_t state = snd_pcm_state( handle[0] );
+        if ( state == SND_PCM_STATE_XRUN ) {
+          // Underrun: latch the flag for next cycle's status and re-prepare.
+          apiInfo->xrun[0] = true;
+          result = snd_pcm_prepare( handle[0] );
+          if ( result < 0 ) {
+            errorStream_ << "RtApiAlsa::callbackEvent: error preparing device after underrun, " << snd_strerror( result ) << ".";
+            errorText_ = errorStream_.str();
+          }
+          else
+            errorText_ = "RtApiAlsa::callbackEvent: audio write error, underrun.";
+        }
+        else {
+          errorStream_ << "RtApiAlsa::callbackEvent: error, current state is " << snd_pcm_state_name( state ) << ", " << snd_strerror( result ) << ".";
+          errorText_ = errorStream_.str();
+        }
+      }
+      else {
+        errorStream_ << "RtApiAlsa::callbackEvent: audio write error, " << snd_strerror( result ) << ".";
+        errorText_ = errorStream_.str();
+      }
+      error( RtAudioError::WARNING );
+      goto unlock;
+    }
+
+    // Check stream latency
+    result = snd_pcm_delay( handle[0], &frames );
+    if ( result == 0 && frames > 0 ) stream_.latency[0] = frames;
+  }
+
+ unlock:
+  MUTEX_UNLOCK( &stream_.mutex );
+
+  // Advance the stream clock; a callback return of 1 requests a drain-stop.
+  RtApi::tickStreamTime();
+  if ( doStopStream == 1 ) this->stopStream();
+}
+
+// Entry point of the ALSA callback thread: repeatedly invokes callbackEvent()
+// until closeStream() clears callbackInfo.isRunning.
+static void *alsaCallbackHandler( void *ptr )
+{
+  CallbackInfo *info = (CallbackInfo *) ptr;
+  RtApiAlsa *object = (RtApiAlsa *) info->object;
+  // NOTE(review): unlike the PulseAudio handler this pointer is not declared
+  // volatile even though the flag is written from another thread — confirm
+  // visibility is guaranteed by the surrounding mutex/cond usage.
+  bool *isRunning = &info->isRunning;
+
+#ifdef SCHED_RR // Undefined with some OSes (e.g. NetBSD 1.6.x with GNU Pthread)
+  // Log whether the requested realtime (SCHED_RR) scheduling actually took
+  // effect for this thread.
+  if ( info->doRealtime ) {
+    std::cerr << "RtAudio alsa: " <<
+             (sched_getscheduler(0) == SCHED_RR ? "" : "_NOT_ ") <<
+             "running realtime scheduling" << std::endl;
+  }
+#endif
+
+  while ( *isRunning == true ) {
+    pthread_testcancel();
+    object->callbackEvent();
+  }
+
+  pthread_exit( NULL );
+}
+
+//******************** End of __LINUX_ALSA__ *********************//
+#endif
+
+#if defined(__LINUX_PULSE__)
+
+// Code written by Peter Meerwald, pmeerw@pmeerw.net
+// and Tristan Matthews.
+
+#include <pulse/error.h>
+#include <pulse/simple.h>
+#include <cstdio>
+
+// Sample rates advertised by the PulseAudio backend; zero-terminated list.
+static const unsigned int SUPPORTED_SAMPLERATES[] = { 8000, 16000, 22050, 32000,
+                                                      44100, 48000, 96000, 0};
+
+// Pairs an RtAudio sample format with its native PulseAudio equivalent.
+struct rtaudio_pa_format_mapping_t {
+  RtAudioFormat rtaudio_format;
+  pa_sample_format_t pa_format;
+};
+
+// Natively supported format mappings; terminated by {0, PA_SAMPLE_INVALID}.
+static const rtaudio_pa_format_mapping_t supported_sampleformats[] = {
+  {RTAUDIO_SINT16, PA_SAMPLE_S16LE},
+  {RTAUDIO_SINT32, PA_SAMPLE_S32LE},
+  {RTAUDIO_FLOAT32, PA_SAMPLE_FLOAT32LE},
+  {0, PA_SAMPLE_INVALID}};
+
+// Per-stream PulseAudio state: the simple-API playback and record
+// connections, the callback thread, and the condition variable used to park
+// that thread while the stream is stopped.
+struct PulseAudioHandle {
+  pa_simple *s_play;      // playback connection (NULL if output not open)
+  pa_simple *s_rec;       // record connection (NULL if input not open)
+  pthread_t thread;       // callback thread created in probeDeviceOpen()
+  pthread_cond_t runnable_cv;  // signalled by startStream()/closeStream()
+  bool runnable;          // guarded by stream_.mutex; true while running
+  PulseAudioHandle() : s_play(0), s_rec(0), runnable(false) { }
+};
+
+// Destructor: ensure any open stream is fully torn down before the API
+// object goes away.
+RtApiPulse::~RtApiPulse()
+{
+  if ( stream_.state != STREAM_CLOSED )
+    closeStream();
+}
+
+// The PulseAudio backend exposes exactly one logical device (the server).
+unsigned int RtApiPulse::getDeviceCount( void )
+{
+  return 1;
+}
+
+// Return a fixed capability description for the single PulseAudio "device":
+// stereo in/out/duplex, the static rate list above, and the three natively
+// mapped sample formats.
+RtAudio::DeviceInfo RtApiPulse::getDeviceInfo( unsigned int /*device*/ )
+{
+  RtAudio::DeviceInfo info;
+  info.probed = true;
+  info.name = "PulseAudio";
+  info.outputChannels = 2;
+  info.inputChannels = 2;
+  info.duplexChannels = 2;
+  info.isDefaultOutput = true;
+  info.isDefaultInput = true;
+
+  // Copy the zero-terminated static rate table into the info structure.
+  for ( const unsigned int *sr = SUPPORTED_SAMPLERATES; *sr; ++sr )
+    info.sampleRates.push_back( *sr );
+
+  info.preferredSampleRate = 48000;
+  info.nativeFormats = RTAUDIO_SINT16 | RTAUDIO_SINT32 | RTAUDIO_FLOAT32;
+
+  return info;
+}
+
+// Entry point of the PulseAudio callback thread: repeatedly invokes
+// callbackEvent() until closeStream() clears callbackInfo.isRunning.
+static void *pulseaudio_callback( void * user )
+{
+  CallbackInfo *cbi = static_cast<CallbackInfo *>( user );
+  RtApiPulse *context = static_cast<RtApiPulse *>( cbi->object );
+  volatile bool *isRunning = &cbi->isRunning;
+
+#ifdef SCHED_RR // Undefined with some OSes (e.g. NetBSD 1.6.x with GNU Pthread)
+  // Log whether the requested realtime (SCHED_RR) scheduling actually took
+  // effect for this thread.
+  if (cbi->doRealtime) {
+    std::cerr << "RtAudio pulse: " <<
+             (sched_getscheduler(0) == SCHED_RR ? "" : "_NOT_ ") <<
+             "running realtime scheduling" << std::endl;
+  }
+#endif
+
+  while ( *isRunning ) {
+    pthread_testcancel();
+    context->callbackEvent();
+  }
+
+  pthread_exit( NULL );
+}
+
+// Close the PulseAudio stream: stop and join the callback thread, flush and
+// free the simple-API connections, then release the internal buffers.
+void RtApiPulse::closeStream( void )
+{
+  PulseAudioHandle *pah = static_cast<PulseAudioHandle *>( stream_.apiHandle );
+
+  stream_.callbackInfo.isRunning = false;
+  if ( pah ) {
+    MUTEX_LOCK( &stream_.mutex );
+    // A stopped stream leaves the callback thread parked on runnable_cv in
+    // callbackEvent(); wake it so it can observe isRunning == false and exit.
+    if ( stream_.state == STREAM_STOPPED ) {
+      pah->runnable = true;
+      pthread_cond_signal( &pah->runnable_cv );
+    }
+    MUTEX_UNLOCK( &stream_.mutex );
+
+    pthread_join( pah->thread, 0 );
+    if ( pah->s_play ) {
+      // Discard (not drain) any audio still queued for playback.
+      pa_simple_flush( pah->s_play, NULL );
+      pa_simple_free( pah->s_play );
+    }
+    if ( pah->s_rec )
+      pa_simple_free( pah->s_rec );
+
+    pthread_cond_destroy( &pah->runnable_cv );
+    delete pah;
+    stream_.apiHandle = 0;
+  }
+
+  // Free the user-side buffers for both directions.
+  if ( stream_.userBuffer[0] ) {
+    free( stream_.userBuffer[0] );
+    stream_.userBuffer[0] = 0;
+  }
+  if ( stream_.userBuffer[1] ) {
+    free( stream_.userBuffer[1] );
+    stream_.userBuffer[1] = 0;
+  }
+
+  stream_.state = STREAM_CLOSED;
+  stream_.mode = UNINITIALIZED;
+}
+
+// One cycle of the PulseAudio callback thread: wait while stopped, invoke the
+// user callback, then write one buffer of output and/or read one buffer of
+// input through the simple API (blocking calls).
+void RtApiPulse::callbackEvent( void )
+{
+  PulseAudioHandle *pah = static_cast<PulseAudioHandle *>( stream_.apiHandle );
+
+  // Park here while the stream is stopped; startStream()/closeStream() set
+  // runnable and signal the condition variable to release us.
+  if ( stream_.state == STREAM_STOPPED ) {
+    MUTEX_LOCK( &stream_.mutex );
+    while ( !pah->runnable )
+      pthread_cond_wait( &pah->runnable_cv, &stream_.mutex );
+
+    if ( stream_.state != STREAM_RUNNING ) {
+      MUTEX_UNLOCK( &stream_.mutex );
+      return;
+    }
+    MUTEX_UNLOCK( &stream_.mutex );
+  }
+
+  if ( stream_.state == STREAM_CLOSED ) {
+    errorText_ = "RtApiPulse::callbackEvent(): the stream is closed ... "
+      "this shouldn't happen!";
+    error( RtAudioError::WARNING );
+    return;
+  }
+
+  RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
+  double streamTime = getStreamTime();
+  // The simple API exposes no xrun information, so status is always 0 here.
+  RtAudioStreamStatus status = 0;
+  int doStopStream = callback( stream_.userBuffer[OUTPUT], stream_.userBuffer[INPUT],
+                               stream_.bufferSize, streamTime, status,
+                               stream_.callbackInfo.userData );
+
+  // Callback return value 2 requests an immediate abort (flush, no drain).
+  if ( doStopStream == 2 ) {
+    abortStream();
+    return;
+  }
+
+  MUTEX_LOCK( &stream_.mutex );
+  // Transfer via the device buffer when format/channel conversion is needed,
+  // otherwise directly via the user buffers.
+  void *pulse_in = stream_.doConvertBuffer[INPUT] ? stream_.deviceBuffer : stream_.userBuffer[INPUT];
+  void *pulse_out = stream_.doConvertBuffer[OUTPUT] ? stream_.deviceBuffer : stream_.userBuffer[OUTPUT];
+
+  if ( stream_.state != STREAM_RUNNING )
+    goto unlock;
+
+  int pa_error;
+  size_t bytes;
+  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
+    // Convert user data to the device format before writing, if required.
+    if ( stream_.doConvertBuffer[OUTPUT] ) {
+      convertBuffer( stream_.deviceBuffer,
+                     stream_.userBuffer[OUTPUT],
+                     stream_.convertInfo[OUTPUT] );
+      bytes = stream_.nDeviceChannels[OUTPUT] * stream_.bufferSize *
+        formatBytes( stream_.deviceFormat[OUTPUT] );
+    } else
+      bytes = stream_.nUserChannels[OUTPUT] * stream_.bufferSize *
+        formatBytes( stream_.userFormat );
+
+    if ( pa_simple_write( pah->s_play, pulse_out, bytes, &pa_error ) < 0 ) {
+      errorStream_ << "RtApiPulse::callbackEvent: audio write error, " <<
+        pa_strerror( pa_error ) << ".";
+      errorText_ = errorStream_.str();
+      error( RtAudioError::WARNING );
+    }
+  }
+
+  if ( stream_.mode == INPUT || stream_.mode == DUPLEX) {
+    if ( stream_.doConvertBuffer[INPUT] )
+      bytes = stream_.nDeviceChannels[INPUT] * stream_.bufferSize *
+        formatBytes( stream_.deviceFormat[INPUT] );
+    else
+      bytes = stream_.nUserChannels[INPUT] * stream_.bufferSize *
+        formatBytes( stream_.userFormat );
+
+    if ( pa_simple_read( pah->s_rec, pulse_in, bytes, &pa_error ) < 0 ) {
+      errorStream_ << "RtApiPulse::callbackEvent: audio read error, " <<
+        pa_strerror( pa_error ) << ".";
+      errorText_ = errorStream_.str();
+      error( RtAudioError::WARNING );
+    }
+    // Convert captured device data into the user's format, if required.
+    if ( stream_.doConvertBuffer[INPUT] ) {
+      convertBuffer( stream_.userBuffer[INPUT],
+                     stream_.deviceBuffer,
+                     stream_.convertInfo[INPUT] );
+    }
+  }
+
+ unlock:
+  MUTEX_UNLOCK( &stream_.mutex );
+  // Advance the stream clock; a callback return of 1 requests a drain-stop.
+  RtApi::tickStreamTime();
+
+  if ( doStopStream == 1 )
+    stopStream();
+}
+
+// Start the PulseAudio stream: reset the stream-time reference, mark the
+// stream running, and wake the parked callback thread.
+void RtApiPulse::startStream( void )
+{
+  PulseAudioHandle *pah = static_cast<PulseAudioHandle *>( stream_.apiHandle );
+
+  if ( stream_.state == STREAM_CLOSED ) {
+    errorText_ = "RtApiPulse::startStream(): the stream is not open!";
+    error( RtAudioError::INVALID_USE );
+    return;
+  }
+  if ( stream_.state == STREAM_RUNNING ) {
+    errorText_ = "RtApiPulse::startStream(): the stream is already running!";
+    error( RtAudioError::WARNING );
+    return;
+  }
+
+  MUTEX_LOCK( &stream_.mutex );
+
+  // Reset the stream-time reference so getStreamTime() restarts from now.
+  #if defined( HAVE_GETTIMEOFDAY )
+  gettimeofday( &stream_.lastTickTimestamp, NULL );
+  #endif
+
+  stream_.state = STREAM_RUNNING;
+
+  // Release the callback thread parked on runnable_cv in callbackEvent().
+  pah->runnable = true;
+  pthread_cond_signal( &pah->runnable_cv );
+  MUTEX_UNLOCK( &stream_.mutex );
+}
+
+// Stop the PulseAudio stream gracefully: park the callback thread and drain
+// the playback connection so queued audio is played out before stopping.
+void RtApiPulse::stopStream( void )
+{
+  PulseAudioHandle *pah = static_cast<PulseAudioHandle *>( stream_.apiHandle );
+
+  if ( stream_.state == STREAM_CLOSED ) {
+    errorText_ = "RtApiPulse::stopStream(): the stream is not open!";
+    error( RtAudioError::INVALID_USE );
+    return;
+  }
+  if ( stream_.state == STREAM_STOPPED ) {
+    errorText_ = "RtApiPulse::stopStream(): the stream is already stopped!";
+    error( RtAudioError::WARNING );
+    return;
+  }
+
+  // State is flipped before taking the mutex so callbackEvent() bails out of
+  // its next cycle as early as possible.
+  stream_.state = STREAM_STOPPED;
+  MUTEX_LOCK( &stream_.mutex );
+
+  if ( pah ) {
+    pah->runnable = false;
+    if ( pah->s_play ) {
+      int pa_error;
+      // Drain: blocks until already-written samples have been played.
+      if ( pa_simple_drain( pah->s_play, &pa_error ) < 0 ) {
+        errorStream_ << "RtApiPulse::stopStream: error draining output device, " <<
+          pa_strerror( pa_error ) << ".";
+        errorText_ = errorStream_.str();
+        MUTEX_UNLOCK( &stream_.mutex );
+        error( RtAudioError::SYSTEM_ERROR );
+        return;
+      }
+    }
+  }
+
+  // NOTE(review): state was already set to STREAM_STOPPED above; this second
+  // assignment is redundant (harmless).
+  stream_.state = STREAM_STOPPED;
+  MUTEX_UNLOCK( &stream_.mutex );
+}
+
+// Abort the PulseAudio stream immediately: like stopStream() but flushes
+// (discards) queued playback audio instead of draining it.
+void RtApiPulse::abortStream( void )
+{
+  PulseAudioHandle *pah = static_cast<PulseAudioHandle*>( stream_.apiHandle );
+
+  if ( stream_.state == STREAM_CLOSED ) {
+    errorText_ = "RtApiPulse::abortStream(): the stream is not open!";
+    error( RtAudioError::INVALID_USE );
+    return;
+  }
+  if ( stream_.state == STREAM_STOPPED ) {
+    errorText_ = "RtApiPulse::abortStream(): the stream is already stopped!";
+    error( RtAudioError::WARNING );
+    return;
+  }
+
+  // Flip state before locking so callbackEvent() exits its next cycle early.
+  stream_.state = STREAM_STOPPED;
+  MUTEX_LOCK( &stream_.mutex );
+
+  if ( pah ) {
+    pah->runnable = false;
+    if ( pah->s_play ) {
+      int pa_error;
+      // Flush: discards buffered samples without waiting for playback.
+      if ( pa_simple_flush( pah->s_play, &pa_error ) < 0 ) {
+        errorStream_ << "RtApiPulse::abortStream: error flushing output device, " <<
+          pa_strerror( pa_error ) << ".";
+        errorText_ = errorStream_.str();
+        MUTEX_UNLOCK( &stream_.mutex );
+        error( RtAudioError::SYSTEM_ERROR );
+        return;
+      }
+    }
+  }
+
+  // NOTE(review): state was already set to STREAM_STOPPED above; this second
+  // assignment is redundant (harmless).
+  stream_.state = STREAM_STOPPED;
+  MUTEX_UNLOCK( &stream_.mutex );
+}
+
+bool RtApiPulse::probeDeviceOpen( unsigned int device, StreamMode mode,
+ unsigned int channels, unsigned int firstChannel,
+ unsigned int sampleRate, RtAudioFormat format,
+ unsigned int *bufferSize, RtAudio::StreamOptions *options )
+{
+ PulseAudioHandle *pah = 0;
+ unsigned long bufferBytes = 0;
+ pa_sample_spec ss;
+
+ if ( device != 0 ) return false;
+ if ( mode != INPUT && mode != OUTPUT ) return false;
+ if ( channels != 1 && channels != 2 ) {
+ errorText_ = "RtApiPulse::probeDeviceOpen: unsupported number of channels.";
+ return false;
+ }
+ ss.channels = channels;
+
+ if ( firstChannel != 0 ) return false;
+
+ bool sr_found = false;
+ for ( const unsigned int *sr = SUPPORTED_SAMPLERATES; *sr; ++sr ) {
+ if ( sampleRate == *sr ) {
+ sr_found = true;
+ stream_.sampleRate = sampleRate;
+ ss.rate = sampleRate;
+ break;
+ }
+ }
+ if ( !sr_found ) {
+ errorText_ = "RtApiPulse::probeDeviceOpen: unsupported sample rate.";
+ return false;
+ }
+
+ bool sf_found = 0;
+ for ( const rtaudio_pa_format_mapping_t *sf = supported_sampleformats;
+ sf->rtaudio_format && sf->pa_format != PA_SAMPLE_INVALID; ++sf ) {
+ if ( format == sf->rtaudio_format ) {
+ sf_found = true;
+ stream_.userFormat = sf->rtaudio_format;
+ stream_.deviceFormat[mode] = stream_.userFormat;
+ ss.format = sf->pa_format;
+ break;
+ }
+ }
+ if ( !sf_found ) { // Use internal data format conversion.
+ stream_.userFormat = format;
+ stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
+ ss.format = PA_SAMPLE_FLOAT32LE;
+ }
+
+ // Set other stream parameters.
+ if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
+ else stream_.userInterleaved = true;
+ stream_.deviceInterleaved[mode] = true;
+ stream_.nBuffers = 1;
+ stream_.doByteSwap[mode] = false;
+ stream_.nUserChannels[mode] = channels;
+ stream_.nDeviceChannels[mode] = channels + firstChannel;
+ stream_.channelOffset[mode] = 0;
+ std::string streamName = "RtAudio";
+
+ // Set flags for buffer conversion.
+ stream_.doConvertBuffer[mode] = false;
+ if ( stream_.userFormat != stream_.deviceFormat[mode] )
+ stream_.doConvertBuffer[mode] = true;
+ if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
+ stream_.doConvertBuffer[mode] = true;
+ if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] )
+ stream_.doConvertBuffer[mode] = true;
+
+ // Allocate necessary internal buffers.
+ bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
+ stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
+ if ( stream_.userBuffer[mode] == NULL ) {
+ errorText_ = "RtApiPulse::probeDeviceOpen: error allocating user buffer memory.";
+ goto error;
+ }
+ stream_.bufferSize = *bufferSize;
+
+ if ( stream_.doConvertBuffer[mode] ) {
+
+ bool makeBuffer = true;
+ bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
+ if ( mode == INPUT ) {
+ if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
+ unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
+ if ( bufferBytes <= bytesOut ) makeBuffer = false;
+ }
+ }
+
+ if ( makeBuffer ) {
+ bufferBytes *= *bufferSize;
+ if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
+ stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
+ if ( stream_.deviceBuffer == NULL ) {
+ errorText_ = "RtApiPulse::probeDeviceOpen: error allocating device buffer memory.";
+ goto error;
+ }
+ }
+ }
+
+ stream_.device[mode] = device;
+
+ // Setup the buffer conversion information structure.
+ if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
+
+ if ( !stream_.apiHandle ) {
+ PulseAudioHandle *pah = new PulseAudioHandle;
+ if ( !pah ) {
+ errorText_ = "RtApiPulse::probeDeviceOpen: error allocating memory for handle.";
+ goto error;
+ }
+
+ stream_.apiHandle = pah;
+ if ( pthread_cond_init( &pah->runnable_cv, NULL ) != 0 ) {
+ errorText_ = "RtApiPulse::probeDeviceOpen: error creating condition variable.";
+ goto error;
+ }
+ }
+ pah = static_cast<PulseAudioHandle *>( stream_.apiHandle );
+
+ int error;
+ if ( options && !options->streamName.empty() ) streamName = options->streamName;
+ switch ( mode ) {
+ case INPUT:
+ pa_buffer_attr buffer_attr;
+ buffer_attr.fragsize = bufferBytes;
+ buffer_attr.maxlength = -1;
+
+ pah->s_rec = pa_simple_new( NULL, streamName.c_str(), PA_STREAM_RECORD, NULL, "Record", &ss, NULL, &buffer_attr, &error );
+ if ( !pah->s_rec ) {
+ errorText_ = "RtApiPulse::probeDeviceOpen: error connecting input to PulseAudio server.";
+ goto error;
+ }
+ break;
+ case OUTPUT:
+ pah->s_play = pa_simple_new( NULL, streamName.c_str(), PA_STREAM_PLAYBACK, NULL, "Playback", &ss, NULL, NULL, &error );
+ if ( !pah->s_play ) {
+ errorText_ = "RtApiPulse::probeDeviceOpen: error connecting output to PulseAudio server.";
+ goto error;
+ }
+ break;
+ default:
+ goto error;
+ }
+
+ if ( stream_.mode == UNINITIALIZED )
+ stream_.mode = mode;
+ else if ( stream_.mode == mode )
+ goto error;
+ else
+ stream_.mode = DUPLEX;
+
+ if ( !stream_.callbackInfo.isRunning ) {
+ stream_.callbackInfo.object = this;
+
+ stream_.state = STREAM_STOPPED;
+ // Set the thread attributes for joinable and realtime scheduling
+ // priority (optional). The higher priority will only take affect
+ // if the program is run as root or suid. Note, under Linux
+ // processes with CAP_SYS_NICE privilege, a user can change
+ // scheduling policy and priority (thus need not be root). See
+ // POSIX "capabilities".
+ pthread_attr_t attr;
+ pthread_attr_init( &attr );
+ pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );
+#ifdef SCHED_RR // Undefined with some OSes (e.g. NetBSD 1.6.x with GNU Pthread)
+ if ( options && options->flags & RTAUDIO_SCHEDULE_REALTIME ) {
+ stream_.callbackInfo.doRealtime = true;
+ struct sched_param param;
+ int priority = options->priority;
+ int min = sched_get_priority_min( SCHED_RR );
+ int max = sched_get_priority_max( SCHED_RR );
+ if ( priority < min ) priority = min;
+ else if ( priority > max ) priority = max;
+ param.sched_priority = priority;
+
+ // Set the policy BEFORE the priority. Otherwise it fails.
+ pthread_attr_setschedpolicy(&attr, SCHED_RR);
+ pthread_attr_setscope (&attr, PTHREAD_SCOPE_SYSTEM);
+ // This is definitely required. Otherwise it fails.
+ pthread_attr_setinheritsched(&attr, PTHREAD_EXPLICIT_SCHED);
+ pthread_attr_setschedparam(&attr, ¶m);
+ }
+ else
+ pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
+#else
+ pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
+#endif
+
+ stream_.callbackInfo.isRunning = true;
+ int result = pthread_create( &pah->thread, &attr, pulseaudio_callback, (void *)&stream_.callbackInfo);
+ pthread_attr_destroy(&attr);
+ if(result != 0) {
+ // Failed. Try instead with default attributes.
+ result = pthread_create( &pah->thread, NULL, pulseaudio_callback, (void *)&stream_.callbackInfo);
+ if(result != 0) {
+ stream_.callbackInfo.isRunning = false;
+ errorText_ = "RtApiPulse::probeDeviceOpen: error creating thread.";
+ goto error;
+ }
+ }
+ }
+
+ return SUCCESS;
+
+ error:
+ if ( pah && stream_.callbackInfo.isRunning ) {
+ pthread_cond_destroy( &pah->runnable_cv );
+ delete pah;
+ stream_.apiHandle = 0;
+ }
+
+ for ( int i=0; i<2; i++ ) {
+ if ( stream_.userBuffer[i] ) {
+ free( stream_.userBuffer[i] );
+ stream_.userBuffer[i] = 0;
+ }
+ }
+
+ if ( stream_.deviceBuffer ) {
+ free( stream_.deviceBuffer );
+ stream_.deviceBuffer = 0;
+ }
+
+ stream_.state = STREAM_CLOSED;
+ return FAILURE;
+}
+
+//******************** End of __LINUX_PULSE__ *********************//
+#endif
+
+#if defined(__LINUX_OSS__)
+
+#include <unistd.h>
+#include <sys/ioctl.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <sys/soundcard.h>
+#include <errno.h>
+#include <math.h>
+
+static void *ossCallbackHandler(void * ptr);
+
+// A structure to hold various information related to the OSS API
+// implementation.
// Per-stream bookkeeping for the OSS API implementation.
struct OssHandle {
  int id[2];               // device file descriptors: [0] playback, [1] capture
  bool xrun[2];            // under/overflow flags per direction
  bool triggered;          // duplex trigger state (see SNDCTL_DSP_SETTRIGGER use)
  pthread_cond_t runnable; // signaled to wake the callback thread

  OssHandle()
    : triggered( false )
  {
    id[0] = id[1] = 0;
    xrun[0] = xrun[1] = false;
  }
};
+
+RtApiOss :: RtApiOss()
+{
+ // Nothing to do here.
+}
+
+RtApiOss :: ~RtApiOss()
+{
+ if ( stream_.state != STREAM_CLOSED ) closeStream();
+}
+
+unsigned int RtApiOss :: getDeviceCount( void )
+{
+ int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
+ if ( mixerfd == -1 ) {
+ errorText_ = "RtApiOss::getDeviceCount: error opening '/dev/mixer'.";
+ error( RtAudioError::WARNING );
+ return 0;
+ }
+
+ oss_sysinfo sysinfo;
+ if ( ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo ) == -1 ) {
+ close( mixerfd );
+ errorText_ = "RtApiOss::getDeviceCount: error getting sysinfo, OSS version >= 4.0 is required.";
+ error( RtAudioError::WARNING );
+ return 0;
+ }
+
+ close( mixerfd );
+ return sysinfo.numaudios;
+}
+
+RtAudio::DeviceInfo RtApiOss :: getDeviceInfo( unsigned int device )
+{
+ RtAudio::DeviceInfo info;
+ info.probed = false;
+
+ int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
+ if ( mixerfd == -1 ) {
+ errorText_ = "RtApiOss::getDeviceInfo: error opening '/dev/mixer'.";
+ error( RtAudioError::WARNING );
+ return info;
+ }
+
+ oss_sysinfo sysinfo;
+ int result = ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo );
+ if ( result == -1 ) {
+ close( mixerfd );
+ errorText_ = "RtApiOss::getDeviceInfo: error getting sysinfo, OSS version >= 4.0 is required.";
+ error( RtAudioError::WARNING );
+ return info;
+ }
+
+ unsigned nDevices = sysinfo.numaudios;
+ if ( nDevices == 0 ) {
+ close( mixerfd );
+ errorText_ = "RtApiOss::getDeviceInfo: no devices found!";
+ error( RtAudioError::INVALID_USE );
+ return info;
+ }
+
+ if ( device >= nDevices ) {
+ close( mixerfd );
+ errorText_ = "RtApiOss::getDeviceInfo: device ID is invalid!";
+ error( RtAudioError::INVALID_USE );
+ return info;
+ }
+
+ oss_audioinfo ainfo;
+ ainfo.dev = device;
+ result = ioctl( mixerfd, SNDCTL_AUDIOINFO, &ainfo );
+ close( mixerfd );
+ if ( result == -1 ) {
+ errorStream_ << "RtApiOss::getDeviceInfo: error getting device (" << ainfo.name << ") info.";
+ errorText_ = errorStream_.str();
+ error( RtAudioError::WARNING );
+ return info;
+ }
+
+ // Probe channels
+ if ( ainfo.caps & PCM_CAP_OUTPUT ) info.outputChannels = ainfo.max_channels;
+ if ( ainfo.caps & PCM_CAP_INPUT ) info.inputChannels = ainfo.max_channels;
+ if ( ainfo.caps & PCM_CAP_DUPLEX ) {
+ if ( info.outputChannels > 0 && info.inputChannels > 0 && ainfo.caps & PCM_CAP_DUPLEX )
+ info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
+ }
+
+ // Probe data formats ... do for input
+ unsigned long mask = ainfo.iformats;
+ if ( mask & AFMT_S16_LE || mask & AFMT_S16_BE )
+ info.nativeFormats |= RTAUDIO_SINT16;
+ if ( mask & AFMT_S8 )
+ info.nativeFormats |= RTAUDIO_SINT8;
+ if ( mask & AFMT_S32_LE || mask & AFMT_S32_BE )
+ info.nativeFormats |= RTAUDIO_SINT32;
+#ifdef AFMT_FLOAT
+ if ( mask & AFMT_FLOAT )
+ info.nativeFormats |= RTAUDIO_FLOAT32;
+#endif
+ if ( mask & AFMT_S24_LE || mask & AFMT_S24_BE )
+ info.nativeFormats |= RTAUDIO_SINT24;
+
+ // Check that we have at least one supported format
+ if ( info.nativeFormats == 0 ) {
+ errorStream_ << "RtApiOss::getDeviceInfo: device (" << ainfo.name << ") data format not supported by RtAudio.";
+ errorText_ = errorStream_.str();
+ error( RtAudioError::WARNING );
+ return info;
+ }
+
+ // Probe the supported sample rates.
+ info.sampleRates.clear();
+ if ( ainfo.nrates ) {
+ for ( unsigned int i=0; i<ainfo.nrates; i++ ) {
+ for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
+ if ( ainfo.rates[i] == SAMPLE_RATES[k] ) {
+ info.sampleRates.push_back( SAMPLE_RATES[k] );
+
+ if ( !info.preferredSampleRate || ( SAMPLE_RATES[k] <= 48000 && SAMPLE_RATES[k] > info.preferredSampleRate ) )
+ info.preferredSampleRate = SAMPLE_RATES[k];
+
+ break;
+ }
+ }
+ }
+ }
+ else {
+ // Check min and max rate values;
+ for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
+ if ( ainfo.min_rate <= (int) SAMPLE_RATES[k] && ainfo.max_rate >= (int) SAMPLE_RATES[k] ) {
+ info.sampleRates.push_back( SAMPLE_RATES[k] );
+
+ if ( !info.preferredSampleRate || ( SAMPLE_RATES[k] <= 48000 && SAMPLE_RATES[k] > info.preferredSampleRate ) )
+ info.preferredSampleRate = SAMPLE_RATES[k];
+ }
+ }
+ }
+
+ if ( info.sampleRates.size() == 0 ) {
+ errorStream_ << "RtApiOss::getDeviceInfo: no supported sample rates found for device (" << ainfo.name << ").";
+ errorText_ = errorStream_.str();
+ error( RtAudioError::WARNING );
+ }
+ else {
+ info.probed = true;
+ info.name = ainfo.name;
+ }
+
+ return info;
+}
+
+
+bool RtApiOss :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
+ unsigned int firstChannel, unsigned int sampleRate,
+ RtAudioFormat format, unsigned int *bufferSize,
+ RtAudio::StreamOptions *options )
+{
+ int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
+ if ( mixerfd == -1 ) {
+ errorText_ = "RtApiOss::probeDeviceOpen: error opening '/dev/mixer'.";
+ return FAILURE;
+ }
+
+ oss_sysinfo sysinfo;
+ int result = ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo );
+ if ( result == -1 ) {
+ close( mixerfd );
+ errorText_ = "RtApiOss::probeDeviceOpen: error getting sysinfo, OSS version >= 4.0 is required.";
+ return FAILURE;
+ }
+
+ unsigned nDevices = sysinfo.numaudios;
+ if ( nDevices == 0 ) {
+ // This should not happen because a check is made before this function is called.
+ close( mixerfd );
+ errorText_ = "RtApiOss::probeDeviceOpen: no devices found!";
+ return FAILURE;
+ }
+
+ if ( device >= nDevices ) {
+ // This should not happen because a check is made before this function is called.
+ close( mixerfd );
+ errorText_ = "RtApiOss::probeDeviceOpen: device ID is invalid!";
+ return FAILURE;
+ }
+
+ oss_audioinfo ainfo;
+ ainfo.dev = device;
+ result = ioctl( mixerfd, SNDCTL_AUDIOINFO, &ainfo );
+ close( mixerfd );
+ if ( result == -1 ) {
+ errorStream_ << "RtApiOss::getDeviceInfo: error getting device (" << ainfo.name << ") info.";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // Check if device supports input or output
+ if ( ( mode == OUTPUT && !( ainfo.caps & PCM_CAP_OUTPUT ) ) ||
+ ( mode == INPUT && !( ainfo.caps & PCM_CAP_INPUT ) ) ) {
+ if ( mode == OUTPUT )
+ errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support output.";
+ else
+ errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support input.";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ int flags = 0;
+ OssHandle *handle = (OssHandle *) stream_.apiHandle;
+ if ( mode == OUTPUT )
+ flags |= O_WRONLY;
+ else { // mode == INPUT
+ if (stream_.mode == OUTPUT && stream_.device[0] == device) {
+ // We just set the same device for playback ... close and reopen for duplex (OSS only).
+ close( handle->id[0] );
+ handle->id[0] = 0;
+ if ( !( ainfo.caps & PCM_CAP_DUPLEX ) ) {
+ errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support duplex mode.";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+ // Check that the number previously set channels is the same.
+ if ( stream_.nUserChannels[0] != channels ) {
+ errorStream_ << "RtApiOss::probeDeviceOpen: input/output channels must be equal for OSS duplex device (" << ainfo.name << ").";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+ flags |= O_RDWR;
+ }
+ else
+ flags |= O_RDONLY;
+ }
+
+ // Set exclusive access if specified.
+ if ( options && options->flags & RTAUDIO_HOG_DEVICE ) flags |= O_EXCL;
+
+ // Try to open the device.
+ int fd;
+ fd = open( ainfo.devnode, flags, 0 );
+ if ( fd == -1 ) {
+ if ( errno == EBUSY )
+ errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") is busy.";
+ else
+ errorStream_ << "RtApiOss::probeDeviceOpen: error opening device (" << ainfo.name << ").";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // For duplex operation, specifically set this mode (this doesn't seem to work).
+ /*
+ if ( flags | O_RDWR ) {
+ result = ioctl( fd, SNDCTL_DSP_SETDUPLEX, NULL );
+ if ( result == -1) {
+ errorStream_ << "RtApiOss::probeDeviceOpen: error setting duplex mode for device (" << ainfo.name << ").";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+ }
+ */
+
+ // Check the device channel support.
+ stream_.nUserChannels[mode] = channels;
+ if ( ainfo.max_channels < (int)(channels + firstChannel) ) {
+ close( fd );
+ errorStream_ << "RtApiOss::probeDeviceOpen: the device (" << ainfo.name << ") does not support requested channel parameters.";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // Set the number of channels.
+ int deviceChannels = channels + firstChannel;
+ result = ioctl( fd, SNDCTL_DSP_CHANNELS, &deviceChannels );
+ if ( result == -1 || deviceChannels < (int)(channels + firstChannel) ) {
+ close( fd );
+ errorStream_ << "RtApiOss::probeDeviceOpen: error setting channel parameters on device (" << ainfo.name << ").";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+ stream_.nDeviceChannels[mode] = deviceChannels;
+
+ // Get the data format mask
+ int mask;
+ result = ioctl( fd, SNDCTL_DSP_GETFMTS, &mask );
+ if ( result == -1 ) {
+ close( fd );
+ errorStream_ << "RtApiOss::probeDeviceOpen: error getting device (" << ainfo.name << ") data formats.";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // Determine how to set the device format.
+ stream_.userFormat = format;
+ int deviceFormat = -1;
+ stream_.doByteSwap[mode] = false;
+ if ( format == RTAUDIO_SINT8 ) {
+ if ( mask & AFMT_S8 ) {
+ deviceFormat = AFMT_S8;
+ stream_.deviceFormat[mode] = RTAUDIO_SINT8;
+ }
+ }
+ else if ( format == RTAUDIO_SINT16 ) {
+ if ( mask & AFMT_S16_NE ) {
+ deviceFormat = AFMT_S16_NE;
+ stream_.deviceFormat[mode] = RTAUDIO_SINT16;
+ }
+ else if ( mask & AFMT_S16_OE ) {
+ deviceFormat = AFMT_S16_OE;
+ stream_.deviceFormat[mode] = RTAUDIO_SINT16;
+ stream_.doByteSwap[mode] = true;
+ }
+ }
+ else if ( format == RTAUDIO_SINT24 ) {
+ if ( mask & AFMT_S24_NE ) {
+ deviceFormat = AFMT_S24_NE;
+ stream_.deviceFormat[mode] = RTAUDIO_SINT24;
+ }
+ else if ( mask & AFMT_S24_OE ) {
+ deviceFormat = AFMT_S24_OE;
+ stream_.deviceFormat[mode] = RTAUDIO_SINT24;
+ stream_.doByteSwap[mode] = true;
+ }
+ }
+ else if ( format == RTAUDIO_SINT32 ) {
+ if ( mask & AFMT_S32_NE ) {
+ deviceFormat = AFMT_S32_NE;
+ stream_.deviceFormat[mode] = RTAUDIO_SINT32;
+ }
+ else if ( mask & AFMT_S32_OE ) {
+ deviceFormat = AFMT_S32_OE;
+ stream_.deviceFormat[mode] = RTAUDIO_SINT32;
+ stream_.doByteSwap[mode] = true;
+ }
+ }
+
+ if ( deviceFormat == -1 ) {
+ // The user requested format is not natively supported by the device.
+ if ( mask & AFMT_S16_NE ) {
+ deviceFormat = AFMT_S16_NE;
+ stream_.deviceFormat[mode] = RTAUDIO_SINT16;
+ }
+ else if ( mask & AFMT_S32_NE ) {
+ deviceFormat = AFMT_S32_NE;
+ stream_.deviceFormat[mode] = RTAUDIO_SINT32;
+ }
+ else if ( mask & AFMT_S24_NE ) {
+ deviceFormat = AFMT_S24_NE;
+ stream_.deviceFormat[mode] = RTAUDIO_SINT24;
+ }
+ else if ( mask & AFMT_S16_OE ) {
+ deviceFormat = AFMT_S16_OE;
+ stream_.deviceFormat[mode] = RTAUDIO_SINT16;
+ stream_.doByteSwap[mode] = true;
+ }
+ else if ( mask & AFMT_S32_OE ) {
+ deviceFormat = AFMT_S32_OE;
+ stream_.deviceFormat[mode] = RTAUDIO_SINT32;
+ stream_.doByteSwap[mode] = true;
+ }
+ else if ( mask & AFMT_S24_OE ) {
+ deviceFormat = AFMT_S24_OE;
+ stream_.deviceFormat[mode] = RTAUDIO_SINT24;
+ stream_.doByteSwap[mode] = true;
+ }
+ else if ( mask & AFMT_S8) {
+ deviceFormat = AFMT_S8;
+ stream_.deviceFormat[mode] = RTAUDIO_SINT8;
+ }
+ }
+
+ if ( stream_.deviceFormat[mode] == 0 ) {
+ // This really shouldn't happen ...
+ close( fd );
+ errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") data format not supported by RtAudio.";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // Set the data format.
+ int temp = deviceFormat;
+ result = ioctl( fd, SNDCTL_DSP_SETFMT, &deviceFormat );
+ if ( result == -1 || deviceFormat != temp ) {
+ close( fd );
+ errorStream_ << "RtApiOss::probeDeviceOpen: error setting data format on device (" << ainfo.name << ").";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // Attempt to set the buffer size. According to OSS, the minimum
+ // number of buffers is two. The supposed minimum buffer size is 16
+ // bytes, so that will be our lower bound. The argument to this
+ // call is in the form 0xMMMMSSSS (hex), where the buffer size (in
+ // bytes) is given as 2^SSSS and the number of buffers as 2^MMMM.
+ // We'll check the actual value used near the end of the setup
+ // procedure.
+ int ossBufferBytes = *bufferSize * formatBytes( stream_.deviceFormat[mode] ) * deviceChannels;
+ if ( ossBufferBytes < 16 ) ossBufferBytes = 16;
+ int buffers = 0;
+ if ( options ) buffers = options->numberOfBuffers;
+ if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) buffers = 2;
+ if ( buffers < 2 ) buffers = 3;
+ temp = ((int) buffers << 16) + (int)( log10( (double)ossBufferBytes ) / log10( 2.0 ) );
+ result = ioctl( fd, SNDCTL_DSP_SETFRAGMENT, &temp );
+ if ( result == -1 ) {
+ close( fd );
+ errorStream_ << "RtApiOss::probeDeviceOpen: error setting buffer size on device (" << ainfo.name << ").";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+ stream_.nBuffers = buffers;
+
+ // Save buffer size (in sample frames).
+ *bufferSize = ossBufferBytes / ( formatBytes(stream_.deviceFormat[mode]) * deviceChannels );
+ stream_.bufferSize = *bufferSize;
+
+ // Set the sample rate.
+ int srate = sampleRate;
+ result = ioctl( fd, SNDCTL_DSP_SPEED, &srate );
+ if ( result == -1 ) {
+ close( fd );
+ errorStream_ << "RtApiOss::probeDeviceOpen: error setting sample rate (" << sampleRate << ") on device (" << ainfo.name << ").";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+
+ // Verify the sample rate setup worked.
+ if ( abs( srate - (int)sampleRate ) > 100 ) {
+ close( fd );
+ errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support sample rate (" << sampleRate << ").";
+ errorText_ = errorStream_.str();
+ return FAILURE;
+ }
+ stream_.sampleRate = sampleRate;
+
+ if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] == device) {
+ // We're doing duplex setup here.
+ stream_.deviceFormat[0] = stream_.deviceFormat[1];
+ stream_.nDeviceChannels[0] = deviceChannels;
+ }
+
+ // Set interleaving parameters.
+ stream_.userInterleaved = true;
+ stream_.deviceInterleaved[mode] = true;
+ if ( options && options->flags & RTAUDIO_NONINTERLEAVED )
+ stream_.userInterleaved = false;
+
+ // Set flags for buffer conversion
+ stream_.doConvertBuffer[mode] = false;
+ if ( stream_.userFormat != stream_.deviceFormat[mode] )
+ stream_.doConvertBuffer[mode] = true;
+ if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
+ stream_.doConvertBuffer[mode] = true;
+ if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
+ stream_.nUserChannels[mode] > 1 )
+ stream_.doConvertBuffer[mode] = true;
+
+ // Allocate the stream handles if necessary and then save.
+ if ( stream_.apiHandle == 0 ) {
+ try {
+ handle = new OssHandle;
+ }
+ catch ( std::bad_alloc& ) {
+ errorText_ = "RtApiOss::probeDeviceOpen: error allocating OssHandle memory.";
+ goto error;
+ }
+
+ if ( pthread_cond_init( &handle->runnable, NULL ) ) {
+ errorText_ = "RtApiOss::probeDeviceOpen: error initializing pthread condition variable.";
+ goto error;
+ }
+
+ stream_.apiHandle = (void *) handle;
+ }
+ else {
+ handle = (OssHandle *) stream_.apiHandle;
+ }
+ handle->id[mode] = fd;
+
+ // Allocate necessary internal buffers.
+ unsigned long bufferBytes;
+ bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
+ stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
+ if ( stream_.userBuffer[mode] == NULL ) {
+ errorText_ = "RtApiOss::probeDeviceOpen: error allocating user buffer memory.";
+ goto error;
+ }
+
+ if ( stream_.doConvertBuffer[mode] ) {
+
+ bool makeBuffer = true;
+ bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
+ if ( mode == INPUT ) {
+ if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
+ unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
+ if ( bufferBytes <= bytesOut ) makeBuffer = false;
+ }
+ }
+
+ if ( makeBuffer ) {
+ bufferBytes *= *bufferSize;
+ if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
+ stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
+ if ( stream_.deviceBuffer == NULL ) {
+ errorText_ = "RtApiOss::probeDeviceOpen: error allocating device buffer memory.";
+ goto error;
+ }
+ }
+ }
+
+ stream_.device[mode] = device;
+ stream_.state = STREAM_STOPPED;
+
+ // Setup the buffer conversion information structure.
+ if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
+
+ // Setup thread if necessary.
+ if ( stream_.mode == OUTPUT && mode == INPUT ) {
+ // We had already set up an output stream.
+ stream_.mode = DUPLEX;
+ if ( stream_.device[0] == device ) handle->id[0] = fd;
+ }
+ else {
+ stream_.mode = mode;
+
+ // Setup callback thread.
+ stream_.callbackInfo.object = (void *) this;
+
+ // Set the thread attributes for joinable and realtime scheduling
+ // priority. The higher priority will only take affect if the
+ // program is run as root or suid.
+ pthread_attr_t attr;
+ pthread_attr_init( &attr );
+ pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );
+#ifdef SCHED_RR // Undefined with some OSes (e.g. NetBSD 1.6.x with GNU Pthread)
+ if ( options && options->flags & RTAUDIO_SCHEDULE_REALTIME ) {
+ stream_.callbackInfo.doRealtime = true;
+ struct sched_param param;
+ int priority = options->priority;
+ int min = sched_get_priority_min( SCHED_RR );
+ int max = sched_get_priority_max( SCHED_RR );
+ if ( priority < min ) priority = min;
+ else if ( priority > max ) priority = max;
+ param.sched_priority = priority;
+
+ // Set the policy BEFORE the priority. Otherwise it fails.
+ pthread_attr_setschedpolicy(&attr, SCHED_RR);
+ pthread_attr_setscope (&attr, PTHREAD_SCOPE_SYSTEM);
+ // This is definitely required. Otherwise it fails.
+ pthread_attr_setinheritsched(&attr, PTHREAD_EXPLICIT_SCHED);
+ pthread_attr_setschedparam(&attr, ¶m);
+ }
+ else
+ pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
+#else
+ pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
+#endif
+
+ stream_.callbackInfo.isRunning = true;
+ result = pthread_create( &stream_.callbackInfo.thread, &attr, ossCallbackHandler, &stream_.callbackInfo );
+ pthread_attr_destroy( &attr );
+ if ( result ) {
+ // Failed. Try instead with default attributes.
+ result = pthread_create( &stream_.callbackInfo.thread, NULL, ossCallbackHandler, &stream_.callbackInfo );
+ if ( result ) {
+ stream_.callbackInfo.isRunning = false;
+ errorText_ = "RtApiOss::error creating callback thread!";
+ goto error;
+ }
+ }
+ }
+
+ return SUCCESS;
+
+ error:
+ if ( handle ) {
+ pthread_cond_destroy( &handle->runnable );
+ if ( handle->id[0] ) close( handle->id[0] );
+ if ( handle->id[1] ) close( handle->id[1] );
+ delete handle;
+ stream_.apiHandle = 0;
+ }
+
+ for ( int i=0; i<2; i++ ) {
+ if ( stream_.userBuffer[i] ) {
+ free( stream_.userBuffer[i] );
+ stream_.userBuffer[i] = 0;
+ }
+ }
+
+ if ( stream_.deviceBuffer ) {
+ free( stream_.deviceBuffer );
+ stream_.deviceBuffer = 0;
+ }
+
+ stream_.state = STREAM_CLOSED;
+ return FAILURE;
+}
+
// Close the OSS stream: stop and join the callback thread, halt any
// running device, destroy the API handle and free all internal buffers.
void RtApiOss :: closeStream()
{
  if ( stream_.state == STREAM_CLOSED ) {
    errorText_ = "RtApiOss::closeStream(): no open stream to close!";
    error( RtAudioError::WARNING );
    return;
  }

  OssHandle *handle = (OssHandle *) stream_.apiHandle;
  // Ask the callback thread to exit, then wake it if it is parked
  // waiting for startStream(), and join it.
  // NOTE(review): handle is dereferenced here before the null check
  // below — presumably apiHandle is always non-null for an open
  // stream (probeDeviceOpen sets it on success); verify.
  stream_.callbackInfo.isRunning = false;
  MUTEX_LOCK( &stream_.mutex );
  if ( stream_.state == STREAM_STOPPED )
    pthread_cond_signal( &handle->runnable );
  MUTEX_UNLOCK( &stream_.mutex );
  pthread_join( stream_.callbackInfo.thread, NULL );

  // Halt whichever device is still running before closing.
  if ( stream_.state == STREAM_RUNNING ) {
    if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
      ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
    else
      ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
    stream_.state = STREAM_STOPPED;
  }

  // Release the condition variable, file descriptors and the handle.
  if ( handle ) {
    pthread_cond_destroy( &handle->runnable );
    if ( handle->id[0] ) close( handle->id[0] );
    if ( handle->id[1] ) close( handle->id[1] );
    delete handle;
    stream_.apiHandle = 0;
  }

  // Free the per-direction user buffers ...
  for ( int i=0; i<2; i++ ) {
    if ( stream_.userBuffer[i] ) {
      free( stream_.userBuffer[i] );
      stream_.userBuffer[i] = 0;
    }
  }

  // ... and the shared device-format conversion buffer.
  if ( stream_.deviceBuffer ) {
    free( stream_.deviceBuffer );
    stream_.deviceBuffer = 0;
  }

  stream_.mode = UNINITIALIZED;
  stream_.state = STREAM_CLOSED;
}
+
// Transition the stream from STOPPED to RUNNING and wake the callback
// thread.  OSS needs no explicit start ioctl: the device starts as soon
// as samples are written/read by the callback loop.
void RtApiOss :: startStream()
{
  verifyStream();
  if ( stream_.state == STREAM_RUNNING ) {
    errorText_ = "RtApiOss::startStream(): the stream is already running!";
    error( RtAudioError::WARNING );
    return;
  }

  MUTEX_LOCK( &stream_.mutex );

  #if defined( HAVE_GETTIMEOFDAY )
  // Reset the stream-time reference point.
  gettimeofday( &stream_.lastTickTimestamp, NULL );
  #endif

  stream_.state = STREAM_RUNNING;

  // No need to do anything else here ... OSS automatically starts
  // when fed samples.

  MUTEX_UNLOCK( &stream_.mutex );

  // Wake the callback thread, which blocks on this condition while stopped.
  OssHandle *handle = (OssHandle *) stream_.apiHandle;
  pthread_cond_signal( &handle->runnable );
}
+
+void RtApiOss :: stopStream()
+{
+ verifyStream();
+ if ( stream_.state == STREAM_STOPPED ) {
+ errorText_ = "RtApiOss::stopStream(): the stream is already stopped!";
+ error( RtAudioError::WARNING );
+ return;
+ }
+
+ MUTEX_LOCK( &stream_.mutex );
+
+ // The state might change while waiting on a mutex.
+ if ( stream_.state == STREAM_STOPPED ) {
+ MUTEX_UNLOCK( &stream_.mutex );
+ return;
+ }
+
+ int result = 0;
+ OssHandle *handle = (OssHandle *) stream_.apiHandle;
+ if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
+
+ // Flush the output with zeros a few times.
+ char *buffer;
+ int samples;
+ RtAudioFormat format;
+
+ if ( stream_.doConvertBuffer[0] ) {
+ buffer = stream_.deviceBuffer;
+ samples = stream_.bufferSize * stream_.nDeviceChannels[0];
+ format = stream_.deviceFormat[0];
+ }
+ else {
+ buffer = stream_.userBuffer[0];
+ samples = stream_.bufferSize * stream_.nUserChannels[0];
+ format = stream_.userFormat;
+ }
+
+ memset( buffer, 0, samples * formatBytes(format) );
+ for ( unsigned int i=0; i<stream_.nBuffers+1; i++ ) {
+ result = write( handle->id[0], buffer, samples * formatBytes(format) );
+ if ( result == -1 ) {
+ errorText_ = "RtApiOss::stopStream: audio write error.";
+ error( RtAudioError::WARNING );
+ }
+ }
+
+ result = ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
+ if ( result == -1 ) {
+ errorStream_ << "RtApiOss::stopStream: system error stopping callback procedure on device (" << stream_.device[0] << ").";
+ errorText_ = errorStream_.str();
+ goto unlock;
+ }
+ handle->triggered = false;
+ }
+
+ if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && handle->id[0] != handle->id[1] ) ) {
+ result = ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
+ if ( result == -1 ) {
+ errorStream_ << "RtApiOss::stopStream: system error stopping input callback procedure on device (" << stream_.device[0] << ").";
+ errorText_ = errorStream_.str();
+ goto unlock;
+ }
+ }
+
+ unlock:
+ stream_.state = STREAM_STOPPED;
+ MUTEX_UNLOCK( &stream_.mutex );
+
+ if ( result != -1 ) return;
+ error( RtAudioError::SYSTEM_ERROR );
+}
+
+void RtApiOss :: abortStream()
+{
+ verifyStream();
+ if ( stream_.state == STREAM_STOPPED ) {
+ errorText_ = "RtApiOss::abortStream(): the stream is already stopped!";
+ error( RtAudioError::WARNING );
+ return;
+ }
+
+ MUTEX_LOCK( &stream_.mutex );
+
+ // The state might change while waiting on a mutex.
+ if ( stream_.state == STREAM_STOPPED ) {
+ MUTEX_UNLOCK( &stream_.mutex );
+ return;
+ }
+
+ int result = 0;
+ OssHandle *handle = (OssHandle *) stream_.apiHandle;
+ if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
+ result = ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
+ if ( result == -1 ) {
+ errorStream_ << "RtApiOss::abortStream: system error stopping callback procedure on device (" << stream_.device[0] << ").";
+ errorText_ = errorStream_.str();
+ goto unlock;
+ }
+ handle->triggered = false;
+ }
+
+ if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && handle->id[0] != handle->id[1] ) ) {
+ result = ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
+ if ( result == -1 ) {
+ errorStream_ << "RtApiOss::abortStream: system error stopping input callback procedure on device (" << stream_.device[0] << ").";
+ errorText_ = errorStream_.str();
+ goto unlock;
+ }
+ }
+
+ unlock:
+ stream_.state = STREAM_STOPPED;
+ MUTEX_UNLOCK( &stream_.mutex );
+
+ if ( result != -1 ) return;
+ error( RtAudioError::SYSTEM_ERROR );
+}
+
// One iteration of the audio callback loop: wait while stopped, invoke
// the user callback, then write output samples to / read input samples
// from the OSS device (with conversion and byte swapping as needed).
void RtApiOss :: callbackEvent()
{
  OssHandle *handle = (OssHandle *) stream_.apiHandle;
  if ( stream_.state == STREAM_STOPPED ) {
    // Park here until startStream() (or closeStream()) signals us.
    MUTEX_LOCK( &stream_.mutex );
    pthread_cond_wait( &handle->runnable, &stream_.mutex );
    if ( stream_.state != STREAM_RUNNING ) {
      MUTEX_UNLOCK( &stream_.mutex );
      return;
    }
    MUTEX_UNLOCK( &stream_.mutex );
  }

  if ( stream_.state == STREAM_CLOSED ) {
    errorText_ = "RtApiOss::callbackEvent(): the stream is closed ... this shouldn't happen!";
    error( RtAudioError::WARNING );
    return;
  }

  // Invoke user callback to get fresh output data.
  int doStopStream = 0;
  RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
  double streamTime = getStreamTime();
  RtAudioStreamStatus status = 0;
  // Report (and clear) any xrun flags set by earlier read/write failures.
  if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
    status |= RTAUDIO_OUTPUT_UNDERFLOW;
    handle->xrun[0] = false;
  }
  if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
    status |= RTAUDIO_INPUT_OVERFLOW;
    handle->xrun[1] = false;
  }
  doStopStream = callback( stream_.userBuffer[0], stream_.userBuffer[1],
                           stream_.bufferSize, streamTime, status, stream_.callbackInfo.userData );
  // A return value of 2 means abort immediately (drop pending output).
  if ( doStopStream == 2 ) {
    this->abortStream();
    return;
  }

  MUTEX_LOCK( &stream_.mutex );

  // The state might change while waiting on a mutex.
  if ( stream_.state == STREAM_STOPPED ) goto unlock;

  int result;
  char *buffer;
  int samples;
  RtAudioFormat format;

  if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {

    // Setup parameters and do buffer conversion if necessary.
    if ( stream_.doConvertBuffer[0] ) {
      buffer = stream_.deviceBuffer;
      convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
      samples = stream_.bufferSize * stream_.nDeviceChannels[0];
      format = stream_.deviceFormat[0];
    }
    else {
      buffer = stream_.userBuffer[0];
      samples = stream_.bufferSize * stream_.nUserChannels[0];
      format = stream_.userFormat;
    }

    // Do byte swapping if necessary.
    if ( stream_.doByteSwap[0] )
      byteSwapBuffer( buffer, samples, format );

    if ( stream_.mode == DUPLEX && handle->triggered == false ) {
      // First duplex write: prime the device with one buffer while
      // triggering is disabled, then enable input+output together so
      // both directions start in sync.
      int trig = 0;
      ioctl( handle->id[0], SNDCTL_DSP_SETTRIGGER, &trig );
      result = write( handle->id[0], buffer, samples * formatBytes(format) );
      trig = PCM_ENABLE_INPUT|PCM_ENABLE_OUTPUT;
      ioctl( handle->id[0], SNDCTL_DSP_SETTRIGGER, &trig );
      handle->triggered = true;
    }
    else
      // Write samples to device.
      result = write( handle->id[0], buffer, samples * formatBytes(format) );

    if ( result == -1 ) {
      // We'll assume this is an underrun, though there isn't a
      // specific means for determining that.
      handle->xrun[0] = true;
      errorText_ = "RtApiOss::callbackEvent: audio write error.";
      error( RtAudioError::WARNING );
      // Continue on to input section.
    }
  }

  if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {

    // Setup parameters.
    if ( stream_.doConvertBuffer[1] ) {
      buffer = stream_.deviceBuffer;
      samples = stream_.bufferSize * stream_.nDeviceChannels[1];
      format = stream_.deviceFormat[1];
    }
    else {
      buffer = stream_.userBuffer[1];
      samples = stream_.bufferSize * stream_.nUserChannels[1];
      format = stream_.userFormat;
    }

    // Read samples from device.
    result = read( handle->id[1], buffer, samples * formatBytes(format) );

    if ( result == -1 ) {
      // We'll assume this is an overrun, though there isn't a
      // specific means for determining that.
      handle->xrun[1] = true;
      errorText_ = "RtApiOss::callbackEvent: audio read error.";
      error( RtAudioError::WARNING );
      goto unlock;
    }

    // Do byte swapping if necessary.
    if ( stream_.doByteSwap[1] )
      byteSwapBuffer( buffer, samples, format );

    // Do buffer conversion if necessary.
    if ( stream_.doConvertBuffer[1] )
      convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
  }

 unlock:
  MUTEX_UNLOCK( &stream_.mutex );

  // Advance the stream clock; a callback return of 1 requests a
  // graceful stop (after this buffer has been processed).
  RtApi::tickStreamTime();
  if ( doStopStream == 1 ) this->stopStream();
}
+
+// Thread routine for the OSS callback loop: repeatedly services the stream via
+// RtApiOss::callbackEvent() until CallbackInfo::isRunning is cleared, then
+// exits the pthread.
+static void *ossCallbackHandler( void *ptr )
+{
+  CallbackInfo *info = (CallbackInfo *) ptr;
+  RtApiOss *object = (RtApiOss *) info->object;
+  bool *isRunning = &info->isRunning;
+
+#ifdef SCHED_RR // Undefined with some OSes (e.g. NetBSD 1.6.x with GNU Pthread)
+  if (info->doRealtime) {
+    std::cerr << "RtAudio oss: " <<
+              (sched_getscheduler(0) == SCHED_RR ? "" : "_NOT_ ") <<
+              "running realtime scheduling" << std::endl;
+  }
+#endif
+
+  while ( *isRunning == true ) {
+    pthread_testcancel();  // honor a pending pthread_cancel() between callbacks
+    object->callbackEvent();
+  }
+
+  pthread_exit( NULL );
+}
+
+//******************** End of __LINUX_OSS__ *********************//
+#endif
+
+
+// *************************************************** //
+//
+// Protected common (OS-independent) RtAudio methods.
+//
+// *************************************************** //
+
+// This method can be modified to control the behavior of error
+// message printing.
+// Central error reporter.  If the client installed an error callback it is
+// invoked with the message (aborting the stream first for non-warning errors);
+// otherwise warnings go to stderr (when showWarnings_ is set) and any other
+// error type is thrown as an RtAudioError.
+void RtApi :: error( RtAudioError::Type type )
+{
+  errorStream_.str(""); // clear the ostringstream
+
+  RtAudioErrorCallback errorCallback = (RtAudioErrorCallback) stream_.callbackInfo.errorCallback;
+  if ( errorCallback ) {
+    // abortStream() can generate new error messages. Ignore them. Just keep original one.
+
+    if ( firstErrorOccurred_ )
+      return;
+
+    firstErrorOccurred_ = true;
+    const std::string errorMessage = errorText_;  // copy: errorText_ may be overwritten by abortStream()
+
+    if ( type != RtAudioError::WARNING && stream_.state != STREAM_STOPPED) {
+      stream_.callbackInfo.isRunning = false; // exit from the thread
+      abortStream();
+    }
+
+    errorCallback( type, errorMessage );
+    firstErrorOccurred_ = false;  // re-arm for the next error
+    return;
+  }
+
+  if ( type == RtAudioError::WARNING && showWarnings_ == true )
+    std::cerr << '\n' << errorText_ << "\n\n";
+  else if ( type != RtAudioError::WARNING )
+    throw( RtAudioError( errorText_, type ) );
+}
+
+// Raises an INVALID_USE error (via error(), which throws when no callback is
+// installed) if no stream is currently open.
+void RtApi :: verifyStream()
+{
+  if ( stream_.state == STREAM_CLOSED ) {
+    errorText_ = "RtApi:: a stream is not open!";
+    error( RtAudioError::INVALID_USE );
+  }
+}
+
+// Resets the stream_ structure to a pristine (closed) state so that stale
+// values cannot leak into the next openStream() call.
+void RtApi :: clearStreamInfo()
+{
+  stream_.mode = UNINITIALIZED;
+  stream_.state = STREAM_CLOSED;
+  stream_.sampleRate = 0;
+  stream_.bufferSize = 0;
+  stream_.nBuffers = 0;
+  stream_.userFormat = 0;
+  stream_.userInterleaved = true;
+  stream_.streamTime = 0.0;
+  stream_.apiHandle = 0;
+  stream_.deviceBuffer = 0;
+  stream_.callbackInfo.callback = 0;
+  stream_.callbackInfo.userData = 0;
+  stream_.callbackInfo.isRunning = false;
+  stream_.callbackInfo.errorCallback = 0;
+  // Index 0 is the output side, index 1 the input side of the stream.
+  for ( int i=0; i<2; i++ ) {
+    stream_.device[i] = 11111;  // 11111 serves as the "no device selected" sentinel
+    stream_.doConvertBuffer[i] = false;
+    stream_.deviceInterleaved[i] = true;
+    stream_.doByteSwap[i] = false;
+    stream_.nUserChannels[i] = 0;
+    stream_.nDeviceChannels[i] = 0;
+    stream_.channelOffset[i] = 0;
+    stream_.deviceFormat[i] = 0;
+    stream_.latency[i] = 0;
+    stream_.userBuffer[i] = 0;
+    stream_.convertInfo[i].channels = 0;
+    stream_.convertInfo[i].inJump = 0;
+    stream_.convertInfo[i].outJump = 0;
+    stream_.convertInfo[i].inFormat = 0;
+    stream_.convertInfo[i].outFormat = 0;
+    stream_.convertInfo[i].inOffset.clear();
+    stream_.convertInfo[i].outOffset.clear();
+  }
+}
+
+// Returns the size in bytes of a single sample of the given format.
+// An unrecognized format yields 0 after emitting a WARNING.
+unsigned int RtApi :: formatBytes( RtAudioFormat format )
+{
+  if ( format == RTAUDIO_SINT16 )
+    return 2;
+  else if ( format == RTAUDIO_SINT32 || format == RTAUDIO_FLOAT32 )
+    return 4;
+  else if ( format == RTAUDIO_FLOAT64 )
+    return 8;
+  else if ( format == RTAUDIO_SINT24 )
+    return 3;
+  else if ( format == RTAUDIO_SINT8 )
+    return 1;
+
+  errorText_ = "RtApi::formatBytes: undefined format.";
+  error( RtAudioError::WARNING );
+
+  return 0;
+}
+
+// Precomputes the ConvertInfo (jumps, formats, per-channel offsets) used by
+// convertBuffer() for the given direction.  `mode` doubles as the index into
+// stream_.convertInfo (OUTPUT == 0, INPUT == 1).  `firstChannel` shifts all
+// offsets when the stream does not start at the device's first channel.
+void RtApi :: setConvertInfo( StreamMode mode, unsigned int firstChannel )
+{
+  if ( mode == INPUT ) { // convert device to user buffer
+    stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1];
+    stream_.convertInfo[mode].outJump = stream_.nUserChannels[1];
+    stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1];
+    stream_.convertInfo[mode].outFormat = stream_.userFormat;
+  }
+  else { // convert user to device buffer
+    stream_.convertInfo[mode].inJump = stream_.nUserChannels[0];
+    stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0];
+    stream_.convertInfo[mode].inFormat = stream_.userFormat;
+    stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0];
+  }
+
+  // Convert only as many channels as both sides actually have.
+  if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump )
+    stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump;
+  else
+    stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump;
+
+  // Set up the interleave/deinterleave offsets.
+  if ( stream_.deviceInterleaved[mode] != stream_.userInterleaved ) {
+    if ( ( mode == OUTPUT && stream_.deviceInterleaved[mode] ) ||
+         ( mode == INPUT && stream_.userInterleaved ) ) {
+      // Non-interleaved source -> interleaved destination: source channels are
+      // laid out as contiguous bufferSize-sample planes, so inJump collapses to 1.
+      for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
+        stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
+        stream_.convertInfo[mode].outOffset.push_back( k );
+        stream_.convertInfo[mode].inJump = 1;
+      }
+    }
+    else {
+      // Interleaved source -> non-interleaved destination: mirror of the above.
+      for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
+        stream_.convertInfo[mode].inOffset.push_back( k );
+        stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
+        stream_.convertInfo[mode].outJump = 1;
+      }
+    }
+  }
+  else { // no (de)interleaving
+    if ( stream_.userInterleaved ) {
+      for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
+        stream_.convertInfo[mode].inOffset.push_back( k );
+        stream_.convertInfo[mode].outOffset.push_back( k );
+      }
+    }
+    else {
+      // Both sides planar: offsets are plane starts and both jumps become 1.
+      for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
+        stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
+        stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
+        stream_.convertInfo[mode].inJump = 1;
+        stream_.convertInfo[mode].outJump = 1;
+      }
+    }
+  }
+
+  // Add channel offset.
+  if ( firstChannel > 0 ) {
+    if ( stream_.deviceInterleaved[mode] ) {
+      if ( mode == OUTPUT ) {
+        for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
+          stream_.convertInfo[mode].outOffset[k] += firstChannel;
+      }
+      else {
+        for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
+          stream_.convertInfo[mode].inOffset[k] += firstChannel;
+      }
+    }
+    else {
+      if ( mode == OUTPUT ) {
+        for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
+          stream_.convertInfo[mode].outOffset[k] += ( firstChannel * stream_.bufferSize );
+      }
+      else {
+        for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
+          stream_.convertInfo[mode].inOffset[k] += ( firstChannel * stream_.bufferSize );
+      }
+    }
+  }
+}
+
+// Converts one buffer of audio between any two RtAudio sample formats, using
+// the offsets/jumps precomputed by setConvertInfo().  The outer if/else chain
+// dispatches on the destination format, the inner chain on the source format;
+// integer formats are rescaled by bit shifting, float formats by scaling into
+// the nominal [-1.0, 1.0] range.
+void RtApi :: convertBuffer( char *outBuffer, char *inBuffer, ConvertInfo &info )
+{
+  // This function does format conversion, input/output channel compensation, and
+  // data interleaving/deinterleaving. 24-bit integers are assumed to occupy
+  // the lower three bytes of a 32-bit integer.
+
+  // Clear our device buffer when in/out duplex device channels are different
+  if ( outBuffer == stream_.deviceBuffer && stream_.mode == DUPLEX &&
+       ( stream_.nDeviceChannels[0] < stream_.nDeviceChannels[1] ) )
+    memset( outBuffer, 0, stream_.bufferSize * info.outJump * formatBytes( info.outFormat ) );
+
+  int j;
+  if (info.outFormat == RTAUDIO_FLOAT64) {
+    Float64 scale;
+    Float64 *out = (Float64 *)outBuffer;
+
+    if (info.inFormat == RTAUDIO_SINT8) {
+      signed char *in = (signed char *)inBuffer;
+      scale = 1.0 / 127.5;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
+          out[info.outOffset[j]] += 0.5;
+          out[info.outOffset[j]] *= scale;
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    else if (info.inFormat == RTAUDIO_SINT16) {
+      Int16 *in = (Int16 *)inBuffer;
+      scale = 1.0 / 32767.5;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
+          out[info.outOffset[j]] += 0.5;
+          out[info.outOffset[j]] *= scale;
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    else if (info.inFormat == RTAUDIO_SINT24) {
+      Int24 *in = (Int24 *)inBuffer;
+      scale = 1.0 / 8388607.5;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (Float64) (in[info.inOffset[j]].asInt());
+          out[info.outOffset[j]] += 0.5;
+          out[info.outOffset[j]] *= scale;
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    else if (info.inFormat == RTAUDIO_SINT32) {
+      Int32 *in = (Int32 *)inBuffer;
+      scale = 1.0 / 2147483647.5;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
+          out[info.outOffset[j]] += 0.5;
+          out[info.outOffset[j]] *= scale;
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    else if (info.inFormat == RTAUDIO_FLOAT32) {
+      Float32 *in = (Float32 *)inBuffer;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    else if (info.inFormat == RTAUDIO_FLOAT64) {
+      // Channel compensation and/or (de)interleaving only.
+      Float64 *in = (Float64 *)inBuffer;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = in[info.inOffset[j]];
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+  }
+  else if (info.outFormat == RTAUDIO_FLOAT32) {
+    Float32 scale;
+    Float32 *out = (Float32 *)outBuffer;
+
+    if (info.inFormat == RTAUDIO_SINT8) {
+      signed char *in = (signed char *)inBuffer;
+      scale = (Float32) ( 1.0 / 127.5 );
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
+          out[info.outOffset[j]] += 0.5;
+          out[info.outOffset[j]] *= scale;
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    else if (info.inFormat == RTAUDIO_SINT16) {
+      Int16 *in = (Int16 *)inBuffer;
+      scale = (Float32) ( 1.0 / 32767.5 );
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
+          out[info.outOffset[j]] += 0.5;
+          out[info.outOffset[j]] *= scale;
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    else if (info.inFormat == RTAUDIO_SINT24) {
+      Int24 *in = (Int24 *)inBuffer;
+      scale = (Float32) ( 1.0 / 8388607.5 );
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (Float32) (in[info.inOffset[j]].asInt());
+          out[info.outOffset[j]] += 0.5;
+          out[info.outOffset[j]] *= scale;
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    else if (info.inFormat == RTAUDIO_SINT32) {
+      Int32 *in = (Int32 *)inBuffer;
+      scale = (Float32) ( 1.0 / 2147483647.5 );
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
+          out[info.outOffset[j]] += 0.5;
+          out[info.outOffset[j]] *= scale;
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    else if (info.inFormat == RTAUDIO_FLOAT32) {
+      // Channel compensation and/or (de)interleaving only.
+      Float32 *in = (Float32 *)inBuffer;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = in[info.inOffset[j]];
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    else if (info.inFormat == RTAUDIO_FLOAT64) {
+      Float64 *in = (Float64 *)inBuffer;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+  }
+  else if (info.outFormat == RTAUDIO_SINT32) {
+    Int32 *out = (Int32 *)outBuffer;
+    if (info.inFormat == RTAUDIO_SINT8) {
+      signed char *in = (signed char *)inBuffer;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
+          out[info.outOffset[j]] <<= 24;
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    else if (info.inFormat == RTAUDIO_SINT16) {
+      Int16 *in = (Int16 *)inBuffer;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
+          out[info.outOffset[j]] <<= 16;
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    else if (info.inFormat == RTAUDIO_SINT24) {
+      Int24 *in = (Int24 *)inBuffer;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (Int32) in[info.inOffset[j]].asInt();
+          out[info.outOffset[j]] <<= 8;
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    else if (info.inFormat == RTAUDIO_SINT32) {
+      // Channel compensation and/or (de)interleaving only.
+      Int32 *in = (Int32 *)inBuffer;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = in[info.inOffset[j]];
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    else if (info.inFormat == RTAUDIO_FLOAT32) {
+      Float32 *in = (Float32 *)inBuffer;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.5 - 0.5);
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    else if (info.inFormat == RTAUDIO_FLOAT64) {
+      Float64 *in = (Float64 *)inBuffer;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.5 - 0.5);
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+  }
+  else if (info.outFormat == RTAUDIO_SINT24) {
+    Int24 *out = (Int24 *)outBuffer;
+    if (info.inFormat == RTAUDIO_SINT8) {
+      signed char *in = (signed char *)inBuffer;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] << 16);
+          //out[info.outOffset[j]] <<= 16;
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    else if (info.inFormat == RTAUDIO_SINT16) {
+      Int16 *in = (Int16 *)inBuffer;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] << 8);
+          //out[info.outOffset[j]] <<= 8;
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    else if (info.inFormat == RTAUDIO_SINT24) {
+      // Channel compensation and/or (de)interleaving only.
+      Int24 *in = (Int24 *)inBuffer;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = in[info.inOffset[j]];
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    else if (info.inFormat == RTAUDIO_SINT32) {
+      Int32 *in = (Int32 *)inBuffer;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] >> 8);
+          //out[info.outOffset[j]] >>= 8;
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    else if (info.inFormat == RTAUDIO_FLOAT32) {
+      Float32 *in = (Float32 *)inBuffer;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 8388607.5 - 0.5);
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    else if (info.inFormat == RTAUDIO_FLOAT64) {
+      Float64 *in = (Float64 *)inBuffer;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 8388607.5 - 0.5);
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+  }
+  else if (info.outFormat == RTAUDIO_SINT16) {
+    Int16 *out = (Int16 *)outBuffer;
+    if (info.inFormat == RTAUDIO_SINT8) {
+      signed char *in = (signed char *)inBuffer;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (Int16) in[info.inOffset[j]];
+          out[info.outOffset[j]] <<= 8;
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    else if (info.inFormat == RTAUDIO_SINT16) {
+      // Channel compensation and/or (de)interleaving only.
+      Int16 *in = (Int16 *)inBuffer;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = in[info.inOffset[j]];
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    else if (info.inFormat == RTAUDIO_SINT24) {
+      Int24 *in = (Int24 *)inBuffer;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]].asInt() >> 8);
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    else if (info.inFormat == RTAUDIO_SINT32) {
+      Int32 *in = (Int32 *)inBuffer;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (Int16) ((in[info.inOffset[j]] >> 16) & 0x0000ffff);
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    else if (info.inFormat == RTAUDIO_FLOAT32) {
+      Float32 *in = (Float32 *)inBuffer;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]] * 32767.5 - 0.5);
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    else if (info.inFormat == RTAUDIO_FLOAT64) {
+      Float64 *in = (Float64 *)inBuffer;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]] * 32767.5 - 0.5);
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+  }
+  else if (info.outFormat == RTAUDIO_SINT8) {
+    signed char *out = (signed char *)outBuffer;
+    if (info.inFormat == RTAUDIO_SINT8) {
+      // Channel compensation and/or (de)interleaving only.
+      signed char *in = (signed char *)inBuffer;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = in[info.inOffset[j]];
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    // NOTE(review): upstream uses a plain 'if' here rather than the 'else if'
+    // used by every sibling branch.  Harmless, since inFormat can only match
+    // one case, but inconsistent -- worth fixing when syncing with upstream.
+    if (info.inFormat == RTAUDIO_SINT16) {
+      Int16 *in = (Int16 *)inBuffer;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 8) & 0x00ff);
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    else if (info.inFormat == RTAUDIO_SINT24) {
+      Int24 *in = (Int24 *)inBuffer;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]].asInt() >> 16);
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    else if (info.inFormat == RTAUDIO_SINT32) {
+      Int32 *in = (Int32 *)inBuffer;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 24) & 0x000000ff);
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    else if (info.inFormat == RTAUDIO_FLOAT32) {
+      Float32 *in = (Float32 *)inBuffer;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]] * 127.5 - 0.5);
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+    else if (info.inFormat == RTAUDIO_FLOAT64) {
+      Float64 *in = (Float64 *)inBuffer;
+      for (unsigned int i=0; i<stream_.bufferSize; i++) {
+        for (j=0; j<info.channels; j++) {
+          out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]] * 127.5 - 0.5);
+        }
+        in += info.inJump;
+        out += info.outJump;
+      }
+    }
+  }
+}
+
+//static inline uint16_t bswap_16(uint16_t x) { return (x>>8) | (x<<8); }
+//static inline uint32_t bswap_32(uint32_t x) { return (bswap_16(x&0xffff)<<16) | (bswap_16(x>>16)); }
+//static inline uint64_t bswap_64(uint64_t x) { return (((unsigned long long)bswap_32(x&0xffffffffull))<<32) | (bswap_32(x>>32)); }
+
+// Reverses the byte order of every sample in `buffer` in place (host <->
+// device endianness).  `samples` counts individual samples, not frames;
+// the stride per sample is determined by `format`.
+void RtApi :: byteSwapBuffer( char *buffer, unsigned int samples, RtAudioFormat format )
+{
+  char val;
+  char *ptr;
+
+  ptr = buffer;
+  if ( format == RTAUDIO_SINT16 ) {
+    for ( unsigned int i=0; i<samples; i++ ) {
+      // Swap 1st and 2nd bytes.
+      val = *(ptr);
+      *(ptr) = *(ptr+1);
+      *(ptr+1) = val;
+
+      // Increment 2 bytes.
+      ptr += 2;
+    }
+  }
+  else if ( format == RTAUDIO_SINT32 ||
+            format == RTAUDIO_FLOAT32 ) {
+    for ( unsigned int i=0; i<samples; i++ ) {
+      // Swap 1st and 4th bytes.
+      val = *(ptr);
+      *(ptr) = *(ptr+3);
+      *(ptr+3) = val;
+
+      // Swap 2nd and 3rd bytes.
+      ptr += 1;
+      val = *(ptr);
+      *(ptr) = *(ptr+1);
+      *(ptr+1) = val;
+
+      // Increment 3 more bytes.
+      ptr += 3;
+    }
+  }
+  else if ( format == RTAUDIO_SINT24 ) {
+    for ( unsigned int i=0; i<samples; i++ ) {
+      // Swap 1st and 3rd bytes.
+      val = *(ptr);
+      *(ptr) = *(ptr+2);
+      *(ptr+2) = val;
+
+      // Increment 2 more bytes.
+      ptr += 2;
+    }
+  }
+  else if ( format == RTAUDIO_FLOAT64 ) {
+    for ( unsigned int i=0; i<samples; i++ ) {
+      // Swap 1st and 8th bytes
+      val = *(ptr);
+      *(ptr) = *(ptr+7);
+      *(ptr+7) = val;
+
+      // Swap 2nd and 7th bytes
+      ptr += 1;
+      val = *(ptr);
+      *(ptr) = *(ptr+5);
+      *(ptr+5) = val;
+
+      // Swap 3rd and 6th bytes
+      ptr += 1;
+      val = *(ptr);
+      *(ptr) = *(ptr+3);
+      *(ptr+3) = val;
+
+      // Swap 4th and 5th bytes
+      ptr += 1;
+      val = *(ptr);
+      *(ptr) = *(ptr+1);
+      *(ptr+1) = val;
+
+      // Increment 5 more bytes.
+      ptr += 5;
+    }
+  }
+}
+
+ // Indentation settings for Vim and Emacs
+ //
+ // Local Variables:
+ // c-basic-offset: 2
+ // indent-tabs-mode: nil
+ // End:
+ //
+ // vim: et sts=2 sw=2
+
--- /dev/null
+/************************************************************************/
+/*! \class RtAudio
+ \brief Realtime audio i/o C++ classes.
+
+ RtAudio provides a common API (Application Programming Interface)
+ for realtime audio input/output across Linux (native ALSA, Jack,
+ and OSS), Macintosh OS X (CoreAudio and Jack), and Windows
+ (DirectSound, ASIO and WASAPI) operating systems.
+
+ RtAudio GitHub site: https://github.com/thestk/rtaudio
+ RtAudio WWW site: http://www.music.mcgill.ca/~gary/rtaudio/
+
+ RtAudio: realtime audio i/o C++ classes
+ Copyright (c) 2001-2019 Gary P. Scavone
+
+ Permission is hereby granted, free of charge, to any person
+ obtaining a copy of this software and associated documentation files
+ (the "Software"), to deal in the Software without restriction,
+ including without limitation the rights to use, copy, modify, merge,
+ publish, distribute, sublicense, and/or sell copies of the Software,
+ and to permit persons to whom the Software is furnished to do so,
+ subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ Any person wishing to distribute modifications to the Software is
+ asked to send the modifications to the original developer so that
+ they can be incorporated into the canonical version. This is,
+ however, not a binding provision of this license.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
+ ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
+ CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+/************************************************************************/
+
+/*!
+ \file RtAudio.h
+ */
+
+#ifndef __RTAUDIO_H
+#define __RTAUDIO_H
+
+#define RTAUDIO_VERSION "5.1.0"
+
+#if defined _WIN32 || defined __CYGWIN__
+ #if defined(RTAUDIO_EXPORT)
+ #define RTAUDIO_DLL_PUBLIC __declspec(dllexport)
+ #else
+ #define RTAUDIO_DLL_PUBLIC
+ #endif
+#else
+ #if __GNUC__ >= 4
+ #define RTAUDIO_DLL_PUBLIC __attribute__( (visibility( "default" )) )
+ #else
+ #define RTAUDIO_DLL_PUBLIC
+ #endif
+#endif
+
+#include <string>
+#include <vector>
+#include <stdexcept>
+#include <iostream>
+
+/*! \typedef typedef unsigned long RtAudioFormat;
+ \brief RtAudio data format type.
+
+ Support for signed integers and floats. Audio data fed to/from an
+ RtAudio stream is assumed to ALWAYS be in host byte order. The
+ internal routines will automatically take care of any necessary
+ byte-swapping between the host format and the soundcard. Thus,
+ endian-ness is not a concern in the following format definitions.
+
+ - \e RTAUDIO_SINT8: 8-bit signed integer.
+ - \e RTAUDIO_SINT16: 16-bit signed integer.
+ - \e RTAUDIO_SINT24: 24-bit signed integer.
+ - \e RTAUDIO_SINT32: 32-bit signed integer.
+ - \e RTAUDIO_FLOAT32: Normalized between plus/minus 1.0.
+ - \e RTAUDIO_FLOAT64: Normalized between plus/minus 1.0.
+*/
+typedef unsigned long RtAudioFormat;
+static const RtAudioFormat RTAUDIO_SINT8 = 0x1; // 8-bit signed integer.
+static const RtAudioFormat RTAUDIO_SINT16 = 0x2; // 16-bit signed integer.
+static const RtAudioFormat RTAUDIO_SINT24 = 0x4; // 24-bit signed integer.
+static const RtAudioFormat RTAUDIO_SINT32 = 0x8; // 32-bit signed integer.
+static const RtAudioFormat RTAUDIO_FLOAT32 = 0x10; // Normalized between plus/minus 1.0.
+static const RtAudioFormat RTAUDIO_FLOAT64 = 0x20; // Normalized between plus/minus 1.0.
+
+/*! \typedef typedef unsigned int RtAudioStreamFlags;
+    \brief RtAudio stream option flags.
+
+ The following flags can be OR'ed together to allow a client to
+ make changes to the default stream behavior:
+
+  - \e RTAUDIO_NONINTERLEAVED: Use non-interleaved buffers (default = interleaved).
+  - \e RTAUDIO_MINIMIZE_LATENCY: Attempt to set stream parameters for lowest possible latency.
+  - \e RTAUDIO_HOG_DEVICE: Attempt to grab the device for exclusive use.
+  - \e RTAUDIO_SCHEDULE_REALTIME: Attempt to select realtime scheduling for the callback thread.
+  - \e RTAUDIO_ALSA_USE_DEFAULT: Use the "default" PCM device (ALSA only).
+  - \e RTAUDIO_JACK_DONT_CONNECT: Do not automatically connect ports (JACK only).
+
+ By default, RtAudio streams pass and receive audio data from the
+ client in an interleaved format. By passing the
+ RTAUDIO_NONINTERLEAVED flag to the openStream() function, audio
+ data will instead be presented in non-interleaved buffers. In
+ this case, each buffer argument in the RtAudioCallback function
+ will point to a single array of data, with \c nFrames samples for
+ each channel concatenated back-to-back. For example, the first
+ sample of data for the second channel would be located at index \c
+ nFrames (assuming the \c buffer pointer was recast to the correct
+ data type for the stream).
+
+ Certain audio APIs offer a number of parameters that influence the
+ I/O latency of a stream. By default, RtAudio will attempt to set
+ these parameters internally for robust (glitch-free) performance
+ (though some APIs, like Windows DirectSound, make this difficult).
+ By passing the RTAUDIO_MINIMIZE_LATENCY flag to the openStream()
+ function, internal stream settings will be influenced in an attempt
+ to minimize stream latency, though possibly at the expense of stream
+ performance.
+
+ If the RTAUDIO_HOG_DEVICE flag is set, RtAudio will attempt to
+ open the input and/or output stream device(s) for exclusive use.
+ Note that this is not possible with all supported audio APIs.
+
+ If the RTAUDIO_SCHEDULE_REALTIME flag is set, RtAudio will attempt
+ to select realtime scheduling (round-robin) for the callback thread.
+
+ If the RTAUDIO_ALSA_USE_DEFAULT flag is set, RtAudio will attempt to
+ open the "default" PCM device when using the ALSA API. Note that this
+ will override any specified input or output device id.
+
+ If the RTAUDIO_JACK_DONT_CONNECT flag is set, RtAudio will not attempt
+ to automatically connect the ports of the client to the audio device.
+*/
+typedef unsigned int RtAudioStreamFlags;
+static const RtAudioStreamFlags RTAUDIO_NONINTERLEAVED = 0x1; // Use non-interleaved buffers (default = interleaved).
+static const RtAudioStreamFlags RTAUDIO_MINIMIZE_LATENCY = 0x2; // Attempt to set stream parameters for lowest possible latency.
+static const RtAudioStreamFlags RTAUDIO_HOG_DEVICE = 0x4; // Attempt grab device and prevent use by others.
+static const RtAudioStreamFlags RTAUDIO_SCHEDULE_REALTIME = 0x8; // Try to select realtime scheduling for callback thread.
+static const RtAudioStreamFlags RTAUDIO_ALSA_USE_DEFAULT = 0x10; // Use the "default" PCM device (ALSA only).
+static const RtAudioStreamFlags RTAUDIO_JACK_DONT_CONNECT = 0x20; // Do not automatically connect ports (JACK only).
+
+/*! \typedef typedef unsigned int RtAudioStreamStatus;
+    \brief RtAudio stream status (over- or underflow) flags.
+
+ Notification of a stream over- or underflow is indicated by a
+ non-zero stream \c status argument in the RtAudioCallback function.
+ The stream status can be one of the following two options,
+ depending on whether the stream is open for output and/or input:
+
+ - \e RTAUDIO_INPUT_OVERFLOW: Input data was discarded because of an overflow condition at the driver.
+ - \e RTAUDIO_OUTPUT_UNDERFLOW: The output buffer ran low, likely producing a break in the output sound.
+*/
+typedef unsigned int RtAudioStreamStatus;
+static const RtAudioStreamStatus RTAUDIO_INPUT_OVERFLOW = 0x1; // Input data was discarded because of an overflow condition at the driver.
+static const RtAudioStreamStatus RTAUDIO_OUTPUT_UNDERFLOW = 0x2; // The output buffer ran low, likely causing a gap in the output sound.
+
+//! RtAudio callback function prototype.
+/*!
+ All RtAudio clients must create a function of type RtAudioCallback
+ to read and/or write data from/to the audio stream. When the
+ underlying audio system is ready for new input or output data, this
+ function will be invoked.
+
+ \param outputBuffer For output (or duplex) streams, the client
+ should write \c nFrames of audio sample frames into this
+ buffer. This argument should be recast to the datatype
+ specified when the stream was opened. For input-only
+ streams, this argument will be NULL.
+
+ \param inputBuffer For input (or duplex) streams, this buffer will
+ hold \c nFrames of input audio sample frames. This
+ argument should be recast to the datatype specified when the
+ stream was opened. For output-only streams, this argument
+ will be NULL.
+
+ \param nFrames The number of sample frames of input or output
+ data in the buffers. The actual buffer size in bytes is
+ dependent on the data type and number of channels in use.
+
+ \param streamTime The number of seconds that have elapsed since the
+ stream was started.
+
+ \param status If non-zero, this argument indicates a data overflow
+ or underflow condition for the stream. The particular
+ condition can be determined by comparison with the
+ RtAudioStreamStatus flags.
+
+ \param userData A pointer to optional data provided by the client
+ when opening the stream (default = NULL).
+
+ \return
+ To continue normal stream operation, the RtAudioCallback function
+ should return a value of zero. To stop the stream and drain the
+ output buffer, the function should return a value of one. To abort
+ the stream immediately, the client should return a value of two.
+ */
+typedef int (*RtAudioCallback)( void *outputBuffer, void *inputBuffer,
+ unsigned int nFrames,
+ double streamTime,
+ RtAudioStreamStatus status,
+ void *userData );
+
+/************************************************************************/
+/*! \class RtAudioError
+ \brief Exception handling class for RtAudio.
+
+ The RtAudioError class is quite simple but it does allow errors to be
+ "caught" by RtAudioError::Type. See the RtAudio documentation to know
+ which methods can throw an RtAudioError.
+*/
+/************************************************************************/
+
+class RTAUDIO_DLL_PUBLIC RtAudioError : public std::runtime_error
+{
+ public:
+  //! Defined RtAudioError types.
+  enum Type {
+    WARNING,           /*!< A non-critical error. */
+    DEBUG_WARNING,     /*!< A non-critical error which might be useful for debugging. */
+    UNSPECIFIED,       /*!< The default, unspecified error type. */
+    NO_DEVICES_FOUND,  /*!< No devices found on system. */
+    INVALID_DEVICE,    /*!< An invalid device ID was specified. */
+    MEMORY_ERROR,      /*!< An error occurred during memory allocation. */
+    INVALID_PARAMETER, /*!< An invalid parameter was specified to a function. */
+    INVALID_USE,       /*!< The function was called incorrectly. */
+    DRIVER_ERROR,      /*!< A system driver error occurred. */
+    SYSTEM_ERROR,      /*!< A system error occurred. */
+    THREAD_ERROR       /*!< A thread error occurred. */
+  };
+
+  //! The constructor.
+  RtAudioError( const std::string& message,
+                Type type = RtAudioError::UNSPECIFIED )
+    : std::runtime_error(message), type_(type) {}
+
+  //! Prints thrown error message to stderr.
+  virtual void printMessage( void ) const
+    { std::cerr << '\n' << what() << "\n\n"; }
+
+  //! Returns the thrown error message type.
+  virtual const Type& getType(void) const { return type_; }
+
+  //! Returns the thrown error message string.
+  virtual const std::string getMessage(void) const
+    { return std::string(what()); }
+
+ protected:
+  Type type_;
+};
+
+//! RtAudio error callback function prototype.
+/*!
+ \param type Type of error.
+ \param errorText Error description.
+ */
+typedef void (*RtAudioErrorCallback)( RtAudioError::Type type, const std::string &errorText );
+
+// **************************************************************** //
+//
+// RtAudio class declaration.
+//
+// RtAudio is a "controller" used to select an available audio i/o
+// interface. It presents a common API for the user to call but all
+// functionality is implemented by the class RtApi and its
+// subclasses. RtAudio creates an instance of an RtApi subclass
+// based on the user's API choice. If no choice is made, RtAudio
+// attempts to make a "logical" API selection.
+//
+// **************************************************************** //
+
+class RtApi;
+
+class RTAUDIO_DLL_PUBLIC RtAudio
+{
+ public:
+
+ //! Audio API specifier arguments.
+ enum Api {
+ UNSPECIFIED, /*!< Search for a working compiled API. */
+ LINUX_ALSA, /*!< The Advanced Linux Sound Architecture API. */
+ LINUX_PULSE, /*!< The Linux PulseAudio API. */
+ LINUX_OSS, /*!< The Linux Open Sound System API. */
+ UNIX_JACK, /*!< The Jack Low-Latency Audio Server API. */
+ MACOSX_CORE, /*!< Macintosh OS-X Core Audio API. */
+ WINDOWS_WASAPI, /*!< The Microsoft WASAPI API. */
+ WINDOWS_ASIO, /*!< The Steinberg Audio Stream I/O API. */
+ WINDOWS_DS, /*!< The Microsoft DirectSound API. */
+ RTAUDIO_DUMMY, /*!< A compilable but non-functional API. */
+ NUM_APIS /*!< Number of values in this enum. */
+ };
+
+ //! The public device information structure for returning queried values.
+ struct DeviceInfo {
+ bool probed; /*!< true if the device capabilities were successfully probed. */
+ std::string name; /*!< Character string device identifier. */
+ unsigned int outputChannels; /*!< Maximum output channels supported by device. */
+ unsigned int inputChannels; /*!< Maximum input channels supported by device. */
+ unsigned int duplexChannels; /*!< Maximum simultaneous input/output channels supported by device. */
+ bool isDefaultOutput; /*!< true if this is the default output device. */
+ bool isDefaultInput; /*!< true if this is the default input device. */
+ std::vector<unsigned int> sampleRates; /*!< Supported sample rates (queried from list of standard rates). */
+ unsigned int preferredSampleRate; /*!< Preferred sample rate, e.g. for WASAPI the system sample rate. */
+ RtAudioFormat nativeFormats; /*!< Bit mask of supported data formats. */
+
+ // Default constructor.
+ DeviceInfo()
+ :probed(false), outputChannels(0), inputChannels(0), duplexChannels(0),
+ isDefaultOutput(false), isDefaultInput(false), preferredSampleRate(0), nativeFormats(0) {}
+ };
+
+ //! The structure for specifying input or output stream parameters.
+ struct StreamParameters {
+ unsigned int deviceId; /*!< Device index (0 to getDeviceCount() - 1). */
+ unsigned int nChannels; /*!< Number of channels. */
+ unsigned int firstChannel; /*!< First channel index on device (default = 0). */
+
+ // Default constructor.
+ StreamParameters()
+ : deviceId(0), nChannels(0), firstChannel(0) {}
+ };
+
+ //! The structure for specifying stream options.
+ /*!
+ The following flags can be OR'ed together to allow a client to
+ make changes to the default stream behavior:
+
+ - \e RTAUDIO_NONINTERLEAVED: Use non-interleaved buffers (default = interleaved).
+ - \e RTAUDIO_MINIMIZE_LATENCY: Attempt to set stream parameters for lowest possible latency.
+ - \e RTAUDIO_HOG_DEVICE: Attempt to grab device for exclusive use.
+ - \e RTAUDIO_SCHEDULE_REALTIME: Attempt to select realtime scheduling for callback thread.
+ - \e RTAUDIO_ALSA_USE_DEFAULT: Use the "default" PCM device (ALSA only).
+
+ By default, RtAudio streams pass and receive audio data from the
+ client in an interleaved format. By passing the
+ RTAUDIO_NONINTERLEAVED flag to the openStream() function, audio
+ data will instead be presented in non-interleaved buffers. In
+ this case, each buffer argument in the RtAudioCallback function
+ will point to a single array of data, with \c nFrames samples for
+ each channel concatenated back-to-back. For example, the first
+ sample of data for the second channel would be located at index \c
+ nFrames (assuming the \c buffer pointer was recast to the correct
+ data type for the stream).
+
+ Certain audio APIs offer a number of parameters that influence the
+ I/O latency of a stream. By default, RtAudio will attempt to set
+ these parameters internally for robust (glitch-free) performance
+ (though some APIs, like Windows DirectSound, make this difficult).
+ By passing the RTAUDIO_MINIMIZE_LATENCY flag to the openStream()
+ function, internal stream settings will be influenced in an attempt
+ to minimize stream latency, though possibly at the expense of stream
+ performance.
+
+ If the RTAUDIO_HOG_DEVICE flag is set, RtAudio will attempt to
+ open the input and/or output stream device(s) for exclusive use.
+ Note that this is not possible with all supported audio APIs.
+
+ If the RTAUDIO_SCHEDULE_REALTIME flag is set, RtAudio will attempt
+ to select realtime scheduling (round-robin) for the callback thread.
+ The \c priority parameter will only be used if the RTAUDIO_SCHEDULE_REALTIME
+ flag is set. It defines the thread's realtime priority.
+
+ If the RTAUDIO_ALSA_USE_DEFAULT flag is set, RtAudio will attempt to
+ open the "default" PCM device when using the ALSA API. Note that this
+ will override any specified input or output device id.
+
+ The \c numberOfBuffers parameter can be used to control stream
+ latency in the Windows DirectSound, Linux OSS, and Linux Alsa APIs
+ only. A value of two is usually the smallest allowed. Larger
+ numbers can potentially result in more robust stream performance,
+ though likely at the cost of stream latency. The value set by the
+ user is replaced during execution of the RtAudio::openStream()
+ function by the value actually used by the system.
+
+ The \c streamName parameter can be used to set the client name
+ when using the Jack API. By default, the client name is set to
+ RtApiJack. However, if you wish to create multiple instances of
+ RtAudio with Jack, each instance must have a unique client name.
+ */
+ struct StreamOptions {
+ RtAudioStreamFlags flags; /*!< A bit-mask of stream flags (RTAUDIO_NONINTERLEAVED, RTAUDIO_MINIMIZE_LATENCY, RTAUDIO_HOG_DEVICE, RTAUDIO_ALSA_USE_DEFAULT). */
+ unsigned int numberOfBuffers; /*!< Number of stream buffers. */
+ std::string streamName; /*!< A stream name (currently used only in Jack). */
+ int priority; /*!< Scheduling priority of callback thread (only used with flag RTAUDIO_SCHEDULE_REALTIME). */
+
+ // Default constructor.
+ StreamOptions()
+ : flags(0), numberOfBuffers(0), priority(0) {}
+ };
+
+ //! A static function to determine the current RtAudio version.
+ static std::string getVersion( void );
+
+ //! A static function to determine the available compiled audio APIs.
+ /*!
+ The values returned in the std::vector can be compared against
+ the enumerated list values. Note that there can be more than one
+ API compiled for certain operating systems.
+ */
+ static void getCompiledApi( std::vector<RtAudio::Api> &apis );
+
+ //! Return the name of a specified compiled audio API.
+ /*!
+ This obtains a short lower-case name used for identification purposes.
+ This value is guaranteed to remain identical across library versions.
+ If the API is unknown, this function will return the empty string.
+ */
+ static std::string getApiName( RtAudio::Api api );
+
+ //! Return the display name of a specified compiled audio API.
+ /*!
+ This obtains a long name used for display purposes.
+ If the API is unknown, this function will return the empty string.
+ */
+ static std::string getApiDisplayName( RtAudio::Api api );
+
+ //! Return the compiled audio API having the given name.
+ /*!
+ A case insensitive comparison will check the specified name
+ against the list of compiled APIs, and return the one which
+ matches. On failure, the function returns UNSPECIFIED.
+ */
+ static RtAudio::Api getCompiledApiByName( const std::string &name );
+
+ //! The class constructor.
+ /*!
+ The constructor performs minor initialization tasks. An exception
+ can be thrown if no API support is compiled.
+
+ If no API argument is specified and multiple API support has been
+ compiled, the default order of use is JACK, ALSA, OSS (Linux
+ systems) and ASIO, DS (Windows systems).
+ */
+ RtAudio( RtAudio::Api api=UNSPECIFIED );
+
+ //! The destructor.
+ /*!
+ If a stream is running or open, it will be stopped and closed
+ automatically.
+ */
+ ~RtAudio();
+
+ //! Returns the audio API specifier for the current instance of RtAudio.
+ RtAudio::Api getCurrentApi( void );
+
+ //! A public function that queries for the number of audio devices available.
+ /*!
+ This function performs a system query of available devices each time it
+ is called, thus supporting devices connected \e after instantiation. If
+ a system error occurs during processing, a warning will be issued.
+ */
+ unsigned int getDeviceCount( void );
+
+ //! Return an RtAudio::DeviceInfo structure for a specified device number.
+ /*!
+
+ Any device integer between 0 and getDeviceCount() - 1 is valid.
+ If an invalid argument is provided, an RtAudioError (type = INVALID_USE)
+ will be thrown. If a device is busy or otherwise unavailable, the
+ structure member "probed" will have a value of "false" and all
+ other members are undefined. If the specified device is the
+ current default input or output device, the corresponding
+ "isDefault" member will have a value of "true".
+ */
+ RtAudio::DeviceInfo getDeviceInfo( unsigned int device );
+
+ //! A function that returns the index of the default output device.
+ /*!
+ If the underlying audio API does not provide a "default
+ device", or if no devices are available, the return value will be
+ 0. Note that this is a valid device identifier and it is the
+ client's responsibility to verify that a device is available
+ before attempting to open a stream.
+ */
+ unsigned int getDefaultOutputDevice( void );
+
+ //! A function that returns the index of the default input device.
+ /*!
+ If the underlying audio API does not provide a "default
+ device", or if no devices are available, the return value will be
+ 0. Note that this is a valid device identifier and it is the
+ client's responsibility to verify that a device is available
+ before attempting to open a stream.
+ */
+ unsigned int getDefaultInputDevice( void );
+
+ //! A public function for opening a stream with the specified parameters.
+ /*!
+ An RtAudioError (type = SYSTEM_ERROR) is thrown if a stream cannot be
+ opened with the specified parameters or an error occurs during
+ processing. An RtAudioError (type = INVALID_USE) is thrown if any
+ invalid device ID or channel number parameters are specified.
+
+ \param outputParameters Specifies output stream parameters to use
+ when opening a stream, including a device ID, number of channels,
+ and starting channel number. For input-only streams, this
+ argument should be NULL. The device ID is an index value between
+ 0 and getDeviceCount() - 1.
+ \param inputParameters Specifies input stream parameters to use
+ when opening a stream, including a device ID, number of channels,
+ and starting channel number. For output-only streams, this
+ argument should be NULL. The device ID is an index value between
+ 0 and getDeviceCount() - 1.
+ \param format An RtAudioFormat specifying the desired sample data format.
+ \param sampleRate The desired sample rate (sample frames per second).
+ \param *bufferFrames A pointer to a value indicating the desired
+ internal buffer size in sample frames. The actual value
+ used by the device is returned via the same pointer. A
+ value of zero can be specified, in which case the lowest
+ allowable value is determined.
+ \param callback A client-defined function that will be invoked
+ when input data is available and/or output data is needed.
+ \param userData An optional pointer to data that can be accessed
+ from within the callback function.
+ \param options An optional pointer to a structure containing various
+ global stream options, including a list of OR'ed RtAudioStreamFlags
+ and a suggested number of stream buffers that can be used to
+ control stream latency. More buffers typically result in more
+ robust performance, though at a cost of greater latency. If a
+ value of zero is specified, a system-specific median value is
+ chosen. If the RTAUDIO_MINIMIZE_LATENCY flag bit is set, the
+ lowest allowable value is used. The actual value used is
+ returned via the structure argument. The parameter is API dependent.
+ \param errorCallback A client-defined function that will be invoked
+ when an error has occurred.
+ */
+ void openStream( RtAudio::StreamParameters *outputParameters,
+ RtAudio::StreamParameters *inputParameters,
+ RtAudioFormat format, unsigned int sampleRate,
+ unsigned int *bufferFrames, RtAudioCallback callback,
+ void *userData = NULL, RtAudio::StreamOptions *options = NULL, RtAudioErrorCallback errorCallback = NULL );
+
+ //! A function that closes a stream and frees any associated stream memory.
+ /*!
+ If a stream is not open, this function issues a warning and
+ returns (no exception is thrown).
+ */
+ void closeStream( void );
+
+ //! A function that starts a stream.
+ /*!
+ An RtAudioError (type = SYSTEM_ERROR) is thrown if an error occurs
+ during processing. An RtAudioError (type = INVALID_USE) is thrown if a
+ stream is not open. A warning is issued if the stream is already
+ running.
+ */
+ void startStream( void );
+
+ //! Stop a stream, allowing any samples remaining in the output queue to be played.
+ /*!
+ An RtAudioError (type = SYSTEM_ERROR) is thrown if an error occurs
+ during processing. An RtAudioError (type = INVALID_USE) is thrown if a
+ stream is not open. A warning is issued if the stream is already
+ stopped.
+ */
+ void stopStream( void );
+
+ //! Stop a stream, discarding any samples remaining in the input/output queue.
+ /*!
+ An RtAudioError (type = SYSTEM_ERROR) is thrown if an error occurs
+ during processing. An RtAudioError (type = INVALID_USE) is thrown if a
+ stream is not open. A warning is issued if the stream is already
+ stopped.
+ */
+ void abortStream( void );
+
+ //! Returns true if a stream is open and false if not.
+ bool isStreamOpen( void ) const;
+
+ //! Returns true if the stream is running and false if it is stopped or not open.
+ bool isStreamRunning( void ) const;
+
+ //! Returns the number of elapsed seconds since the stream was started.
+ /*!
+ If a stream is not open, an RtAudioError (type = INVALID_USE) will be thrown.
+ */
+ double getStreamTime( void );
+
+ //! Set the stream time to a time in seconds greater than or equal to 0.0.
+ /*!
+ If a stream is not open, an RtAudioError (type = INVALID_USE) will be thrown.
+ */
+ void setStreamTime( double time );
+
+ //! Returns the internal stream latency in sample frames.
+ /*!
+ The stream latency refers to delay in audio input and/or output
+ caused by internal buffering by the audio system and/or hardware.
+ For duplex streams, the returned value will represent the sum of
+ the input and output latencies. If a stream is not open, an
+ RtAudioError (type = INVALID_USE) will be thrown. If the API does not
+ report latency, the return value will be zero.
+ */
+ long getStreamLatency( void );
+
+ //! Returns actual sample rate in use by the stream.
+ /*!
+ On some systems, the sample rate used may be slightly different
+ than that specified in the stream parameters. If a stream is not
+ open, an RtAudioError (type = INVALID_USE) will be thrown.
+ */
+ unsigned int getStreamSampleRate( void );
+
+ //! Specify whether warning messages should be printed to stderr.
+ void showWarnings( bool value = true );
+
+#if defined(__UNIX_JACK__)
+ void* HACK__getJackClient();
+#endif
+
+ protected:
+
+ void openRtApi( RtAudio::Api api );
+ RtApi *rtapi_;
+};
+
+// Operating system dependent thread functionality.
+#if defined(__WINDOWS_DS__) || defined(__WINDOWS_ASIO__) || defined(__WINDOWS_WASAPI__)
+
+ #ifndef NOMINMAX
+ #define NOMINMAX
+ #endif
+ #include <windows.h>
+ #include <process.h>
+ #include <stdint.h>
+
+ typedef uintptr_t ThreadHandle;
+ typedef CRITICAL_SECTION StreamMutex;
+
+#elif defined(__LINUX_ALSA__) || defined(__LINUX_PULSE__) || defined(__UNIX_JACK__) || defined(__LINUX_OSS__) || defined(__MACOSX_CORE__)
+ // Using pthread library for various flavors of unix.
+ #include <pthread.h>
+
+ typedef pthread_t ThreadHandle;
+ typedef pthread_mutex_t StreamMutex;
+
+#else // Setup for "dummy" behavior
+
+ #define __RTAUDIO_DUMMY__
+ typedef int ThreadHandle;
+ typedef int StreamMutex;
+
+#endif
+
+// This global structure type is used to pass callback information
+// between the private RtAudio stream structure and global callback
+// handling functions.
+struct CallbackInfo {
+ void *object; // Used as a "this" pointer.
+ ThreadHandle thread;
+ void *callback;
+ void *userData;
+ void *errorCallback;
+ void *apiInfo; // void pointer for API specific callback information
+ bool isRunning;
+ bool doRealtime;
+ int priority;
+
+ // Default constructor.
+ CallbackInfo()
+ :object(0), callback(0), userData(0), errorCallback(0), apiInfo(0), isRunning(false), doRealtime(false), priority(0) {}
+};
+
+// **************************************************************** //
+//
+// RtApi class declaration.
+//
+// Subclasses of RtApi contain all API- and OS-specific code necessary
+// to fully implement the RtAudio API.
+//
+// Note that RtApi is an abstract base class and cannot be
+// explicitly instantiated. The class RtAudio will create an
+// instance of an RtApi subclass (RtApiOss, RtApiAlsa,
+// RtApiJack, RtApiCore, RtApiDs, or RtApiAsio).
+//
+// **************************************************************** //
+
+#pragma pack(push, 1)
+class S24 {
+
+ protected:
+ unsigned char c3[3];
+
+ public:
+ S24() {}
+
+ S24& operator = ( const int& i ) {
+ c3[0] = (i & 0x000000ff);
+ c3[1] = (i & 0x0000ff00) >> 8;
+ c3[2] = (i & 0x00ff0000) >> 16;
+ return *this;
+ }
+
+ S24( const double& d ) { *this = (int) d; }
+ S24( const float& f ) { *this = (int) f; }
+ S24( const signed short& s ) { *this = (int) s; }
+ S24( const char& c ) { *this = (int) c; }
+
+ int asInt() {
+ int i = c3[0] | (c3[1] << 8) | (c3[2] << 16);
+ if (i & 0x800000) i |= ~0xffffff;
+ return i;
+ }
+};
+#pragma pack(pop)
+
+#if defined( HAVE_GETTIMEOFDAY )
+ #include <sys/time.h>
+#endif
+
+#include <sstream>
+
+class RTAUDIO_DLL_PUBLIC RtApi
+{
+friend RtAudio; // HACK
+
+public:
+
+ RtApi();
+ virtual ~RtApi();
+ virtual RtAudio::Api getCurrentApi( void ) = 0;
+ virtual unsigned int getDeviceCount( void ) = 0;
+ virtual RtAudio::DeviceInfo getDeviceInfo( unsigned int device ) = 0;
+ virtual unsigned int getDefaultInputDevice( void );
+ virtual unsigned int getDefaultOutputDevice( void );
+ void openStream( RtAudio::StreamParameters *outputParameters,
+ RtAudio::StreamParameters *inputParameters,
+ RtAudioFormat format, unsigned int sampleRate,
+ unsigned int *bufferFrames, RtAudioCallback callback,
+ void *userData, RtAudio::StreamOptions *options,
+ RtAudioErrorCallback errorCallback );
+ virtual void closeStream( void );
+ virtual void startStream( void ) = 0;
+ virtual void stopStream( void ) = 0;
+ virtual void abortStream( void ) = 0;
+ long getStreamLatency( void );
+ unsigned int getStreamSampleRate( void );
+ virtual double getStreamTime( void );
+ virtual void setStreamTime( double time );
+ bool isStreamOpen( void ) const { return stream_.state != STREAM_CLOSED; }
+ bool isStreamRunning( void ) const { return stream_.state == STREAM_RUNNING; }
+ void showWarnings( bool value ) { showWarnings_ = value; }
+
+
+protected:
+
+ static const unsigned int MAX_SAMPLE_RATES;
+ static const unsigned int SAMPLE_RATES[];
+
+ enum { FAILURE, SUCCESS };
+
+ enum StreamState {
+ STREAM_STOPPED,
+ STREAM_STOPPING,
+ STREAM_RUNNING,
+ STREAM_CLOSED = -50
+ };
+
+ enum StreamMode {
+ OUTPUT,
+ INPUT,
+ DUPLEX,
+ UNINITIALIZED = -75
+ };
+
+ // A protected structure used for buffer conversion.
+ struct ConvertInfo {
+ int channels;
+ int inJump, outJump;
+ RtAudioFormat inFormat, outFormat;
+ std::vector<int> inOffset;
+ std::vector<int> outOffset;
+ };
+
+ // A protected structure for audio streams.
+ struct RtApiStream {
+ unsigned int device[2]; // Playback and record, respectively.
+ void *apiHandle; // void pointer for API specific stream handle information
+ StreamMode mode; // OUTPUT, INPUT, or DUPLEX.
+ StreamState state; // STOPPED, RUNNING, or CLOSED
+ char *userBuffer[2]; // Playback and record, respectively.
+ char *deviceBuffer;
+ bool doConvertBuffer[2]; // Playback and record, respectively.
+ bool userInterleaved;
+ bool deviceInterleaved[2]; // Playback and record, respectively.
+ bool doByteSwap[2]; // Playback and record, respectively.
+ unsigned int sampleRate;
+ unsigned int bufferSize;
+ unsigned int nBuffers;
+ unsigned int nUserChannels[2]; // Playback and record, respectively.
+ unsigned int nDeviceChannels[2]; // Playback and record channels, respectively.
+ unsigned int channelOffset[2]; // Playback and record, respectively.
+ unsigned long latency[2]; // Playback and record, respectively.
+ RtAudioFormat userFormat;
+ RtAudioFormat deviceFormat[2]; // Playback and record, respectively.
+ StreamMutex mutex;
+ CallbackInfo callbackInfo;
+ ConvertInfo convertInfo[2];
+ double streamTime; // Number of elapsed seconds since the stream started.
+
+#if defined(HAVE_GETTIMEOFDAY)
+ struct timeval lastTickTimestamp;
+#endif
+
+ RtApiStream()
+ :apiHandle(0), deviceBuffer(0) { device[0] = 11111; device[1] = 11111; }
+ };
+
+ typedef S24 Int24;
+ typedef signed short Int16;
+ typedef signed int Int32;
+ typedef float Float32;
+ typedef double Float64;
+
+ std::ostringstream errorStream_;
+ std::string errorText_;
+ bool showWarnings_;
+ RtApiStream stream_;
+ bool firstErrorOccurred_;
+
+ /*!
+ Protected, api-specific method that attempts to open a device
+ with the given parameters. This function MUST be implemented by
+ all subclasses. If an error is encountered during the probe, a
+ "warning" message is reported and FAILURE is returned. A
+ successful probe is indicated by a return value of SUCCESS.
+ */
+ virtual bool probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
+ unsigned int firstChannel, unsigned int sampleRate,
+ RtAudioFormat format, unsigned int *bufferSize,
+ RtAudio::StreamOptions *options );
+
+ //! A protected function used to increment the stream time.
+ void tickStreamTime( void );
+
+ //! Protected common method to clear an RtApiStream structure.
+ void clearStreamInfo();
+
+ /*!
+ Protected common method that throws an RtAudioError (type =
+ INVALID_USE) if a stream is not open.
+ */
+ void verifyStream( void );
+
+ //! Protected common error method to allow global control over error handling.
+ void error( RtAudioError::Type type );
+
+ /*!
+ Protected method used to perform format, channel number, and/or interleaving
+ conversions between the user and device buffers.
+ */
+ void convertBuffer( char *outBuffer, char *inBuffer, ConvertInfo &info );
+
+ //! Protected common method used to perform byte-swapping on buffers.
+ void byteSwapBuffer( char *buffer, unsigned int samples, RtAudioFormat format );
+
+ //! Protected common method that returns the number of bytes for a given format.
+ unsigned int formatBytes( RtAudioFormat format );
+
+ //! Protected common method that sets up the parameters for buffer conversion.
+ void setConvertInfo( StreamMode mode, unsigned int firstChannel );
+};
+
+// **************************************************************** //
+//
+// Inline RtAudio definitions.
+//
+// **************************************************************** //
+
+inline RtAudio::Api RtAudio :: getCurrentApi( void ) { return rtapi_->getCurrentApi(); }
+inline unsigned int RtAudio :: getDeviceCount( void ) { return rtapi_->getDeviceCount(); }
+inline RtAudio::DeviceInfo RtAudio :: getDeviceInfo( unsigned int device ) { return rtapi_->getDeviceInfo( device ); }
+inline unsigned int RtAudio :: getDefaultInputDevice( void ) { return rtapi_->getDefaultInputDevice(); }
+inline unsigned int RtAudio :: getDefaultOutputDevice( void ) { return rtapi_->getDefaultOutputDevice(); }
+inline void RtAudio :: closeStream( void ) { return rtapi_->closeStream(); }
+inline void RtAudio :: startStream( void ) { return rtapi_->startStream(); }
+inline void RtAudio :: stopStream( void ) { return rtapi_->stopStream(); }
+inline void RtAudio :: abortStream( void ) { return rtapi_->abortStream(); }
+inline bool RtAudio :: isStreamOpen( void ) const { return rtapi_->isStreamOpen(); }
+inline bool RtAudio :: isStreamRunning( void ) const { return rtapi_->isStreamRunning(); }
+inline long RtAudio :: getStreamLatency( void ) { return rtapi_->getStreamLatency(); }
+inline unsigned int RtAudio :: getStreamSampleRate( void ) { return rtapi_->getStreamSampleRate(); }
+inline double RtAudio :: getStreamTime( void ) { return rtapi_->getStreamTime(); }
+inline void RtAudio :: setStreamTime( double time ) { return rtapi_->setStreamTime( time ); }
+inline void RtAudio :: showWarnings( bool value ) { rtapi_->showWarnings( value ); }
+
+// RtApi Subclass prototypes.
+
+#if defined(__MACOSX_CORE__)
+
+#include <CoreAudio/AudioHardware.h>
+
+class RtApiCore: public RtApi
+{
+public:
+
+ RtApiCore();
+ ~RtApiCore();
+ RtAudio::Api getCurrentApi( void ) { return RtAudio::MACOSX_CORE; }
+ unsigned int getDeviceCount( void );
+ RtAudio::DeviceInfo getDeviceInfo( unsigned int device );
+ unsigned int getDefaultOutputDevice( void );
+ unsigned int getDefaultInputDevice( void );
+ void closeStream( void );
+ void startStream( void );
+ void stopStream( void );
+ void abortStream( void );
+
+ // This function is intended for internal use only. It must be
+ // public because it is called by the internal callback handler,
+ // which is not a member of RtAudio. External use of this function
+ // will most likely produce highly undesirable results!
+ bool callbackEvent( AudioDeviceID deviceId,
+ const AudioBufferList *inBufferList,
+ const AudioBufferList *outBufferList );
+
+ private:
+
+ bool probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
+ unsigned int firstChannel, unsigned int sampleRate,
+ RtAudioFormat format, unsigned int *bufferSize,
+ RtAudio::StreamOptions *options );
+ static const char* getErrorCode( OSStatus code );
+};
+
+#endif
+
+#if defined(__UNIX_JACK__)
+
+class RtApiJack: public RtApi
+{
+public:
+
+ RtApiJack();
+ ~RtApiJack();
+ RtAudio::Api getCurrentApi( void ) { return RtAudio::UNIX_JACK; }
+ unsigned int getDeviceCount( void );
+ RtAudio::DeviceInfo getDeviceInfo( unsigned int device );
+ void closeStream( void );
+ void startStream( void );
+ void stopStream( void );
+ void abortStream( void );
+
+ // This function is intended for internal use only. It must be
+ // public because it is called by the internal callback handler,
+ // which is not a member of RtAudio. External use of this function
+ // will most likely produce highly undesirable results!
+ bool callbackEvent( unsigned long nframes );
+
+ private:
+
+ bool probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
+ unsigned int firstChannel, unsigned int sampleRate,
+ RtAudioFormat format, unsigned int *bufferSize,
+ RtAudio::StreamOptions *options );
+
+ bool shouldAutoconnect_;
+};
+
+#endif
+
+#if defined(__WINDOWS_ASIO__)
+
+class RtApiAsio: public RtApi
+{
+public:
+
+ RtApiAsio();
+ ~RtApiAsio();
+ RtAudio::Api getCurrentApi( void ) { return RtAudio::WINDOWS_ASIO; }
+ unsigned int getDeviceCount( void );
+ RtAudio::DeviceInfo getDeviceInfo( unsigned int device );
+ void closeStream( void );
+ void startStream( void );
+ void stopStream( void );
+ void abortStream( void );
+
+ // This function is intended for internal use only. It must be
+ // public because it is called by the internal callback handler,
+ // which is not a member of RtAudio. External use of this function
+ // will most likely produce highly undesirable results!
+ bool callbackEvent( long bufferIndex );
+
+ private:
+
+ std::vector<RtAudio::DeviceInfo> devices_;
+ void saveDeviceInfo( void );
+ bool coInitialized_;
+ bool probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
+ unsigned int firstChannel, unsigned int sampleRate,
+ RtAudioFormat format, unsigned int *bufferSize,
+ RtAudio::StreamOptions *options );
+};
+
+#endif
+
+#if defined(__WINDOWS_DS__)
+
+class RtApiDs: public RtApi
+{
+public:
+
+ RtApiDs();
+ ~RtApiDs();
+ RtAudio::Api getCurrentApi( void ) { return RtAudio::WINDOWS_DS; }
+ unsigned int getDeviceCount( void );
+ unsigned int getDefaultOutputDevice( void );
+ unsigned int getDefaultInputDevice( void );
+ RtAudio::DeviceInfo getDeviceInfo( unsigned int device );
+ void closeStream( void );
+ void startStream( void );
+ void stopStream( void );
+ void abortStream( void );
+
+ // This function is intended for internal use only. It must be
+ // public because it is called by the internal callback handler,
+ // which is not a member of RtAudio. External use of this function
+ // will most likely produce highly undesirable results!
+ void callbackEvent( void );
+
+ private:
+
+ bool coInitialized_;
+ bool buffersRolling;
+ long duplexPrerollBytes;
+ std::vector<struct DsDevice> dsDevices;
+ bool probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
+ unsigned int firstChannel, unsigned int sampleRate,
+ RtAudioFormat format, unsigned int *bufferSize,
+ RtAudio::StreamOptions *options );
+};
+
+#endif
+
+#if defined(__WINDOWS_WASAPI__)
+
+struct IMMDeviceEnumerator;
+
+class RtApiWasapi : public RtApi
+{
+public:
+ RtApiWasapi();
+ virtual ~RtApiWasapi();
+
+ RtAudio::Api getCurrentApi( void ) { return RtAudio::WINDOWS_WASAPI; }
+ unsigned int getDeviceCount( void );
+ RtAudio::DeviceInfo getDeviceInfo( unsigned int device );
+ unsigned int getDefaultOutputDevice( void );
+ unsigned int getDefaultInputDevice( void );
+ void closeStream( void );
+ void startStream( void );
+ void stopStream( void );
+ void abortStream( void );
+
+private:
+ bool coInitialized_;
+ IMMDeviceEnumerator* deviceEnumerator_;
+
+ bool probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
+ unsigned int firstChannel, unsigned int sampleRate,
+ RtAudioFormat format, unsigned int* bufferSize,
+ RtAudio::StreamOptions* options );
+
+ static DWORD WINAPI runWasapiThread( void* wasapiPtr );
+ static DWORD WINAPI stopWasapiThread( void* wasapiPtr );
+ static DWORD WINAPI abortWasapiThread( void* wasapiPtr );
+ void wasapiThread();
+};
+
+#endif
+
+#if defined(__LINUX_ALSA__)
+
+class RtApiAlsa: public RtApi
+{
+public:
+
+ RtApiAlsa();
+ ~RtApiAlsa();
+ RtAudio::Api getCurrentApi() { return RtAudio::LINUX_ALSA; }
+ unsigned int getDeviceCount( void );
+ RtAudio::DeviceInfo getDeviceInfo( unsigned int device );
+ void closeStream( void );
+ void startStream( void );
+ void stopStream( void );
+ void abortStream( void );
+
+ // This function is intended for internal use only. It must be
+ // public because it is called by the internal callback handler,
+ // which is not a member of RtAudio. External use of this function
+ // will most likely produce highly undesirable results!
+ void callbackEvent( void );
+
+ private:
+
+ std::vector<RtAudio::DeviceInfo> devices_;
+ void saveDeviceInfo( void );
+ bool probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
+ unsigned int firstChannel, unsigned int sampleRate,
+ RtAudioFormat format, unsigned int *bufferSize,
+ RtAudio::StreamOptions *options );
+};
+
+#endif
+
+#if defined(__LINUX_PULSE__)
+
+class RtApiPulse: public RtApi
+{
+public:
+ ~RtApiPulse();
+ RtAudio::Api getCurrentApi() { return RtAudio::LINUX_PULSE; }
+ unsigned int getDeviceCount( void );
+ RtAudio::DeviceInfo getDeviceInfo( unsigned int device );
+ void closeStream( void );
+ void startStream( void );
+ void stopStream( void );
+ void abortStream( void );
+
+ // This function is intended for internal use only. It must be
+ // public because it is called by the internal callback handler,
+ // which is not a member of RtAudio. External use of this function
+ // will most likely produce highly undesirable results!\r
+ void callbackEvent( void );
+
+ private:
+
+ std::vector<RtAudio::DeviceInfo> devices_;
+ void saveDeviceInfo( void );
+ bool probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
+ unsigned int firstChannel, unsigned int sampleRate,
+ RtAudioFormat format, unsigned int *bufferSize,
+ RtAudio::StreamOptions *options );
+};
+
+#endif
+
+#if defined(__LINUX_OSS__)
+
+class RtApiOss: public RtApi
+{
+public:
+
+ RtApiOss();
+ ~RtApiOss();
+ RtAudio::Api getCurrentApi() { return RtAudio::LINUX_OSS; }
+ unsigned int getDeviceCount( void );
+ RtAudio::DeviceInfo getDeviceInfo( unsigned int device );
+ void closeStream( void );
+ void startStream( void );
+ void stopStream( void );
+ void abortStream( void );
+
+ // This function is intended for internal use only. It must be
+ // public because it is called by the internal callback handler,
+ // which is not a member of RtAudio. External use of this function
+ // will most likely produce highly undesirable results!\r
+ void callbackEvent( void );
+
+ private:
+
+ bool probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
+ unsigned int firstChannel, unsigned int sampleRate,
+ RtAudioFormat format, unsigned int *bufferSize,
+ RtAudio::StreamOptions *options );
+};
+
+#endif
+
+#if defined(__RTAUDIO_DUMMY__)
+
+class RtApiDummy: public RtApi
+{
+public:
+
+ RtApiDummy() { errorText_ = "RtApiDummy: This class provides no functionality."; error( RtAudioError::WARNING ); }
+ RtAudio::Api getCurrentApi( void ) { return RtAudio::RTAUDIO_DUMMY; }
+ unsigned int getDeviceCount( void ) { return 0; }
+ RtAudio::DeviceInfo getDeviceInfo( unsigned int /*device*/ ) { RtAudio::DeviceInfo info; return info; }
+ void closeStream( void ) {}
+ void startStream( void ) {}
+ void stopStream( void ) {}
+ void abortStream( void ) {}
+
+ private:
+
+ bool probeDeviceOpen( unsigned int /*device*/, StreamMode /*mode*/, unsigned int /*channels*/,
+ unsigned int /*firstChannel*/, unsigned int /*sampleRate*/,
+ RtAudioFormat /*format*/, unsigned int * /*bufferSize*/,
+ RtAudio::StreamOptions * /*options*/ ) { return false; }
+};
+
+#endif
+
+#endif
+
+// Indentation settings for Vim and Emacs
+//
+// Local Variables:
+// c-basic-offset: 2
+// indent-tabs-mode: nil
+// End:
+//
+// vim: et sts=2 sw=2
--- /dev/null
+/*\r
+ Steinberg Audio Stream I/O API\r
+ (c) 1996, Steinberg Soft- und Hardware GmbH\r
+\r
+ asio.cpp\r
+ \r
+ asio functions entries which translate the\r
+ asio interface to the asiodrvr class methods\r
+*/ \r
+ \r
+#include <string.h>\r
+#include "asiosys.h" // platform definition\r
+#include "asio.h"\r
+\r
+#if MAC\r
+#include "asiodrvr.h"\r
+\r
+#pragma export on\r
+\r
+AsioDriver *theAsioDriver = 0;\r
+\r
+extern "C"\r
+{\r
+\r
+long main()\r
+{\r
+ return 'ASIO';\r
+}\r
+\r
+#elif WINDOWS\r
+\r
+#include "windows.h"\r
+#include "iasiodrv.h"\r
+#include "asiodrivers.h"\r
+\r
+IASIO *theAsioDriver = 0;\r
+extern AsioDrivers *asioDrivers;\r
+\r
+#elif SGI || SUN || BEOS || LINUX\r
+#include "asiodrvr.h"\r
+static AsioDriver *theAsioDriver = 0;\r
+#endif\r
+\r
+//-----------------------------------------------------------------------------------------------------\r
+ASIOError ASIOInit(ASIODriverInfo *info)\r
+{\r
+#if MAC || SGI || SUN || BEOS || LINUX\r
+ if(theAsioDriver)\r
+ {\r
+ delete theAsioDriver;\r
+ theAsioDriver = 0;\r
+ } \r
+ info->driverVersion = 0;\r
+ strcpy(info->name, "No ASIO Driver");\r
+ theAsioDriver = getDriver();\r
+ if(!theAsioDriver)\r
+ {\r
+ strcpy(info->errorMessage, "Not enough memory for the ASIO driver!"); \r
+ return ASE_NotPresent;\r
+ }\r
+ if(!theAsioDriver->init(info->sysRef))\r
+ {\r
+ theAsioDriver->getErrorMessage(info->errorMessage);\r
+ delete theAsioDriver;\r
+ theAsioDriver = 0;\r
+ return ASE_NotPresent;\r
+ }\r
+ strcpy(info->errorMessage, "No ASIO Driver Error");\r
+ theAsioDriver->getDriverName(info->name);\r
+ info->driverVersion = theAsioDriver->getDriverVersion();\r
+ return ASE_OK;\r
+\r
+#else\r
+\r
+ info->driverVersion = 0;\r
+ strcpy(info->name, "No ASIO Driver");\r
+ if(theAsioDriver) // must be loaded!\r
+ {\r
+ if(!theAsioDriver->init(info->sysRef))\r
+ {\r
+ theAsioDriver->getErrorMessage(info->errorMessage);\r
+ theAsioDriver = 0;\r
+ return ASE_NotPresent;\r
+ } \r
+\r
+ strcpy(info->errorMessage, "No ASIO Driver Error");\r
+ theAsioDriver->getDriverName(info->name);\r
+ info->driverVersion = theAsioDriver->getDriverVersion();\r
+ return ASE_OK;\r
+ }\r
+ return ASE_NotPresent;\r
+\r
+#endif // !MAC\r
+}\r
+\r
+ASIOError ASIOExit(void)\r
+{\r
+ if(theAsioDriver)\r
+ {\r
+#if WINDOWS\r
+ asioDrivers->removeCurrentDriver();\r
+#else\r
+ delete theAsioDriver;\r
+#endif\r
+ } \r
+ theAsioDriver = 0;\r
+ return ASE_OK;\r
+}\r
+\r
+ASIOError ASIOStart(void)\r
+{\r
+ if(!theAsioDriver)\r
+ return ASE_NotPresent;\r
+ return theAsioDriver->start();\r
+}\r
+\r
+ASIOError ASIOStop(void)\r
+{\r
+ if(!theAsioDriver)\r
+ return ASE_NotPresent;\r
+ return theAsioDriver->stop();\r
+}\r
+\r
+ASIOError ASIOGetChannels(long *numInputChannels, long *numOutputChannels)\r
+{\r
+ if(!theAsioDriver)\r
+ {\r
+ *numInputChannels = *numOutputChannels = 0;\r
+ return ASE_NotPresent;\r
+ }\r
+ return theAsioDriver->getChannels(numInputChannels, numOutputChannels);\r
+}\r
+\r
+ASIOError ASIOGetLatencies(long *inputLatency, long *outputLatency)\r
+{\r
+ if(!theAsioDriver)\r
+ {\r
+ *inputLatency = *outputLatency = 0;\r
+ return ASE_NotPresent;\r
+ }\r
+ return theAsioDriver->getLatencies(inputLatency, outputLatency);\r
+}\r
+\r
+ASIOError ASIOGetBufferSize(long *minSize, long *maxSize, long *preferredSize, long *granularity)\r
+{\r
+ if(!theAsioDriver)\r
+ {\r
+ *minSize = *maxSize = *preferredSize = *granularity = 0;\r
+ return ASE_NotPresent;\r
+ }\r
+ return theAsioDriver->getBufferSize(minSize, maxSize, preferredSize, granularity);\r
+}\r
+\r
+ASIOError ASIOCanSampleRate(ASIOSampleRate sampleRate)\r
+{\r
+ if(!theAsioDriver)\r
+ return ASE_NotPresent;\r
+ return theAsioDriver->canSampleRate(sampleRate);\r
+}\r
+\r
+ASIOError ASIOGetSampleRate(ASIOSampleRate *currentRate)\r
+{\r
+ if(!theAsioDriver)\r
+ return ASE_NotPresent;\r
+ return theAsioDriver->getSampleRate(currentRate);\r
+}\r
+\r
+ASIOError ASIOSetSampleRate(ASIOSampleRate sampleRate)\r
+{\r
+ if(!theAsioDriver)\r
+ return ASE_NotPresent;\r
+ return theAsioDriver->setSampleRate(sampleRate);\r
+}\r
+\r
+ASIOError ASIOGetClockSources(ASIOClockSource *clocks, long *numSources)\r
+{\r
+ if(!theAsioDriver)\r
+ {\r
+ *numSources = 0;\r
+ return ASE_NotPresent;\r
+ }\r
+ return theAsioDriver->getClockSources(clocks, numSources);\r
+}\r
+\r
+ASIOError ASIOSetClockSource(long reference)\r
+{\r
+ if(!theAsioDriver)\r
+ return ASE_NotPresent;\r
+ return theAsioDriver->setClockSource(reference);\r
+}\r
+\r
+ASIOError ASIOGetSamplePosition(ASIOSamples *sPos, ASIOTimeStamp *tStamp)\r
+{\r
+ if(!theAsioDriver)\r
+ return ASE_NotPresent;\r
+ return theAsioDriver->getSamplePosition(sPos, tStamp);\r
+}\r
+\r
+ASIOError ASIOGetChannelInfo(ASIOChannelInfo *info)\r
+{\r
+ if(!theAsioDriver)\r
+ {\r
+ info->channelGroup = -1;\r
+ info->type = ASIOSTInt16MSB;\r
+ strcpy(info->name, "None");\r
+ return ASE_NotPresent;\r
+ }\r
+ return theAsioDriver->getChannelInfo(info);\r
+}\r
+\r
+ASIOError ASIOCreateBuffers(ASIOBufferInfo *bufferInfos, long numChannels,\r
+ long bufferSize, ASIOCallbacks *callbacks)\r
+{\r
+ if(!theAsioDriver)\r
+ {\r
+ ASIOBufferInfo *info = bufferInfos;\r
+ for(long i = 0; i < numChannels; i++, info++)\r
+ info->buffers[0] = info->buffers[1] = 0;\r
+ return ASE_NotPresent;\r
+ }\r
+ return theAsioDriver->createBuffers(bufferInfos, numChannels, bufferSize, callbacks);\r
+}\r
+\r
+ASIOError ASIODisposeBuffers(void)\r
+{\r
+ if(!theAsioDriver)\r
+ return ASE_NotPresent;\r
+ return theAsioDriver->disposeBuffers();\r
+}\r
+\r
+ASIOError ASIOControlPanel(void)\r
+{\r
+ if(!theAsioDriver)\r
+ return ASE_NotPresent;\r
+ return theAsioDriver->controlPanel();\r
+}\r
+\r
+ASIOError ASIOFuture(long selector, void *opt)\r
+{\r
+ if(!theAsioDriver)\r
+ return ASE_NotPresent;\r
+ return theAsioDriver->future(selector, opt);\r
+}\r
+\r
+ASIOError ASIOOutputReady(void)\r
+{\r
+ if(!theAsioDriver)\r
+ return ASE_NotPresent;\r
+ return theAsioDriver->outputReady();\r
+}\r
+\r
+#if MAC\r
+} // extern "C"\r
+#pragma export off\r
+#endif\r
+\r
+\r
--- /dev/null
+//---------------------------------------------------------------------------------------------------\r
+//---------------------------------------------------------------------------------------------------\r
+\r
+/*\r
+ Steinberg Audio Stream I/O API\r
+ (c) 1997 - 2013, Steinberg Media Technologies GmbH\r
+\r
+ ASIO Interface Specification v 2.3\r
+\r
+ 2005 - Added support for DSD sample data (in cooperation with Sony)\r
+ 2012 - Added support for drop out detection\r
+ \r
+ \r
+\r
+ basic concept is an i/o synchronous double-buffer scheme:\r
+ \r
+ on bufferSwitch(index == 0), host will read/write:\r
+\r
+ after ASIOStart(), the\r
+ read first input buffer A (index 0)\r
+ | will be invalid (empty)\r
+ * ------------------------\r
+ |------------------------|-----------------------|\r
+ | | |\r
+ | Input Buffer A (0) | Input Buffer B (1) |\r
+ | | |\r
+ |------------------------|-----------------------|\r
+ | | |\r
+ | Output Buffer A (0) | Output Buffer B (1) |\r
+ | | |\r
+ |------------------------|-----------------------|\r
+ * -------------------------\r
+ | before calling ASIOStart(),\r
+ write host will have filled output\r
+ buffer B (index 1) already\r
+\r
+ *please* take special care of proper statement of input\r
+ and output latencies (see ASIOGetLatencies()), these\r
+ control sequencer sync accuracy\r
+\r
+*/\r
+\r
+//---------------------------------------------------------------------------------------------------\r
+//---------------------------------------------------------------------------------------------------\r
+\r
+/*\r
+\r
+prototypes summary:\r
+\r
+ASIOError ASIOInit(ASIODriverInfo *info);\r
+ASIOError ASIOExit(void);\r
+ASIOError ASIOStart(void);\r
+ASIOError ASIOStop(void);\r
+ASIOError ASIOGetChannels(long *numInputChannels, long *numOutputChannels);\r
+ASIOError ASIOGetLatencies(long *inputLatency, long *outputLatency);\r
+ASIOError ASIOGetBufferSize(long *minSize, long *maxSize, long *preferredSize, long *granularity);\r
+ASIOError ASIOCanSampleRate(ASIOSampleRate sampleRate);\r
+ASIOError ASIOGetSampleRate(ASIOSampleRate *currentRate);\r
+ASIOError ASIOSetSampleRate(ASIOSampleRate sampleRate);\r
+ASIOError ASIOGetClockSources(ASIOClockSource *clocks, long *numSources);\r
+ASIOError ASIOSetClockSource(long reference);\r
+ASIOError ASIOGetSamplePosition (ASIOSamples *sPos, ASIOTimeStamp *tStamp);\r
+ASIOError ASIOGetChannelInfo(ASIOChannelInfo *info);\r
+ASIOError ASIOCreateBuffers(ASIOBufferInfo *bufferInfos, long numChannels,\r
+ long bufferSize, ASIOCallbacks *callbacks);\r
+ASIOError ASIODisposeBuffers(void);\r
+ASIOError ASIOControlPanel(void);\r
+void *ASIOFuture(long selector, void *params);\r
+ASIOError ASIOOutputReady(void);\r
+\r
+*/\r
+\r
+//---------------------------------------------------------------------------------------------------\r
+//---------------------------------------------------------------------------------------------------\r
+\r
+#ifndef __ASIO_H\r
+#define __ASIO_H\r
+\r
+// force 4 byte alignment\r
+#if defined(_MSC_VER) && !defined(__MWERKS__) \r
+#pragma pack(push,4)\r
+#elif PRAGMA_ALIGN_SUPPORTED\r
+#pragma options align = native\r
+#endif\r
+\r
+//- - - - - - - - - - - - - - - - - - - - - - - - -\r
+// Type definitions\r
+//- - - - - - - - - - - - - - - - - - - - - - - - -\r
+\r
+// number of samples data type is 64 bit integer\r
+#if NATIVE_INT64\r
+ typedef long long int ASIOSamples;\r
+#else\r
+ typedef struct ASIOSamples {\r
+ unsigned long hi;\r
+ unsigned long lo;\r
+ } ASIOSamples;\r
+#endif\r
+\r
+// Timestamp data type is 64 bit integer,\r
+// Time format is Nanoseconds.\r
+#if NATIVE_INT64\r
+ typedef long long int ASIOTimeStamp ;\r
+#else\r
+ typedef struct ASIOTimeStamp {\r
+ unsigned long hi;\r
+ unsigned long lo;\r
+ } ASIOTimeStamp;\r
+#endif\r
+\r
+// Samplerates are expressed in IEEE 754 64 bit double float,\r
+// native format as host computer\r
+#if IEEE754_64FLOAT\r
+ typedef double ASIOSampleRate;\r
+#else\r
+ typedef struct ASIOSampleRate {\r
+ char ieee[8];\r
+ } ASIOSampleRate;\r
+#endif\r
+\r
+// Boolean values are expressed as long\r
+typedef long ASIOBool;\r
+enum {\r
+ ASIOFalse = 0,\r
+ ASIOTrue = 1\r
+};\r
+\r
+// Sample Types are expressed as long\r
+typedef long ASIOSampleType;\r
+enum {\r
+ ASIOSTInt16MSB = 0,\r
+ ASIOSTInt24MSB = 1, // used for 20 bits as well\r
+ ASIOSTInt32MSB = 2,\r
+ ASIOSTFloat32MSB = 3, // IEEE 754 32 bit float\r
+ ASIOSTFloat64MSB = 4, // IEEE 754 64 bit double float\r
+\r
+ // these are used for 32 bit data buffer, with different alignment of the data inside\r
+ // 32 bit PCI bus systems can be more easily used with these\r
+ ASIOSTInt32MSB16 = 8, // 32 bit data with 16 bit alignment\r
+ ASIOSTInt32MSB18 = 9, // 32 bit data with 18 bit alignment\r
+ ASIOSTInt32MSB20 = 10, // 32 bit data with 20 bit alignment\r
+ ASIOSTInt32MSB24 = 11, // 32 bit data with 24 bit alignment\r
+ \r
+ ASIOSTInt16LSB = 16,\r
+ ASIOSTInt24LSB = 17, // used for 20 bits as well\r
+ ASIOSTInt32LSB = 18,\r
+ ASIOSTFloat32LSB = 19, // IEEE 754 32 bit float, as found on Intel x86 architecture\r
+ ASIOSTFloat64LSB = 20, // IEEE 754 64 bit double float, as found on Intel x86 architecture\r
+\r
+ // these are used for 32 bit data buffer, with different alignment of the data inside\r
+ // 32 bit PCI bus systems can be more easily used with these\r
+ ASIOSTInt32LSB16 = 24, // 32 bit data with 16 bit alignment\r
+ ASIOSTInt32LSB18 = 25, // 32 bit data with 18 bit alignment\r
+ ASIOSTInt32LSB20 = 26, // 32 bit data with 20 bit alignment\r
+ ASIOSTInt32LSB24 = 27, // 32 bit data with 24 bit alignment\r
+\r
+ // ASIO DSD format.\r
+ ASIOSTDSDInt8LSB1 = 32, // DSD 1 bit data, 8 samples per byte. First sample in Least significant bit.\r
+ ASIOSTDSDInt8MSB1 = 33, // DSD 1 bit data, 8 samples per byte. First sample in Most significant bit.\r
+ ASIOSTDSDInt8NER8 = 40, // DSD 8 bit data, 1 sample per byte. No Endianness required.\r
+\r
+ ASIOSTLastEntry\r
+};\r
+\r
+/*-----------------------------------------------------------------------------\r
+// DSD operation and buffer layout\r
+// Definition by Steinberg/Sony Oxford.\r
+//\r
+// We have tried to treat DSD as PCM and so keep a consistent structure across\r
+// the ASIO interface.\r
+//\r
+// DSD's sample rate is normally referenced as a multiple of 44.1Khz, so\r
+// the standard sample rate is referred to as 64Fs (or 2.8224Mhz). We looked\r
+// at making a special case for DSD and adding a field to the ASIOFuture that\r
+// would allow the user to select the Over Sampling Rate (OSR) as a separate\r
+// entity but decided in the end just to treat it as a simple value of\r
+// 2.8224Mhz and use the standard interface to set it.\r
+//\r
+// The second problem was the "word" size, in PCM the word size is always a\r
+// greater than or equal to 8 bits (a byte). This makes life easy as we can\r
+// then pack the samples into the "natural" size for the machine.\r
+// In DSD the "word" size is 1 bit. This is not a major problem and can easily\r
+// be dealt with if we ensure that we always deal with a multiple of 8 samples.\r
+//\r
+// DSD brings with it another twist to the Endianness religion. How are the\r
+// samples packed into the byte. It would be nice to just say the most significant\r
+// bit is always the first sample, however there would then be a performance hit\r
+// on little endian machines. Looking at how some of the processing goes...\r
+// Little endian machines like the first sample to be in the Least Significant Bit,\r
+// this is because when you write it to memory the data is in the correct format\r
+// to be shifted in and out of the words.\r
+// Big endian machine prefer the first sample to be in the Most Significant Bit,\r
+// again for the same reason.\r
+//\r
+// And just when things were looking really muddy there is a proposed extension to\r
+// DSD that uses 8 bit word sizes. It does not care what endianness you use.\r
+//\r
+// Switching the driver between DSD and PCM mode\r
+// ASIOFuture allows for extending the ASIO API quite transparently.\r
+// See kAsioSetIoFormat, kAsioGetIoFormat, kAsioCanDoIoFormat\r
+//\r
+//-----------------------------------------------------------------------------*/\r
+\r
+\r
+//- - - - - - - - - - - - - - - - - - - - - - - - -\r
+// Error codes\r
+//- - - - - - - - - - - - - - - - - - - - - - - - -\r
+\r
+typedef long ASIOError;\r
+enum {\r
+ ASE_OK = 0, // This value will be returned whenever the call succeeded\r
+ ASE_SUCCESS = 0x3f4847a0, // unique success return value for ASIOFuture calls\r
+ ASE_NotPresent = -1000, // hardware input or output is not present or available\r
+ ASE_HWMalfunction, // hardware is malfunctioning (can be returned by any ASIO function)\r
+ ASE_InvalidParameter, // input parameter invalid\r
+ ASE_InvalidMode, // hardware is in a bad mode or used in a bad mode\r
+ ASE_SPNotAdvancing, // hardware is not running when sample position is inquired\r
+ ASE_NoClock, // sample clock or rate cannot be determined or is not present\r
+ ASE_NoMemory // not enough memory for completing the request\r
+};\r
+\r
+//---------------------------------------------------------------------------------------------------\r
+//---------------------------------------------------------------------------------------------------\r
+\r
+//- - - - - - - - - - - - - - - - - - - - - - - - -\r
+// Time Info support\r
+//- - - - - - - - - - - - - - - - - - - - - - - - -\r
+\r
+typedef struct ASIOTimeCode\r
+{ \r
+ double speed; // speed relation (fraction of nominal speed)\r
+ // optional; set to 0. or 1. if not supported\r
+ ASIOSamples timeCodeSamples; // time in samples\r
+ unsigned long flags; // some information flags (see below)\r
+ char future[64];\r
+} ASIOTimeCode;\r
+\r
+typedef enum ASIOTimeCodeFlags\r
+{\r
+ kTcValid = 1,\r
+ kTcRunning = 1 << 1,\r
+ kTcReverse = 1 << 2,\r
+ kTcOnspeed = 1 << 3,\r
+ kTcStill = 1 << 4,\r
+ \r
+ kTcSpeedValid = 1 << 8\r
+} ASIOTimeCodeFlags;\r
+\r
+typedef struct AsioTimeInfo\r
+{\r
+ double speed; // absolute speed (1. = nominal)\r
+ ASIOTimeStamp systemTime; // system time related to samplePosition, in nanoseconds\r
+ // on mac, must be derived from Microseconds() (not UpTime()!)\r
+ // on windows, must be derived from timeGetTime()\r
+ ASIOSamples samplePosition;\r
+ ASIOSampleRate sampleRate; // current rate\r
+ unsigned long flags; // (see below)\r
+ char reserved[12];\r
+} AsioTimeInfo;\r
+\r
+typedef enum AsioTimeInfoFlags\r
+{\r
+ kSystemTimeValid = 1, // must always be valid\r
+ kSamplePositionValid = 1 << 1, // must always be valid\r
+ kSampleRateValid = 1 << 2,\r
+ kSpeedValid = 1 << 3,\r
+ \r
+ kSampleRateChanged = 1 << 4,\r
+ kClockSourceChanged = 1 << 5\r
+} AsioTimeInfoFlags;\r
+\r
+typedef struct ASIOTime // both input/output\r
+{\r
+ long reserved[4]; // must be 0\r
+ struct AsioTimeInfo timeInfo; // required\r
+ struct ASIOTimeCode timeCode; // optional, evaluated if (timeCode.flags & kTcValid)\r
+} ASIOTime;\r
+\r
+/*\r
+\r
+using time info:\r
+it is recommended to use the new method with time info even if the asio\r
+device does not support timecode; continuous calls to ASIOGetSamplePosition\r
+and ASIOGetSampleRate are avoided, and there is a more defined relationship\r
+between callback time and the time info.\r
+\r
+see the example below.\r
+to initiate time info mode, after you have received the callbacks pointer in\r
+ASIOCreateBuffers, you will call the asioMessage callback with kAsioSupportsTimeInfo\r
+as the argument. if this returns 1, host has accepted time info mode.\r
+now host expects the new callback bufferSwitchTimeInfo to be used instead\r
+of the old bufferSwitch method. the ASIOTime structure is assumed to be valid\r
+and accessible until the callback returns.\r
+\r
+using time code:\r
+if the device supports reading time code, it will call host's asioMessage callback\r
+with kAsioSupportsTimeCode as the selector. it may then fill the according\r
+fields and set the kTcValid flag.\r
+host will call the future method with the kAsioEnableTimeCodeRead selector when\r
+it wants to enable or disable tc reading by the device. you should also support\r
+the kAsioCanTimeInfo and kAsioCanTimeCode selectors in ASIOFuture (see example).\r
+\r
+note:\r
+the AsioTimeInfo/ASIOTimeCode pair is supposed to work in both directions.\r
+as a matter of convention, the relationship between the sample\r
+position counter and the time code at buffer switch time is\r
+(ignoring offset between tc and sample pos when tc is running):\r
+\r
+on input: sample 0 -> input buffer sample 0 -> time code 0\r
+on output: sample 0 -> output buffer sample 0 -> time code 0\r
+\r
+this means that for 'real' calculations, one has to take into account\r
+the according latencies.\r
+\r
+example:\r
+\r
+ASIOTime asioTime;\r
+\r
+in createBuffers()\r
+{\r
+ memset(&asioTime, 0, sizeof(ASIOTime));\r
+ AsioTimeInfo* ti = &asioTime.timeInfo;\r
+ ti->sampleRate = theSampleRate;\r
+ ASIOTimeCode* tc = &asioTime.timeCode;\r
+ tc->speed = 1.;\r
+ timeInfoMode = false;\r
+ canTimeCode = false;\r
+ if(callbacks->asioMessage(kAsioSupportsTimeInfo, 0, 0, 0) == 1)\r
+ {\r
+ timeInfoMode = true;\r
+#if kCanTimeCode\r
+ if(callbacks->asioMessage(kAsioSupportsTimeCode, 0, 0, 0) == 1)\r
+ canTimeCode = true;\r
+#endif\r
+ }\r
+}\r
+\r
+void switchBuffers(long doubleBufferIndex, bool processNow)\r
+{\r
+ if(timeInfoMode)\r
+ {\r
+ AsioTimeInfo* ti = &asioTime.timeInfo;\r
+ ti->flags = kSystemTimeValid | kSamplePositionValid | kSampleRateValid;\r
+ ti->systemTime = theNanoSeconds;\r
+ ti->samplePosition = theSamplePosition;\r
+ if(ti->sampleRate != theSampleRate)\r
+ ti->flags |= kSampleRateChanged;\r
+ ti->sampleRate = theSampleRate;\r
+\r
+#if kCanTimeCode\r
+ if(canTimeCode && timeCodeEnabled)\r
+ {\r
+ ASIOTimeCode* tc = &asioTime.timeCode;\r
+ tc->timeCodeSamples = tcSamples; // tc in samples\r
+ tc->flags = kTcValid | kTcRunning | kTcOnspeed; // if so...\r
+ }\r
+ ASIOTime* bb = callbacks->bufferSwitchTimeInfo(&asioTime, doubleBufferIndex, processNow ? ASIOTrue : ASIOFalse);\r
+#else\r
+ callbacks->bufferSwitchTimeInfo(&asioTime, doubleBufferIndex, processNow ? ASIOTrue : ASIOFalse);\r
+#endif\r
+ }\r
+ else\r
+ callbacks->bufferSwitch(doubleBufferIndex, ASIOFalse);\r
+}\r
+\r
+ASIOError ASIOFuture(long selector, void *params)\r
+{\r
+ switch(selector)\r
+ {\r
+ case kAsioEnableTimeCodeRead:\r
+ timeCodeEnabled = true;\r
+ return ASE_SUCCESS;\r
+ case kAsioDisableTimeCodeRead:\r
+ timeCodeEnabled = false;\r
+ return ASE_SUCCESS;\r
+ case kAsioCanTimeInfo:\r
+ return ASE_SUCCESS;\r
+ #if kCanTimeCode\r
+ case kAsioCanTimeCode:\r
+ return ASE_SUCCESS;\r
+ #endif\r
+ }\r
+ return ASE_NotPresent;\r
+};\r
+\r
+*/\r
+\r
+//- - - - - - - - - - - - - - - - - - - - - - - - -\r
+// application's audio stream handler callbacks\r
+//- - - - - - - - - - - - - - - - - - - - - - - - -\r
+\r
+typedef struct ASIOCallbacks\r
+{\r
+ void (*bufferSwitch) (long doubleBufferIndex, ASIOBool directProcess);\r
+ // bufferSwitch indicates that both input and output are to be processed.\r
+ // the current buffer half index (0 for A, 1 for B) determines\r
+ // - the output buffer that the host should start to fill. the other buffer\r
+ // will be passed to output hardware regardless of whether it got filled\r
+ // in time or not.\r
+ // - the input buffer that is now filled with incoming data. Note that\r
+ // because of the synchronicity of i/o, the input always has at\r
+ // least one buffer latency in relation to the output.\r
+ // directProcess suggests to the host whether it should immediately\r
+ // start processing (directProcess == ASIOTrue), or whether its process\r
+ // should be deferred because the call comes from a very low level\r
+ // (for instance, a high level priority interrupt), and direct processing\r
+ // would cause timing instabilities for the rest of the system. If in doubt,\r
+ // directProcess should be set to ASIOFalse.\r
+ // Note: bufferSwitch may be called at interrupt time for highest efficiency.\r
+\r
+ void (*sampleRateDidChange) (ASIOSampleRate sRate);\r
+ // gets called when the AudioStreamIO detects a sample rate change\r
+ // If sample rate is unknown, 0 is passed (for instance, clock loss\r
+ // when externally synchronized).\r
+\r
+ long (*asioMessage) (long selector, long value, void* message, double* opt);\r
+ // generic callback for various purposes, see selectors below.\r
+ // note this is only present if the asio version is 2 or higher\r
+\r
+ ASIOTime* (*bufferSwitchTimeInfo) (ASIOTime* params, long doubleBufferIndex, ASIOBool directProcess);\r
+ // new callback with time info. makes ASIOGetSamplePosition() and various\r
+ // calls to ASIOGetSampleRate obsolete,\r
+ // and allows for timecode sync etc. to be preferred; will be used if\r
+ // the driver calls asioMessage with selector kAsioSupportsTimeInfo.\r
+} ASIOCallbacks;\r
+\r
+// asioMessage selectors\r
+enum\r
+{\r
+ kAsioSelectorSupported = 1, // selector in <value>, returns 1L if supported,\r
+ // 0 otherwise\r
+ kAsioEngineVersion, // returns engine (host) asio implementation version,\r
+ // 2 or higher\r
+ kAsioResetRequest, // request driver reset. if accepted, this\r
+ // will close the driver (ASIO_Exit() ) and\r
+ // re-open it again (ASIO_Init() etc). some\r
+ // drivers need to reconfigure for instance\r
+ // when the sample rate changes, or some basic\r
+ // changes have been made in ASIO_ControlPanel().\r
+ // returns 1L; note the request is merely passed\r
+ // to the application, there is no way to determine\r
+ // if it gets accepted at this time (but it usually\r
+ // will be).\r
+ kAsioBufferSizeChange, // not yet supported, will currently always return 0L.\r
+ // for now, use kAsioResetRequest instead.\r
+ // once implemented, the new buffer size is expected\r
+ // in <value>, and on success returns 1L\r
+ kAsioResyncRequest, // the driver went out of sync, such that\r
+ // the timestamp is no longer valid. this\r
+ // is a request to re-start the engine and\r
+ // slave devices (sequencer). returns 1 for ok,\r
+ // 0 if not supported.\r
+ kAsioLatenciesChanged, // the drivers latencies have changed. The engine\r
+ // will refetch the latencies.\r
+ kAsioSupportsTimeInfo, // if host returns true here, it will expect the\r
+ // callback bufferSwitchTimeInfo to be called instead\r
+ // of bufferSwitch\r
+ kAsioSupportsTimeCode, // \r
+ kAsioMMCCommand, // unused - value: number of commands, message points to mmc commands\r
+ kAsioSupportsInputMonitor, // kAsioSupportsXXX return 1 if host supports this\r
+ kAsioSupportsInputGain, // unused and undefined\r
+ kAsioSupportsInputMeter, // unused and undefined\r
+ kAsioSupportsOutputGain, // unused and undefined\r
+ kAsioSupportsOutputMeter, // unused and undefined\r
+ kAsioOverload, // driver detected an overload\r
+\r
+ kAsioNumMessageSelectors\r
+};\r
+\r
+//---------------------------------------------------------------------------------------------------\r
+//---------------------------------------------------------------------------------------------------\r
+\r
+//- - - - - - - - - - - - - - - - - - - - - - - - -\r
+// (De-)Construction\r
+//- - - - - - - - - - - - - - - - - - - - - - - - -\r
+\r
+typedef struct ASIODriverInfo\r
+{\r
+ long asioVersion; // currently, 2\r
+ long driverVersion; // driver specific\r
+ char name[32];\r
+ char errorMessage[124];\r
+ void *sysRef; // on input: system reference\r
+ // (Windows: application main window handle, Mac & SGI: 0)\r
+} ASIODriverInfo;\r
+\r
+ASIOError ASIOInit(ASIODriverInfo *info);\r
+/* Purpose:\r
+ Initialize the AudioStreamIO.\r
+ Parameter:\r
+ info: pointer to an ASIODriver structure:\r
+ - asioVersion:\r
+ - on input, the host version. *** Note *** this is 0 for earlier asio\r
+ implementations, and the asioMessage callback is implemented\r
+ only if asioVersion is 2 or greater. sorry but due to a design fault\r
+ the driver doesn't have access to the host version in ASIOInit :-(\r
+ added selector for host (engine) version in the asioMessage callback\r
+ so we're ok from now on.\r
+ - on return, asio implementation version.\r
+ older versions are 1\r
+ if you support this version (namely, ASIO_outputReady() )\r
+ this should be 2 or higher. also see the note in\r
+ ASIO_getTimeStamp() !\r
+ - version: on return, the driver version (format is driver specific)\r
+ - name: on return, a null-terminated string containing the driver's name\r
+ - error message: on return, should contain a user message describing\r
+ the type of error that occurred during ASIOInit(), if any.\r
+ - sysRef: platform specific\r
+ Returns:\r
+ If neither input nor output is present ASE_NotPresent\r
+ will be returned.\r
+ ASE_NoMemory, ASE_HWMalfunction are other possible error conditions\r
+*/\r
+\r
+ASIOError ASIOExit(void);\r
+/* Purpose:\r
+ Terminates the AudioStreamIO.\r
+ Parameter:\r
+ None.\r
+ Returns:\r
+ If neither input nor output is present ASE_NotPresent\r
+ will be returned.\r
+ Notes: this implies ASIOStop() and ASIODisposeBuffers(),\r
+ meaning that no host callbacks must be accessed after ASIOExit().\r
+*/\r
+\r
+//- - - - - - - - - - - - - - - - - - - - - - - - -\r
+// Start/Stop\r
+//- - - - - - - - - - - - - - - - - - - - - - - - -\r
+\r
+ASIOError ASIOStart(void);\r
+/* Purpose:\r
+ Start input and output processing synchronously.\r
+ This will\r
+ - reset the sample counter to zero\r
+ - start the hardware (both input and output)\r
+ The first call to the hosts' bufferSwitch(index == 0) then tells\r
+ the host to read from input buffer A (index 0), and start\r
+ processing to output buffer A while output buffer B (which\r
+ has been filled by the host prior to calling ASIOStart())\r
+ is possibly sounding (see also ASIOGetLatencies()) \r
+ Parameter:\r
+ None.\r
+ Returns:\r
+ If neither input nor output is present, ASE_NotPresent\r
+ will be returned.\r
+ If the hardware fails to start, ASE_HWMalfunction will be returned.\r
+ Notes:\r
+ There is no restriction on the time that ASIOStart() takes\r
+ to perform (that is, it is not considered a realtime trigger).\r
+*/\r
+\r
+ASIOError ASIOStop(void);\r
+/* Purpose:\r
+ Stops input and output processing altogether.\r
+ Parameter:\r
+ None.\r
+ Returns:\r
+ If neither input nor output is present ASE_NotPresent\r
+ will be returned.\r
+ Notes:\r
+ On return from ASIOStop(), the driver must in no\r
+ case call the hosts' bufferSwitch() routine.\r
+*/\r
+\r
+//- - - - - - - - - - - - - - - - - - - - - - - - -\r
+// Inquiry methods and sample rate\r
+//- - - - - - - - - - - - - - - - - - - - - - - - -\r
+\r
+ASIOError ASIOGetChannels(long *numInputChannels, long *numOutputChannels);\r
+/* Purpose:\r
+ Returns number of individual input/output channels.\r
+ Parameter:\r
+ numInputChannels will hold the number of available input channels\r
+ numOutputChannels will hold the number of available output channels\r
+ Returns:\r
+ If no input/output is present ASE_NotPresent will be returned.\r
+ If only inputs, or only outputs are available, the according\r
+ other parameter will be zero, and ASE_OK is returned.\r
+*/\r
+\r
+ASIOError ASIOGetLatencies(long *inputLatency, long *outputLatency);\r
+/* Purpose:\r
+ Returns the input and output latencies. This includes\r
+ device specific delays, like FIFOs etc.\r
+ Parameter:\r
+ inputLatency will hold the 'age' of the first sample frame\r
+ in the input buffer when the hosts reads it in bufferSwitch()\r
+ (this is theoretical, meaning it does not include the overhead\r
+ and delay between the actual physical switch, and the time\r
+ when bufferSwitch() enters).\r
+ This will usually be the size of one block in sample frames, plus\r
+ device specific latencies.\r
+\r
+ outputLatency will specify the time between the buffer switch,\r
+ and the time when the next play buffer will start to sound.\r
+ The next play buffer is defined as the one the host starts\r
+ processing after (or at) bufferSwitch(), indicated by the\r
+ index parameter (0 for buffer A, 1 for buffer B).\r
+ It will usually be either one block, if the host writes directly\r
+ to a dma buffer, or two or more blocks if the buffer is 'latched' by\r
+ the driver. As an example, on ASIOStart(), the host will have filled\r
+ the play buffer at index 1 already; when it gets the callback (with\r
+ the parameter index == 0), this tells it to read from the input\r
+ buffer 0, and start to fill the play buffer 0 (assuming that now\r
+ play buffer 1 is already sounding). In this case, the output\r
+ latency is one block. If the driver decides to copy buffer 1\r
+ at that time, and pass it to the hardware at the next slot (which\r
+ is most commonly done, but should be avoided), the output latency\r
+ becomes two blocks instead, resulting in a total i/o latency of at least\r
+ 3 blocks. As memory access is the main bottleneck in native dsp processing,\r
+ and to achieve less latency, it is highly recommended to try to avoid\r
+ copying (this is also why the driver is the owner of the buffers). To\r
+ summarize, the minimum i/o latency can be achieved if the input buffer\r
+ is processed by the host into the output buffer which will physically\r
+ start to sound on the next time slice. Also note that the host expects\r
+ the bufferSwitch() callback to be accessed for each time slice in order\r
+ to retain sync, possibly recursively; if it fails to process a block in\r
+ time, it will suspend its operation for some time in order to recover.\r
+ Returns:\r
+ If no input/output is present ASE_NotPresent will be returned.\r
+*/\r
+\r
+ASIOError ASIOGetBufferSize(long *minSize, long *maxSize, long *preferredSize, long *granularity);\r
+/* Purpose:\r
+ Returns min, max, and preferred buffer sizes for input/output\r
+ Parameter:\r
+ minSize will hold the minimum buffer size\r
+ maxSize will hold the maximum possible buffer size\r
+ preferredSize will hold the preferred buffer size (a size which\r
+ best fits performance and hardware requirements)\r
+ granularity will hold the granularity at which buffer sizes\r
+ may differ. Usually, the buffer size will be a power of 2;\r
+ in this case, granularity will hold -1 on return, signalling\r
+ possible buffer sizes starting from minSize, increased in\r
+ powers of 2 up to maxSize.\r
+ Returns:\r
+ If no input/output is present ASE_NotPresent will be returned.\r
+ Notes:\r
+ When minimum and maximum buffer size are equal,\r
+ the preferred buffer size has to be the same value as well; granularity\r
+ should be 0 in this case.\r
+*/\r
+\r
+ASIOError ASIOCanSampleRate(ASIOSampleRate sampleRate);\r
+/* Purpose:\r
+ Inquires the hardware for the available sample rates.\r
+ Parameter:\r
+ sampleRate is the rate in question.\r
+ Returns:\r
+ If the inquired sample rate is not supported, ASE_NoClock will be returned.\r
+ If no input/output is present ASE_NotPresent will be returned.\r
+*/\r
+ASIOError ASIOGetSampleRate(ASIOSampleRate *currentRate);\r
+/* Purpose:\r
+ Get the current sample Rate.\r
+ Parameter:\r
+ currentRate will hold the current sample rate on return.\r
+ Returns:\r
+ If sample rate is unknown, sampleRate will be 0 and ASE_NoClock will be returned.\r
+ If no input/output is present ASE_NotPresent will be returned.\r
+ Notes:\r
+*/\r
+\r
+ASIOError ASIOSetSampleRate(ASIOSampleRate sampleRate);\r
+/* Purpose:\r
+ Set the hardware to the requested sample Rate. If sampleRate == 0,\r
+ enable external sync.\r
+ Parameter:\r
+ sampleRate: on input, the requested rate\r
+ Returns:\r
+ If sampleRate is unknown ASE_NoClock will be returned.\r
+ If the current clock is external, and sampleRate is != 0,\r
+ ASE_InvalidMode will be returned\r
+ If no input/output is present ASE_NotPresent will be returned.\r
+ Notes:\r
+*/\r
+\r
+typedef struct ASIOClockSource\r
+{\r
+ long index; // as used for ASIOSetClockSource()\r
+ long associatedChannel; // for instance, S/PDIF or AES/EBU\r
+ long associatedGroup; // see channel groups (ASIOGetChannelInfo())\r
+ ASIOBool isCurrentSource; // ASIOTrue if this is the current clock source\r
+ char name[32]; // for user selection\r
+} ASIOClockSource;\r
+\r
+ASIOError ASIOGetClockSources(ASIOClockSource *clocks, long *numSources);\r
+/* Purpose:\r
+ Get the available external audio clock sources\r
+ Parameter:\r
+ clocks points to an array of ASIOClockSource structures:\r
+ - index: this is used to identify the clock source\r
+ when ASIOSetClockSource() is accessed, should be\r
+ an index counting from zero\r
+ - associatedInputChannel: the first channel of an associated\r
+ input group, if any.\r
+ - associatedGroup: the group index of that channel.\r
+ groups of channels are defined to separate for\r
+ instance analog, S/PDIF, AES/EBU, ADAT connectors etc,\r
+ when present simultaneously. Note that associated channel\r
+ is enumerated according to numInputs/numOutputs, means it\r
+ is independent from a group (see also ASIOGetChannelInfo())\r
+ inputs are associated to a clock if the physical connection\r
+ transfers both data and clock (like S/PDIF, AES/EBU, or\r
+ ADAT inputs). if there is no input channel associated with\r
+ the clock source (like Word Clock, or internal oscillator), both\r
+ associatedChannel and associatedGroup should be set to -1.\r
+ - isCurrentSource: on exit, ASIOTrue if this is the current clock\r
+ source, ASIOFalse else\r
+ - name: a null-terminated string for user selection of the available sources.\r
+ numSources:\r
+ on input: the number of allocated array members\r
+ on output: the number of available clock sources, at least\r
+ 1 (internal clock generator).\r
+ Returns:\r
+ If no input/output is present ASE_NotPresent will be returned.\r
+ Notes:\r
+*/\r
+\r
+ASIOError ASIOSetClockSource(long index);\r
+/* Purpose:\r
+ Set the audio clock source\r
+ Parameter:\r
+ index as obtained from an inquiry to ASIOGetClockSources()\r
+ Returns:\r
+ If no input/output is present ASE_NotPresent will be returned.\r
+ If the clock can not be selected because an input channel which\r
+ carries the current clock source is active, ASE_InvalidMode\r
+ *may* be returned (this depends on the properties of the driver\r
+ and/or hardware).\r
+ Notes:\r
+ Should *not* return ASE_NoClock if there is no clock signal present\r
+ at the selected source; this will be inquired via ASIOGetSampleRate().\r
+ It should call the host callback procedure sampleRateHasChanged(),\r
+ if the switch causes a sample rate change, or if no external clock\r
+ is present at the selected source.\r
+*/\r
+\r
+ASIOError ASIOGetSamplePosition (ASIOSamples *sPos, ASIOTimeStamp *tStamp);\r
+/* Purpose:\r
+ Inquires the sample position/time stamp pair.\r
+ Parameter:\r
+ sPos will hold the sample position on return. The sample\r
+ position is reset to zero when ASIOStart() gets called.\r
+ tStamp will hold the system time when the sample position\r
+ was latched.\r
+ Returns:\r
+ If no input/output is present, ASE_NotPresent will be returned.\r
+ If there is no clock, ASE_SPNotAdvancing will be returned.\r
+ Notes:\r
+\r
+ in order to be able to synchronise properly,\r
+ the sample position / time stamp pair must refer to the current block,\r
+ that is, the engine will call ASIOGetSamplePosition() in its bufferSwitch()\r
+ callback and expect the time for the current block. thus, when requested\r
+ in the very first bufferSwitch after ASIO_Start(), the sample position\r
+ should be zero, and the time stamp should refer to the very time where\r
+ the stream was started. it also means that the sample position must be\r
+ block aligned. the driver must ensure proper interpolation if the system\r
+ time can not be determined for the block position. the driver is responsible\r
+ for precise time stamps as it usually has most direct access to lower\r
+ level resources. proper behaviour of ASIO_GetSamplePosition() and ASIO_GetLatencies()\r
+ are essential for precise media synchronization!\r
+*/\r
+\r
+typedef struct ASIOChannelInfo\r
+{\r
+ long channel; // on input, channel index\r
+ ASIOBool isInput; // on input\r
+ ASIOBool isActive; // on exit\r
+ long channelGroup; // dto\r
+ ASIOSampleType type; // dto\r
+ char name[32]; // dto\r
+} ASIOChannelInfo;\r
+\r
+ASIOError ASIOGetChannelInfo(ASIOChannelInfo *info);\r
+/* Purpose:\r
+ retrieve information about the nature of a channel\r
+ Parameter:\r
+ info: pointer to a ASIOChannelInfo structure with\r
+ - channel: on input, the channel index of the channel in question.\r
+ - isInput: on input, ASIOTrue if info for an input channel is\r
+ requested, else output\r
+ - channelGroup: on return, the channel group that the channel\r
+ belongs to. For drivers which support different types of\r
+ channels, like analog, S/PDIF, AES/EBU, ADAT etc interfaces,\r
+ there should be a reasonable grouping of these types. Groups\r
+ are always independent from a channel index, that is, a channel\r
+ index always counts from 0 to numInputs/numOutputs regardless\r
+ of the group it may belong to.\r
+ There will always be at least one group (group 0). Please\r
+ also note that by default, the host may decide to activate\r
+ channels 0 and 1; thus, these should belong to the most\r
+ useful type (analog i/o, if present).\r
+ - type: on return, contains the sample type of the channel\r
+ - isActive: on return, ASIOTrue if channel is active as it was\r
+ installed by ASIOCreateBuffers(), ASIOFalse else\r
+ - name: describing the type of channel in question. Used to allow\r
+ for user selection, and enabling of specific channels. examples:\r
+ "Analog In", "SPDIF Out" etc\r
+ Returns:\r
+ If no input/output is present ASE_NotPresent will be returned.\r
+ Notes:\r
+ If possible, the string should be organised such that the first\r
+ characters are most significantly describing the nature of the\r
+ port, to allow for identification even if the view showing the\r
+ port name is too small to display more than 8 characters, for\r
+ instance.\r
+*/\r
+\r
+//- - - - - - - - - - - - - - - - - - - - - - - - -\r
+// Buffer preparation\r
+//- - - - - - - - - - - - - - - - - - - - - - - - -\r
+\r
+typedef struct ASIOBufferInfo\r
+{\r
+ ASIOBool isInput; // on input: ASIOTrue: input, else output\r
+ long channelNum; // on input: channel index\r
+ void *buffers[2]; // on output: double buffer addresses\r
+} ASIOBufferInfo;\r
+\r
+ASIOError ASIOCreateBuffers(ASIOBufferInfo *bufferInfos, long numChannels,\r
+ long bufferSize, ASIOCallbacks *callbacks);\r
+\r
+/* Purpose:\r
+ Allocates input/output buffers for all input and output channels to be activated.\r
+ Parameter:\r
+ bufferInfos is a pointer to an array of ASIOBufferInfo structures:\r
+ - isInput: on input, ASIOTrue if the buffer is to be allocated\r
+ for an input, output buffer else\r
+ - channelNum: on input, the index of the channel in question\r
+ (counting from 0)\r
+ - buffers: on exit, 2 pointers to the halves of the channels' double-buffer.\r
+ the size of the buffer(s) of course depend on both the ASIOSampleType\r
+ as obtained from ASIOGetChannelInfo(), and bufferSize\r
+ numChannels is the sum of all input and output channels to be created;\r
+ thus bufferInfos is a pointer to an array of numChannels ASIOBufferInfo\r
+ structures.\r
+ bufferSize selects one of the possible buffer sizes as obtained from\r
+ ASIOGetBufferSizes().\r
+ callbacks is a pointer to an ASIOCallbacks structure.\r
+ Returns:\r
+ If not enough memory is available ASE_NoMemory will be returned.\r
+ If no input/output is present ASE_NotPresent will be returned.\r
+ If bufferSize is not supported, or one or more of the bufferInfos elements\r
+ contain invalid settings, ASE_InvalidMode will be returned.\r
+ Notes:\r
+ If individual channel selection is not possible but requested,\r
+ the driver has to handle this. namely, bufferSwitch() will only\r
+ have filled buffers of enabled outputs. If possible, processing\r
+ and buss activities overhead should be avoided for channels which\r
+ were not enabled here.\r
+*/\r
+\r
+ASIOError ASIODisposeBuffers(void);\r
+/* Purpose:\r
+ Releases all buffers for the device.\r
+ Parameter:\r
+ None.\r
+ Returns:\r
+ If no buffer were ever prepared, ASE_InvalidMode will be returned.\r
+ If no input/output is present ASE_NotPresent will be returned.\r
+ Notes:\r
+ This implies ASIOStop().\r
+*/\r
+\r
+ASIOError ASIOControlPanel(void);\r
+/* Purpose:\r
+ request the driver to start a control panel component\r
+ for device specific user settings. This will not be\r
+ accessed on some platforms (where the component is accessed\r
+ instead).\r
+ Parameter:\r
+ None.\r
+ Returns:\r
+ If no panel is available ASE_NotPresent will be returned.\r
+ Actually, the return code is ignored.\r
+ Notes:\r
+ if the user applied settings which require a re-configuration\r
+ of parts or all of the engine and/or driver (such as a change of\r
+ the block size), the asioMessage callback can be used (see\r
+ ASIO_Callbacks).\r
+*/\r
+\r
+ASIOError ASIOFuture(long selector, void *params);\r
+/* Purpose:\r
+ various\r
+ Parameter:\r
+ selector: operation Code as to be defined. zero is reserved for\r
+ testing purposes.\r
+ params: depends on the selector; usually pointer to a structure\r
+ for passing and retrieving any type and amount of parameters.\r
+ Returns:\r
+ the return value is also selector dependent. if the selector\r
+ is unknown, ASE_InvalidParameter should be returned to prevent\r
+ further calls with this selector. on success, ASE_SUCCESS\r
+ must be returned (note: ASE_OK is *not* sufficient!)\r
+ Notes:\r
+ see selectors defined below. \r
+*/\r
+\r
+enum\r
+{\r
+ kAsioEnableTimeCodeRead = 1, // no arguments\r
+ kAsioDisableTimeCodeRead, // no arguments\r
+ kAsioSetInputMonitor, // ASIOInputMonitor* in params\r
+ kAsioTransport, // ASIOTransportParameters* in params\r
+ kAsioSetInputGain, // ASIOChannelControls* in params, apply gain\r
+ kAsioGetInputMeter, // ASIOChannelControls* in params, fill meter\r
+ kAsioSetOutputGain, // ASIOChannelControls* in params, apply gain\r
+ kAsioGetOutputMeter, // ASIOChannelControls* in params, fill meter\r
+ kAsioCanInputMonitor, // no arguments for kAsioCanXXX selectors\r
+ kAsioCanTimeInfo,\r
+ kAsioCanTimeCode,\r
+ kAsioCanTransport,\r
+ kAsioCanInputGain,\r
+ kAsioCanInputMeter,\r
+ kAsioCanOutputGain,\r
+ kAsioCanOutputMeter,\r
+ kAsioOptionalOne,\r
+ \r
+ // DSD support\r
+ // The following extensions are required to allow switching\r
+ // and control of the DSD subsystem.\r
+ kAsioSetIoFormat = 0x23111961, /* ASIOIoFormat * in params. */\r
+ kAsioGetIoFormat = 0x23111983, /* ASIOIoFormat * in params. */\r
+ kAsioCanDoIoFormat = 0x23112004, /* ASIOIoFormat * in params. */\r
+ \r
+ // Extension for drop out detection\r
+ kAsioCanReportOverload = 0x24042012, /* return ASE_SUCCESS if driver can detect and report overloads */\r
+ \r
+ kAsioGetInternalBufferSamples = 0x25042012 /* ASIOInternalBufferInfo * in params. Deliver size of driver internal buffering, return ASE_SUCCESS if supported */\r
+};\r
+\r
+typedef struct ASIOInputMonitor\r
+{\r
+ long input; // this input was set to monitor (or off), -1: all\r
+ long output; // suggested output for monitoring the input (if so)\r
+ long gain; // suggested gain, ranging 0 - 0x7fffffffL (-inf to +12 dB)\r
+ ASIOBool state; // ASIOTrue => on, ASIOFalse => off\r
+ long pan; // suggested pan, 0 => all left, 0x7fffffff => right\r
+} ASIOInputMonitor;\r
+\r
+typedef struct ASIOChannelControls\r
+{\r
+ long channel; // on input, channel index\r
+ ASIOBool isInput; // on input\r
+ long gain; // on input, ranges 0 thru 0x7fffffff\r
+ long meter; // on return, ranges 0 thru 0x7fffffff\r
+ char future[32];\r
+} ASIOChannelControls;\r
+\r
+typedef struct ASIOTransportParameters\r
+{\r
+ long command; // see enum below\r
+ ASIOSamples samplePosition;\r
+ long track;\r
+ long trackSwitches[16]; // 512 tracks on/off\r
+ char future[64];\r
+} ASIOTransportParameters;\r
+\r
+enum\r
+{\r
+ kTransStart = 1,\r
+ kTransStop,\r
+ kTransLocate, // to samplePosition\r
+ kTransPunchIn,\r
+ kTransPunchOut,\r
+ kTransArmOn, // track\r
+ kTransArmOff, // track\r
+ kTransMonitorOn, // track\r
+ kTransMonitorOff, // track\r
+ kTransArm, // trackSwitches\r
+ kTransMonitor // trackSwitches\r
+};\r
+\r
+/*\r
+// DSD support\r
+// Some notes on how to use ASIOIoFormatType.\r
+//\r
+// The caller will fill the format with the request types.\r
+// If the board can do the request then it will leave the\r
+// values unchanged. If the board does not support the\r
+// request then it will change that entry to Invalid (-1)\r
+//\r
+// So to request DSD then\r
+//\r
+// ASIOIoFormat NeedThis={kASIODSDFormat};\r
+//\r
+// if(ASE_SUCCESS != ASIOFuture(kAsioSetIoFormat,&NeedThis) ){\r
+// // If the board did not accept one of the parameters then the\r
+// // whole call will fail and the failing parameter will\r
+// // have had its value changed to -1.\r
+// }\r
+//\r
+// Note: Switching between the formats need to be done before the "prepared"\r
+// state (see ASIO 2 documentation) is entered.\r
+*/\r
+typedef long int ASIOIoFormatType;\r
+enum ASIOIoFormatType_e\r
+{\r
+ kASIOFormatInvalid = -1,\r
+ kASIOPCMFormat = 0,\r
+ kASIODSDFormat = 1,\r
+};\r
+\r
+typedef struct ASIOIoFormat_s\r
+{\r
+ ASIOIoFormatType FormatType;\r
+ char future[512-sizeof(ASIOIoFormatType)];\r
+} ASIOIoFormat;\r
+\r
+// Extension for drop detection\r
+// Note: Refers to buffering that goes beyond the double buffer e.g. used by USB driver designs\r
+typedef struct ASIOInternalBufferInfo\r
+{\r
+ long inputSamples; // size of driver's internal input buffering which is included in getLatencies\r
+ long outputSamples; // size of driver's internal output buffering which is included in getLatencies\r
+} ASIOInternalBufferInfo;\r
+\r
+\r
+ASIOError ASIOOutputReady(void);\r
+/* Purpose:\r
+ this tells the driver that the host has completed processing\r
+ the output buffers. if the data format required by the hardware\r
+ differs from the supported asio formats, but the hardware\r
+ buffers are DMA buffers, the driver will have to convert\r
+ the audio stream data; as the bufferSwitch callback is\r
+ usually issued at dma block switch time, the driver will\r
+ have to convert the *previous* host buffer, which increases\r
+ the output latency by one block.\r
+ when the host finds out that ASIOOutputReady() returns\r
+ true, it will issue this call whenever it completed\r
+ output processing. then the driver can convert the\r
+ host data directly to the dma buffer to be played next,\r
+ reducing output latency by one block.\r
+ another way to look at it is, that the buffer switch is called\r
+ in order to pass the *input* stream to the host, so that it can\r
+ process the input into the output, and the output stream is passed\r
+ to the driver when the host has completed its process.\r
+ Parameter:\r
+ None\r
+ Returns:\r
+ only if the above mentioned scenario is given, and a reduction\r
+ of output latency can be achieved by this mechanism, should\r
+ ASE_OK be returned. otherwise (and usually), ASE_NotPresent\r
+ should be returned in order to prevent further calls to this\r
+ function. note that the host may want to determine if it is\r
+ to use this when the system is not yet fully initialized, so\r
+ ASE_OK should always be returned if the mechanism makes sense. \r
+ Notes:\r
+ please remember to adjust ASIOGetLatencies() according to\r
+ whether ASIOOutputReady() was ever called or not, if your\r
+ driver supports this scenario.\r
+ also note that the engine may fail to call ASIO_OutputReady()\r
+ in time in overload cases. as already mentioned, bufferSwitch\r
+ should be called for every block regardless of whether a block\r
+ could be processed in time.\r
+*/\r
+\r
+// restore old alignment\r
+#if defined(_MSC_VER) && !defined(__MWERKS__) \r
+#pragma pack(pop)\r
+#elif PRAGMA_ALIGN_SUPPORTED\r
+#pragma options align = reset\r
+#endif\r
+\r
+#endif\r
+\r
--- /dev/null
+#include <string.h>\r
+#include "asiodrivers.h"\r
+\r
+AsioDrivers* asioDrivers = 0;\r
+\r
+bool loadAsioDriver(char *name);\r
+\r
+bool loadAsioDriver(char *name)\r
+{\r
+ if(!asioDrivers)\r
+ asioDrivers = new AsioDrivers();\r
+ if(asioDrivers)\r
+ return asioDrivers->loadDriver(name);\r
+ return false;\r
+}\r
+\r
+//------------------------------------------------------------------------------------\r
+\r
+#if MAC\r
+\r
+bool resolveASIO(unsigned long aconnID);\r
+\r
+AsioDrivers::AsioDrivers() : CodeFragments("ASIO Drivers", 'AsDr', 'Asio')\r
+{\r
+ connID = -1;\r
+ curIndex = -1;\r
+}\r
+\r
+AsioDrivers::~AsioDrivers()\r
+{\r
+ removeCurrentDriver();\r
+}\r
+\r
+bool AsioDrivers::getCurrentDriverName(char *name)\r
+{\r
+ if(curIndex >= 0)\r
+ return getName(curIndex, name);\r
+ return false;\r
+}\r
+\r
+long AsioDrivers::getDriverNames(char **names, long maxDrivers)\r
+{\r
+ for(long i = 0; i < getNumFragments() && i < maxDrivers; i++)\r
+ getName(i, names[i]);\r
+ return getNumFragments() < maxDrivers ? getNumFragments() : maxDrivers;\r
+}\r
+\r
+bool AsioDrivers::loadDriver(char *name)\r
+{\r
+ char dname[64];\r
+ unsigned long newID;\r
+\r
+ for(long i = 0; i < getNumFragments(); i++)\r
+ {\r
+ if(getName(i, dname) && !strcmp(name, dname))\r
+ {\r
+ if(newInstance(i, &newID))\r
+ {\r
+ if(resolveASIO(newID))\r
+ {\r
+ if(connID != -1)\r
+ removeInstance(curIndex, connID);\r
+ curIndex = i;\r
+ connID = newID;\r
+ return true;\r
+ }\r
+ }\r
+ break;\r
+ }\r
+ }\r
+ return false;\r
+}\r
+\r
+void AsioDrivers::removeCurrentDriver()\r
+{\r
+ if(connID != -1)\r
+ removeInstance(curIndex, connID);\r
+ connID = -1;\r
+ curIndex = -1;\r
+}\r
+\r
+//------------------------------------------------------------------------------------\r
+\r
+#elif WINDOWS\r
+\r
+#include "iasiodrv.h"\r
+\r
+extern IASIO* theAsioDriver;\r
+\r
+AsioDrivers::AsioDrivers() : AsioDriverList()\r
+{\r
+ curIndex = -1;\r
+}\r
+\r
+AsioDrivers::~AsioDrivers()\r
+{\r
+}\r
+\r
+bool AsioDrivers::getCurrentDriverName(char *name)\r
+{\r
+ if(curIndex >= 0)\r
+ return asioGetDriverName(curIndex, name, 32) == 0 ? true : false;\r
+ name[0] = 0;\r
+ return false;\r
+}\r
+\r
+long AsioDrivers::getDriverNames(char **names, long maxDrivers)\r
+{\r
+ for(long i = 0; i < asioGetNumDev() && i < maxDrivers; i++)\r
+ asioGetDriverName(i, names[i], 32);\r
+ return asioGetNumDev() < maxDrivers ? asioGetNumDev() : maxDrivers;\r
+}\r
+\r
+bool AsioDrivers::loadDriver(char *name)\r
+{\r
+ char dname[64];\r
+ char curName[64];\r
+\r
+ for(long i = 0; i < asioGetNumDev(); i++)\r
+ {\r
+ if(!asioGetDriverName(i, dname, 32) && !strcmp(name, dname))\r
+ {\r
+ curName[0] = 0;\r
+ getCurrentDriverName(curName); // in case we fail...\r
+ removeCurrentDriver();\r
+\r
+ if(!asioOpenDriver(i, (void **)&theAsioDriver))\r
+ {\r
+ curIndex = i;\r
+ return true;\r
+ }\r
+ else\r
+ {\r
+ theAsioDriver = 0;\r
+ if(curName[0] && strcmp(dname, curName))\r
+ loadDriver(curName); // try restore\r
+ }\r
+ break;\r
+ }\r
+ }\r
+ return false;\r
+}\r
+\r
+void AsioDrivers::removeCurrentDriver()\r
+{\r
+ if(curIndex != -1)\r
+ asioCloseDriver(curIndex);\r
+ curIndex = -1;\r
+}\r
+\r
+#elif SGI || BEOS\r
+\r
+#include "asiolist.h"\r
+\r
+AsioDrivers::AsioDrivers() \r
+ : AsioDriverList()\r
+{\r
+ curIndex = -1;\r
+}\r
+\r
+AsioDrivers::~AsioDrivers()\r
+{\r
+}\r
+\r
+bool AsioDrivers::getCurrentDriverName(char *name)\r
+{\r
+ return false;\r
+}\r
+\r
+long AsioDrivers::getDriverNames(char **names, long maxDrivers)\r
+{\r
+ return 0;\r
+}\r
+\r
+bool AsioDrivers::loadDriver(char *name)\r
+{\r
+ return false;\r
+}\r
+\r
+void AsioDrivers::removeCurrentDriver()\r
+{\r
+}\r
+\r
+#else\r
+#error implement me\r
+#endif\r
--- /dev/null
+#ifndef __AsioDrivers__\r
+#define __AsioDrivers__\r
+\r
+#include "ginclude.h"\r
+\r
+#if MAC\r
+#include "CodeFragments.hpp"\r
+\r
+class AsioDrivers : public CodeFragments\r
+\r
+#elif WINDOWS\r
+#include <windows.h>\r
+#include "asiolist.h"\r
+\r
+class AsioDrivers : public AsioDriverList\r
+\r
+#elif SGI || BEOS\r
+#include "asiolist.h"\r
+\r
+class AsioDrivers : public AsioDriverList\r
+\r
+#else\r
+#error implement me\r
+#endif\r
+\r
+{\r
+public:\r
+ AsioDrivers();\r
+ ~AsioDrivers();\r
+ \r
+ bool getCurrentDriverName(char *name);\r
+ long getDriverNames(char **names, long maxDrivers);\r
+ bool loadDriver(char *name);\r
+ void removeCurrentDriver();\r
+ long getCurrentDriverIndex() {return curIndex;}\r
+protected:\r
+ unsigned long connID;\r
+ long curIndex;\r
+};\r
+\r
+#endif\r
--- /dev/null
+/*\r
+ Steinberg Audio Stream I/O API\r
+ (c) 1996, Steinberg Soft- und Hardware GmbH\r
+ charlie (May 1996)\r
+\r
+ asiodrvr.h\r
+ c++ superclass to implement asio functionality. from this,\r
+ you can derive whatever required\r
+*/\r
+\r
+#ifndef _asiodrvr_\r
+#define _asiodrvr_\r
+\r
+// cpu and os system we are running on\r
+#include "asiosys.h"\r
+// basic "C" interface\r
+#include "asio.h"\r
+\r
+class AsioDriver;\r
+extern AsioDriver *getDriver(); // for generic constructor \r
+\r
+#if WINDOWS\r
+#include <windows.h>\r
+#include "combase.h"\r
+#include "iasiodrv.h"\r
+class AsioDriver : public IASIO ,public CUnknown\r
+{\r
+public:\r
+ AsioDriver(LPUNKNOWN pUnk, HRESULT *phr);\r
+\r
+ DECLARE_IUNKNOWN\r
+ // Factory method\r
+ static CUnknown *CreateInstance(LPUNKNOWN pUnk, HRESULT *phr);\r
+ // IUnknown\r
+ virtual HRESULT STDMETHODCALLTYPE NonDelegatingQueryInterface(REFIID riid,void **ppvObject);\r
+\r
+#else\r
+\r
+class AsioDriver\r
+{\r
+public:\r
+ AsioDriver();\r
+#endif\r
+ virtual ~AsioDriver();\r
+\r
+ virtual ASIOBool init(void* sysRef);\r
+ virtual void getDriverName(char *name); // max 32 bytes incl. terminating zero\r
+ virtual long getDriverVersion();\r
+ virtual void getErrorMessage(char *string); // max 124 bytes incl.\r
+\r
+ virtual ASIOError start();\r
+ virtual ASIOError stop();\r
+\r
+ virtual ASIOError getChannels(long *numInputChannels, long *numOutputChannels);\r
+ virtual ASIOError getLatencies(long *inputLatency, long *outputLatency);\r
+ virtual ASIOError getBufferSize(long *minSize, long *maxSize,\r
+ long *preferredSize, long *granularity);\r
+\r
+ virtual ASIOError canSampleRate(ASIOSampleRate sampleRate);\r
+ virtual ASIOError getSampleRate(ASIOSampleRate *sampleRate);\r
+ virtual ASIOError setSampleRate(ASIOSampleRate sampleRate);\r
+ virtual ASIOError getClockSources(ASIOClockSource *clocks, long *numSources);\r
+ virtual ASIOError setClockSource(long reference);\r
+\r
+ virtual ASIOError getSamplePosition(ASIOSamples *sPos, ASIOTimeStamp *tStamp);\r
+ virtual ASIOError getChannelInfo(ASIOChannelInfo *info);\r
+\r
+ virtual ASIOError createBuffers(ASIOBufferInfo *bufferInfos, long numChannels,\r
+ long bufferSize, ASIOCallbacks *callbacks);\r
+ virtual ASIOError disposeBuffers();\r
+\r
+ virtual ASIOError controlPanel();\r
+ virtual ASIOError future(long selector, void *opt);\r
+ virtual ASIOError outputReady();\r
+};\r
+#endif\r
--- /dev/null
+#include <windows.h>
+#include "iasiodrv.h"
+#include "asiolist.h"
+
+#define ASIODRV_DESC "description"
+#define INPROC_SERVER "InprocServer32"
+#define ASIO_PATH "software\\asio"
+#define COM_CLSID "clsid"
+
+// ******************************************************************
+// Local Functions
+// ******************************************************************
+static LONG findDrvPath (char *clsidstr,char *dllpath,int dllpathsize)
+{
+ HKEY hkEnum,hksub,hkpath;
+ char databuf[512];
+ LONG cr,rc = -1;
+ DWORD datatype,datasize;
+ DWORD index;
+ OFSTRUCT ofs;
+ HFILE hfile;
+ BOOL found = FALSE;
+
+#ifdef UNICODE
+ CharLowerBuffA(clsidstr,strlen(clsidstr));
+ if ((cr = RegOpenKeyA(HKEY_CLASSES_ROOT,COM_CLSID,&hkEnum)) == ERROR_SUCCESS) {
+
+ index = 0;
+ while (cr == ERROR_SUCCESS && !found) {
+ cr = RegEnumKeyA(hkEnum,index++,databuf,512);
+ if (cr == ERROR_SUCCESS) {
+ CharLowerBuffA(databuf,strlen(databuf));
+ if (!(strcmp(databuf,clsidstr))) {
+ if ((cr = RegOpenKeyExA(hkEnum,databuf,0,KEY_READ,&hksub)) == ERROR_SUCCESS) {
+ if ((cr = RegOpenKeyExA(hksub,INPROC_SERVER,0,KEY_READ,&hkpath)) == ERROR_SUCCESS) {
+ datatype = REG_SZ; datasize = (DWORD)dllpathsize;
+ cr = RegQueryValueEx(hkpath,0,0,&datatype,(LPBYTE)dllpath,&datasize);
+ if (cr == ERROR_SUCCESS) {
+ memset(&ofs,0,sizeof(OFSTRUCT));
+ ofs.cBytes = sizeof(OFSTRUCT);
+ hfile = OpenFile(dllpath,&ofs,OF_EXIST);
+ if (hfile) rc = 0;
+ }
+ RegCloseKey(hkpath);
+ }
+ RegCloseKey(hksub);
+ }
+ found = TRUE; // break out
+ }
+ }
+ }
+ RegCloseKey(hkEnum);
+ }
+#else
+ CharLowerBuff(clsidstr,strlen(clsidstr));
+ if ((cr = RegOpenKey(HKEY_CLASSES_ROOT,COM_CLSID,&hkEnum)) == ERROR_SUCCESS) {
+
+ index = 0;
+ while (cr == ERROR_SUCCESS && !found) {
+ cr = RegEnumKey(hkEnum,index++,databuf,512);
+ if (cr == ERROR_SUCCESS) {
+ CharLowerBuff(databuf,strlen(databuf));
+ if (!(strcmp(databuf,clsidstr))) {
+ if ((cr = RegOpenKeyEx(hkEnum,databuf,0,KEY_READ,&hksub)) == ERROR_SUCCESS) {
+ if ((cr = RegOpenKeyEx(hksub,INPROC_SERVER,0,KEY_READ,&hkpath)) == ERROR_SUCCESS) {
+ datatype = REG_SZ; datasize = (DWORD)dllpathsize;
+ cr = RegQueryValueEx(hkpath,0,0,&datatype,(LPBYTE)dllpath,&datasize);
+ if (cr == ERROR_SUCCESS) {
+ memset(&ofs,0,sizeof(OFSTRUCT));
+ ofs.cBytes = sizeof(OFSTRUCT);
+ hfile = OpenFile(dllpath,&ofs,OF_EXIST);
+ if (hfile) rc = 0;
+ }
+ RegCloseKey(hkpath);
+ }
+ RegCloseKey(hksub);
+ }
+ found = TRUE; // break out
+ }
+ }
+ }
+ RegCloseKey(hkEnum);
+ }
+#endif
+ return rc;
+}
+
+
+static LPASIODRVSTRUCT newDrvStruct (HKEY hkey,char *keyname,int drvID,LPASIODRVSTRUCT lpdrv)
+{
+ HKEY hksub;
+ char databuf[256];
+ char dllpath[MAXPATHLEN];
+ WORD wData[100];
+ CLSID clsid;
+ DWORD datatype,datasize;
+ LONG cr,rc;
+
+ if (!lpdrv) {
+ if ((cr = RegOpenKeyExA(hkey,keyname,0,KEY_READ,&hksub)) == ERROR_SUCCESS) {
+
+ datatype = REG_SZ; datasize = 256;
+ cr = RegQueryValueExA(hksub,COM_CLSID,0,&datatype,(LPBYTE)databuf,&datasize);
+ if (cr == ERROR_SUCCESS) {
+ rc = findDrvPath (databuf,dllpath,MAXPATHLEN);
+ if (rc == 0) {
+ lpdrv = new ASIODRVSTRUCT[1];
+ if (lpdrv) {
+ memset(lpdrv,0,sizeof(ASIODRVSTRUCT));
+ lpdrv->drvID = drvID;
+ MultiByteToWideChar(CP_ACP,0,(LPCSTR)databuf,-1,(LPWSTR)wData,100);
+ if ((cr = CLSIDFromString((LPOLESTR)wData,(LPCLSID)&clsid)) == S_OK) {
+ memcpy(&lpdrv->clsid,&clsid,sizeof(CLSID));
+ }
+
+ datatype = REG_SZ; datasize = 256;
+ cr = RegQueryValueExA(hksub,ASIODRV_DESC,0,&datatype,(LPBYTE)databuf,&datasize);
+ if (cr == ERROR_SUCCESS) {
+ strcpy(lpdrv->drvname,databuf);
+ }
+ else strcpy(lpdrv->drvname,keyname);
+ }
+ }
+ }
+ RegCloseKey(hksub);
+ }
+ }
+ else lpdrv->next = newDrvStruct(hkey,keyname,drvID+1,lpdrv->next);
+
+ return lpdrv;
+}
+
+static void deleteDrvStruct (LPASIODRVSTRUCT lpdrv)
+{
+ IASIO *iasio;
+
+ if (lpdrv != 0) {
+ deleteDrvStruct(lpdrv->next);
+ if (lpdrv->asiodrv) {
+ iasio = (IASIO *)lpdrv->asiodrv;
+ iasio->Release();
+ }
+ delete lpdrv;
+ }
+}
+
+
+static LPASIODRVSTRUCT getDrvStruct (int drvID,LPASIODRVSTRUCT lpdrv)
+{
+ while (lpdrv) {
+ if (lpdrv->drvID == drvID) return lpdrv;
+ lpdrv = lpdrv->next;
+ }
+ return 0;
+}
+// ******************************************************************
+
+
+// ******************************************************************
+// AsioDriverList
+// ******************************************************************
+AsioDriverList::AsioDriverList ()
+{
+ HKEY hkEnum = 0;
+ char keyname[MAXDRVNAMELEN];
+ LPASIODRVSTRUCT pdl;
+ LONG cr;
+ DWORD index = 0;
+ BOOL fin = FALSE;
+
+ numdrv = 0;
+ lpdrvlist = 0;
+
+#ifdef UNICODE
+ cr = RegOpenKeyA(HKEY_LOCAL_MACHINE,ASIO_PATH,&hkEnum);
+#else
+ cr = RegOpenKey(HKEY_LOCAL_MACHINE,ASIO_PATH,&hkEnum);
+#endif
+ while (cr == ERROR_SUCCESS) {
+#ifdef UNICODE
+ if ((cr = RegEnumKeyA(hkEnum,index++,keyname,MAXDRVNAMELEN))== ERROR_SUCCESS) {
+#else
+ if ((cr = RegEnumKey(hkEnum,index++,keyname,MAXDRVNAMELEN))== ERROR_SUCCESS) {
+#endif
+ lpdrvlist = newDrvStruct (hkEnum,keyname,0,lpdrvlist);
+ }
+ else fin = TRUE;
+ }
+ if (hkEnum) RegCloseKey(hkEnum);
+
+ pdl = lpdrvlist;
+ while (pdl) {
+ numdrv++;
+ pdl = pdl->next;
+ }
+
+ if (numdrv) CoInitialize(0); // initialize COM
+}
+
+AsioDriverList::~AsioDriverList ()
+{
+ if (numdrv) {
+ deleteDrvStruct(lpdrvlist);
+ CoUninitialize();
+ }
+}
+
+
+LONG AsioDriverList::asioGetNumDev (VOID)
+{
+ return (LONG)numdrv;
+}
+
+
+LONG AsioDriverList::asioOpenDriver (int drvID,LPVOID *asiodrv)
+{
+ LPASIODRVSTRUCT lpdrv = 0;
+ long rc;
+
+ if (!asiodrv) return DRVERR_INVALID_PARAM;
+
+ if ((lpdrv = getDrvStruct(drvID,lpdrvlist)) != 0) {
+ if (!lpdrv->asiodrv) {
+ rc = CoCreateInstance(lpdrv->clsid,0,CLSCTX_INPROC_SERVER,lpdrv->clsid,asiodrv);
+ if (rc == S_OK) {
+ lpdrv->asiodrv = *asiodrv;
+ return 0;
+ }
+ // else if (rc == REGDB_E_CLASSNOTREG)
+ // strcpy (info->messageText, "Driver not registered in the Registration Database!");
+ }
+ else rc = DRVERR_DEVICE_ALREADY_OPEN;
+ }
+ else rc = DRVERR_DEVICE_NOT_FOUND;
+
+ return rc;
+}
+
+
+LONG AsioDriverList::asioCloseDriver (int drvID)
+{
+ LPASIODRVSTRUCT lpdrv = 0;
+ IASIO *iasio;
+
+ if ((lpdrv = getDrvStruct(drvID,lpdrvlist)) != 0) {
+ if (lpdrv->asiodrv) {
+ iasio = (IASIO *)lpdrv->asiodrv;
+ iasio->Release();
+ lpdrv->asiodrv = 0;
+ }
+ }
+
+ return 0;
+}
+
+LONG AsioDriverList::asioGetDriverName (int drvID,char *drvname,int drvnamesize)
+{
+ LPASIODRVSTRUCT lpdrv = 0;
+
+ if (!drvname) return DRVERR_INVALID_PARAM;
+
+ if ((lpdrv = getDrvStruct(drvID,lpdrvlist)) != 0) {
+ if (strlen(lpdrv->drvname) < (unsigned int)drvnamesize) {
+ strcpy(drvname,lpdrv->drvname);
+ }
+ else {
+ memcpy(drvname,lpdrv->drvname,drvnamesize-4);
+ drvname[drvnamesize-4] = '.';
+ drvname[drvnamesize-3] = '.';
+ drvname[drvnamesize-2] = '.';
+ drvname[drvnamesize-1] = 0;
+ }
+ return 0;
+ }
+ return DRVERR_DEVICE_NOT_FOUND;
+}
+
+LONG AsioDriverList::asioGetDriverPath (int drvID,char *dllpath,int dllpathsize)
+{
+ LPASIODRVSTRUCT lpdrv = 0;
+
+ if (!dllpath) return DRVERR_INVALID_PARAM;
+
+ if ((lpdrv = getDrvStruct(drvID,lpdrvlist)) != 0) {
+ if (strlen(lpdrv->dllpath) < (unsigned int)dllpathsize) {
+ strcpy(dllpath,lpdrv->dllpath);
+ return 0;
+ }
+ dllpath[0] = 0;
+ return DRVERR_INVALID_PARAM;
+ }
+ return DRVERR_DEVICE_NOT_FOUND;
+}
+
+LONG AsioDriverList::asioGetDriverCLSID (int drvID,CLSID *clsid)
+{
+ LPASIODRVSTRUCT lpdrv = 0;
+
+ if (!clsid) return DRVERR_INVALID_PARAM;
+
+ if ((lpdrv = getDrvStruct(drvID,lpdrvlist)) != 0) {
+ memcpy(clsid,&lpdrv->clsid,sizeof(CLSID));
+ return 0;
+ }
+ return DRVERR_DEVICE_NOT_FOUND;
+}
+
+
--- /dev/null
+#ifndef __asiolist__\r
+#define __asiolist__\r
+\r
+#define DRVERR -5000\r
+#define DRVERR_INVALID_PARAM DRVERR-1\r
+#define DRVERR_DEVICE_ALREADY_OPEN DRVERR-2\r
+#define DRVERR_DEVICE_NOT_FOUND DRVERR-3\r
+\r
+#define MAXPATHLEN 512\r
+#define MAXDRVNAMELEN 128\r
+\r
+struct asiodrvstruct\r
+{\r
+ int drvID;\r
+ CLSID clsid;\r
+ char dllpath[MAXPATHLEN];\r
+ char drvname[MAXDRVNAMELEN];\r
+ LPVOID asiodrv;\r
+ struct asiodrvstruct *next;\r
+};\r
+\r
+typedef struct asiodrvstruct ASIODRVSTRUCT;\r
+typedef ASIODRVSTRUCT *LPASIODRVSTRUCT;\r
+\r
+class AsioDriverList {\r
+public:\r
+ AsioDriverList();\r
+ ~AsioDriverList();\r
+ \r
+ LONG asioOpenDriver (int,VOID **);\r
+ LONG asioCloseDriver (int);\r
+\r
+ // nice to have\r
+ LONG asioGetNumDev (VOID);\r
+ LONG asioGetDriverName (int,char *,int); \r
+ LONG asioGetDriverPath (int,char *,int);\r
+ LONG asioGetDriverCLSID (int,CLSID *);\r
+\r
+ // or use directly access\r
+ LPASIODRVSTRUCT lpdrvlist;\r
+ int numdrv;\r
+};\r
+\r
+typedef class AsioDriverList *LPASIODRIVERLIST;\r
+\r
+#endif\r
--- /dev/null
+#ifndef __asiosys__\r
+ #define __asiosys__\r
+\r
+ #if defined(_WIN32) || defined(_WIN64)\r
+ #undef MAC \r
+ #define PPC 0\r
+ #define WINDOWS 1\r
+ #define SGI 0\r
+ #define SUN 0\r
+ #define LINUX 0\r
+ #define BEOS 0\r
+\r
+ #define NATIVE_INT64 0\r
+ #define IEEE754_64FLOAT 1\r
+ \r
+ #elif BEOS\r
+ #define MAC 0\r
+ #define PPC 0\r
+ #define WINDOWS 0\r
+ #define PC 0\r
+ #define SGI 0\r
+ #define SUN 0\r
+ #define LINUX 0\r
+ \r
+ #define NATIVE_INT64 0\r
+ #define IEEE754_64FLOAT 1\r
+ \r
+ #ifndef DEBUG\r
+ #define DEBUG 0\r
+ #if DEBUG\r
+ void DEBUGGERMESSAGE(char *string);\r
+ #else\r
+ #define DEBUGGERMESSAGE(a)\r
+ #endif\r
+ #endif\r
+\r
+ #elif SGI\r
+ #define MAC 0\r
+ #define PPC 0\r
+ #define WINDOWS 0\r
+ #define PC 0\r
+ #define SUN 0\r
+ #define LINUX 0\r
+ #define BEOS 0\r
+ \r
+ #define NATIVE_INT64 0\r
+ #define IEEE754_64FLOAT 1\r
+ \r
+ #ifndef DEBUG\r
+ #define DEBUG 0\r
+ #if DEBUG\r
+ void DEBUGGERMESSAGE(char *string);\r
+ #else\r
+ #define DEBUGGERMESSAGE(a)\r
+ #endif\r
+ #endif\r
+\r
+ #else // MAC\r
+\r
+ #define MAC 1\r
+ #define PPC 1\r
+ #define WINDOWS 0\r
+ #define PC 0\r
+ #define SGI 0\r
+ #define SUN 0\r
+ #define LINUX 0\r
+ #define BEOS 0\r
+\r
+ #define NATIVE_INT64 0\r
+ #define IEEE754_64FLOAT 1\r
+\r
+ #ifndef DEBUG\r
+ #define DEBUG 0\r
+ #if DEBUG\r
+ void DEBUGGERMESSAGE(char *string);\r
+ #else\r
+ #define DEBUGGERMESSAGE(a)\r
+ #endif\r
+ #endif\r
+ #endif\r
+\r
+#endif\r
--- /dev/null
+/*==========================================================================;
+ *
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ *
+ * File: dsound.h
+ * Content: DirectSound include file
+ *
+ **************************************************************************/
+
+#define COM_NO_WINDOWS_H
+#include <objbase.h>
+#include <float.h>
+
+#ifndef DIRECTSOUND_VERSION
+#define DIRECTSOUND_VERSION 0x0900 /* Version 9.0 */
+#endif
+
+#ifdef __cplusplus
+extern "C" {
+#endif // __cplusplus
+
+#ifndef __DSOUND_INCLUDED__
+#define __DSOUND_INCLUDED__
+
+/* Type definitions shared with Direct3D */
+
+#ifndef DX_SHARED_DEFINES
+
+typedef float D3DVALUE, *LPD3DVALUE;
+
+#ifndef D3DCOLOR_DEFINED
+typedef DWORD D3DCOLOR;
+#define D3DCOLOR_DEFINED
+#endif
+
+#ifndef LPD3DCOLOR_DEFINED
+typedef DWORD *LPD3DCOLOR;
+#define LPD3DCOLOR_DEFINED
+#endif
+
+#ifndef D3DVECTOR_DEFINED
+typedef struct _D3DVECTOR {
+ float x;
+ float y;
+ float z;
+} D3DVECTOR;
+#define D3DVECTOR_DEFINED
+#endif
+
+#ifndef LPD3DVECTOR_DEFINED
+typedef D3DVECTOR *LPD3DVECTOR;
+#define LPD3DVECTOR_DEFINED
+#endif
+
+#define DX_SHARED_DEFINES
+#endif // DX_SHARED_DEFINES
+
+#define _FACDS 0x878 /* DirectSound's facility code */
+#define MAKE_DSHRESULT(code) MAKE_HRESULT(1, _FACDS, code)
+
+// DirectSound Component GUID {47D4D946-62E8-11CF-93BC-444553540000}
+DEFINE_GUID(CLSID_DirectSound, 0x47d4d946, 0x62e8, 0x11cf, 0x93, 0xbc, 0x44, 0x45, 0x53, 0x54, 0x0, 0x0);
+
+// DirectSound 8.0 Component GUID {3901CC3F-84B5-4FA4-BA35-AA8172B8A09B}
+DEFINE_GUID(CLSID_DirectSound8, 0x3901cc3f, 0x84b5, 0x4fa4, 0xba, 0x35, 0xaa, 0x81, 0x72, 0xb8, 0xa0, 0x9b);
+
+// DirectSound Capture Component GUID {B0210780-89CD-11D0-AF08-00A0C925CD16}
+DEFINE_GUID(CLSID_DirectSoundCapture, 0xb0210780, 0x89cd, 0x11d0, 0xaf, 0x8, 0x0, 0xa0, 0xc9, 0x25, 0xcd, 0x16);
+
+// DirectSound 8.0 Capture Component GUID {E4BCAC13-7F99-4908-9A8E-74E3BF24B6E1}
+DEFINE_GUID(CLSID_DirectSoundCapture8, 0xe4bcac13, 0x7f99, 0x4908, 0x9a, 0x8e, 0x74, 0xe3, 0xbf, 0x24, 0xb6, 0xe1);
+
+// DirectSound Full Duplex Component GUID {FEA4300C-7959-4147-B26A-2377B9E7A91D}
+DEFINE_GUID(CLSID_DirectSoundFullDuplex, 0xfea4300c, 0x7959, 0x4147, 0xb2, 0x6a, 0x23, 0x77, 0xb9, 0xe7, 0xa9, 0x1d);
+
+
+// DirectSound default playback device GUID {DEF00000-9C6D-47ED-AAF1-4DDA8F2B5C03}
+DEFINE_GUID(DSDEVID_DefaultPlayback, 0xdef00000, 0x9c6d, 0x47ed, 0xaa, 0xf1, 0x4d, 0xda, 0x8f, 0x2b, 0x5c, 0x03);
+
+// DirectSound default capture device GUID {DEF00001-9C6D-47ED-AAF1-4DDA8F2B5C03}
+DEFINE_GUID(DSDEVID_DefaultCapture, 0xdef00001, 0x9c6d, 0x47ed, 0xaa, 0xf1, 0x4d, 0xda, 0x8f, 0x2b, 0x5c, 0x03);
+
+// DirectSound default device for voice playback {DEF00002-9C6D-47ED-AAF1-4DDA8F2B5C03}
+DEFINE_GUID(DSDEVID_DefaultVoicePlayback, 0xdef00002, 0x9c6d, 0x47ed, 0xaa, 0xf1, 0x4d, 0xda, 0x8f, 0x2b, 0x5c, 0x03);
+
+// DirectSound default device for voice capture {DEF00003-9C6D-47ED-AAF1-4DDA8F2B5C03}
+DEFINE_GUID(DSDEVID_DefaultVoiceCapture, 0xdef00003, 0x9c6d, 0x47ed, 0xaa, 0xf1, 0x4d, 0xda, 0x8f, 0x2b, 0x5c, 0x03);
+
+
+//
+// Forward declarations for interfaces.
+// 'struct' not 'class' per the way DECLARE_INTERFACE_ is defined
+//
+
+#ifdef __cplusplus
+struct IDirectSound;
+struct IDirectSoundBuffer;
+struct IDirectSound3DListener;
+struct IDirectSound3DBuffer;
+struct IDirectSoundCapture;
+struct IDirectSoundCaptureBuffer;
+struct IDirectSoundNotify;
+#endif // __cplusplus
+
+
+//
+// DirectSound 8.0 interfaces.
+//
+
+#if DIRECTSOUND_VERSION >= 0x0800
+
+#ifdef __cplusplus
+struct IDirectSound8;
+struct IDirectSoundBuffer8;
+struct IDirectSoundCaptureBuffer8;
+struct IDirectSoundFXGargle;
+struct IDirectSoundFXChorus;
+struct IDirectSoundFXFlanger;
+struct IDirectSoundFXEcho;
+struct IDirectSoundFXDistortion;
+struct IDirectSoundFXCompressor;
+struct IDirectSoundFXParamEq;
+struct IDirectSoundFXWavesReverb;
+struct IDirectSoundFXI3DL2Reverb;
+struct IDirectSoundCaptureFXAec;
+struct IDirectSoundCaptureFXNoiseSuppress;
+struct IDirectSoundFullDuplex;
+#endif // __cplusplus
+
+// IDirectSound8, IDirectSoundBuffer8 and IDirectSoundCaptureBuffer8 are the
+// only DirectSound 7.0 interfaces with changed functionality in version 8.0.
+// The other level 8 interfaces are equivalent to their level 7 counterparts:
+
+#define IDirectSoundCapture8 IDirectSoundCapture
+#define IDirectSound3DListener8 IDirectSound3DListener
+#define IDirectSound3DBuffer8 IDirectSound3DBuffer
+#define IDirectSoundNotify8 IDirectSoundNotify
+#define IDirectSoundFXGargle8 IDirectSoundFXGargle
+#define IDirectSoundFXChorus8 IDirectSoundFXChorus
+#define IDirectSoundFXFlanger8 IDirectSoundFXFlanger
+#define IDirectSoundFXEcho8 IDirectSoundFXEcho
+#define IDirectSoundFXDistortion8 IDirectSoundFXDistortion
+#define IDirectSoundFXCompressor8 IDirectSoundFXCompressor
+#define IDirectSoundFXParamEq8 IDirectSoundFXParamEq
+#define IDirectSoundFXWavesReverb8 IDirectSoundFXWavesReverb
+#define IDirectSoundFXI3DL2Reverb8 IDirectSoundFXI3DL2Reverb
+#define IDirectSoundCaptureFXAec8 IDirectSoundCaptureFXAec
+#define IDirectSoundCaptureFXNoiseSuppress8 IDirectSoundCaptureFXNoiseSuppress
+#define IDirectSoundFullDuplex8 IDirectSoundFullDuplex
+
+#endif // DIRECTSOUND_VERSION >= 0x0800
+
+typedef struct IDirectSound *LPDIRECTSOUND;
+typedef struct IDirectSoundBuffer *LPDIRECTSOUNDBUFFER;
+typedef struct IDirectSound3DListener *LPDIRECTSOUND3DLISTENER;
+typedef struct IDirectSound3DBuffer *LPDIRECTSOUND3DBUFFER;
+typedef struct IDirectSoundCapture *LPDIRECTSOUNDCAPTURE;
+typedef struct IDirectSoundCaptureBuffer *LPDIRECTSOUNDCAPTUREBUFFER;
+typedef struct IDirectSoundNotify *LPDIRECTSOUNDNOTIFY;
+
+
+#if DIRECTSOUND_VERSION >= 0x0800
+
+typedef struct IDirectSoundFXGargle *LPDIRECTSOUNDFXGARGLE;
+typedef struct IDirectSoundFXChorus *LPDIRECTSOUNDFXCHORUS;
+typedef struct IDirectSoundFXFlanger *LPDIRECTSOUNDFXFLANGER;
+typedef struct IDirectSoundFXEcho *LPDIRECTSOUNDFXECHO;
+typedef struct IDirectSoundFXDistortion *LPDIRECTSOUNDFXDISTORTION;
+typedef struct IDirectSoundFXCompressor *LPDIRECTSOUNDFXCOMPRESSOR;
+typedef struct IDirectSoundFXParamEq *LPDIRECTSOUNDFXPARAMEQ;
+typedef struct IDirectSoundFXWavesReverb *LPDIRECTSOUNDFXWAVESREVERB;
+typedef struct IDirectSoundFXI3DL2Reverb *LPDIRECTSOUNDFXI3DL2REVERB;
+typedef struct IDirectSoundCaptureFXAec *LPDIRECTSOUNDCAPTUREFXAEC;
+typedef struct IDirectSoundCaptureFXNoiseSuppress *LPDIRECTSOUNDCAPTUREFXNOISESUPPRESS;
+typedef struct IDirectSoundFullDuplex *LPDIRECTSOUNDFULLDUPLEX;
+
+typedef struct IDirectSound8 *LPDIRECTSOUND8;
+typedef struct IDirectSoundBuffer8 *LPDIRECTSOUNDBUFFER8;
+typedef struct IDirectSound3DListener8 *LPDIRECTSOUND3DLISTENER8;
+typedef struct IDirectSound3DBuffer8 *LPDIRECTSOUND3DBUFFER8;
+typedef struct IDirectSoundCapture8 *LPDIRECTSOUNDCAPTURE8;
+typedef struct IDirectSoundCaptureBuffer8 *LPDIRECTSOUNDCAPTUREBUFFER8;
+typedef struct IDirectSoundNotify8 *LPDIRECTSOUNDNOTIFY8;
+typedef struct IDirectSoundFXGargle8 *LPDIRECTSOUNDFXGARGLE8;
+typedef struct IDirectSoundFXChorus8 *LPDIRECTSOUNDFXCHORUS8;
+typedef struct IDirectSoundFXFlanger8 *LPDIRECTSOUNDFXFLANGER8;
+typedef struct IDirectSoundFXEcho8 *LPDIRECTSOUNDFXECHO8;
+typedef struct IDirectSoundFXDistortion8 *LPDIRECTSOUNDFXDISTORTION8;
+typedef struct IDirectSoundFXCompressor8 *LPDIRECTSOUNDFXCOMPRESSOR8;
+typedef struct IDirectSoundFXParamEq8 *LPDIRECTSOUNDFXPARAMEQ8;
+typedef struct IDirectSoundFXWavesReverb8 *LPDIRECTSOUNDFXWAVESREVERB8;
+typedef struct IDirectSoundFXI3DL2Reverb8 *LPDIRECTSOUNDFXI3DL2REVERB8;
+typedef struct IDirectSoundCaptureFXAec8 *LPDIRECTSOUNDCAPTUREFXAEC8;
+typedef struct IDirectSoundCaptureFXNoiseSuppress8 *LPDIRECTSOUNDCAPTUREFXNOISESUPPRESS8;
+typedef struct IDirectSoundFullDuplex8 *LPDIRECTSOUNDFULLDUPLEX8;
+
+#endif // DIRECTSOUND_VERSION >= 0x0800
+
+//
+// IID definitions for the unchanged DirectSound 8.0 interfaces
+//
+
+#if DIRECTSOUND_VERSION >= 0x0800
+
+#define IID_IDirectSoundCapture8 IID_IDirectSoundCapture
+#define IID_IDirectSound3DListener8 IID_IDirectSound3DListener
+#define IID_IDirectSound3DBuffer8 IID_IDirectSound3DBuffer
+#define IID_IDirectSoundNotify8 IID_IDirectSoundNotify
+#define IID_IDirectSoundFXGargle8 IID_IDirectSoundFXGargle
+#define IID_IDirectSoundFXChorus8 IID_IDirectSoundFXChorus
+#define IID_IDirectSoundFXFlanger8 IID_IDirectSoundFXFlanger
+#define IID_IDirectSoundFXEcho8 IID_IDirectSoundFXEcho
+#define IID_IDirectSoundFXDistortion8 IID_IDirectSoundFXDistortion
+#define IID_IDirectSoundFXCompressor8 IID_IDirectSoundFXCompressor
+#define IID_IDirectSoundFXParamEq8 IID_IDirectSoundFXParamEq
+#define IID_IDirectSoundFXWavesReverb8 IID_IDirectSoundFXWavesReverb
+#define IID_IDirectSoundFXI3DL2Reverb8 IID_IDirectSoundFXI3DL2Reverb
+#define IID_IDirectSoundCaptureFXAec8 IID_IDirectSoundCaptureFXAec
+#define IID_IDirectSoundCaptureFXNoiseSuppress8 IID_IDirectSoundCaptureFXNoiseSuppress
+#define IID_IDirectSoundFullDuplex8 IID_IDirectSoundFullDuplex
+
+#endif // DIRECTSOUND_VERSION >= 0x0800
+
+//
+// Compatibility typedefs
+//
+
+#ifndef _LPCWAVEFORMATEX_DEFINED
+#define _LPCWAVEFORMATEX_DEFINED
+typedef const WAVEFORMATEX *LPCWAVEFORMATEX;
+#endif // _LPCWAVEFORMATEX_DEFINED
+
+#ifndef __LPCGUID_DEFINED__
+#define __LPCGUID_DEFINED__
+typedef const GUID *LPCGUID;
+#endif // __LPCGUID_DEFINED__
+
+typedef LPDIRECTSOUND *LPLPDIRECTSOUND;
+typedef LPDIRECTSOUNDBUFFER *LPLPDIRECTSOUNDBUFFER;
+typedef LPDIRECTSOUND3DLISTENER *LPLPDIRECTSOUND3DLISTENER;
+typedef LPDIRECTSOUND3DBUFFER *LPLPDIRECTSOUND3DBUFFER;
+typedef LPDIRECTSOUNDCAPTURE *LPLPDIRECTSOUNDCAPTURE;
+typedef LPDIRECTSOUNDCAPTUREBUFFER *LPLPDIRECTSOUNDCAPTUREBUFFER;
+typedef LPDIRECTSOUNDNOTIFY *LPLPDIRECTSOUNDNOTIFY;
+
+#if DIRECTSOUND_VERSION >= 0x0800
+typedef LPDIRECTSOUND8 *LPLPDIRECTSOUND8;
+typedef LPDIRECTSOUNDBUFFER8 *LPLPDIRECTSOUNDBUFFER8;
+typedef LPDIRECTSOUNDCAPTURE8 *LPLPDIRECTSOUNDCAPTURE8;
+typedef LPDIRECTSOUNDCAPTUREBUFFER8 *LPLPDIRECTSOUNDCAPTUREBUFFER8;
+#endif // DIRECTSOUND_VERSION >= 0x0800
+
+//
+// Structures
+//
+
+typedef struct _DSCAPS
+{
+ DWORD dwSize;
+ DWORD dwFlags;
+ DWORD dwMinSecondarySampleRate;
+ DWORD dwMaxSecondarySampleRate;
+ DWORD dwPrimaryBuffers;
+ DWORD dwMaxHwMixingAllBuffers;
+ DWORD dwMaxHwMixingStaticBuffers;
+ DWORD dwMaxHwMixingStreamingBuffers;
+ DWORD dwFreeHwMixingAllBuffers;
+ DWORD dwFreeHwMixingStaticBuffers;
+ DWORD dwFreeHwMixingStreamingBuffers;
+ DWORD dwMaxHw3DAllBuffers;
+ DWORD dwMaxHw3DStaticBuffers;
+ DWORD dwMaxHw3DStreamingBuffers;
+ DWORD dwFreeHw3DAllBuffers;
+ DWORD dwFreeHw3DStaticBuffers;
+ DWORD dwFreeHw3DStreamingBuffers;
+ DWORD dwTotalHwMemBytes;
+ DWORD dwFreeHwMemBytes;
+ DWORD dwMaxContigFreeHwMemBytes;
+ DWORD dwUnlockTransferRateHwBuffers;
+ DWORD dwPlayCpuOverheadSwBuffers;
+ DWORD dwReserved1;
+ DWORD dwReserved2;
+} DSCAPS, *LPDSCAPS;
+
+typedef const DSCAPS *LPCDSCAPS;
+
+typedef struct _DSBCAPS
+{
+ DWORD dwSize;
+ DWORD dwFlags;
+ DWORD dwBufferBytes;
+ DWORD dwUnlockTransferRate;
+ DWORD dwPlayCpuOverhead;
+} DSBCAPS, *LPDSBCAPS;
+
+typedef const DSBCAPS *LPCDSBCAPS;
+
+#if DIRECTSOUND_VERSION >= 0x0800
+
+ typedef struct _DSEFFECTDESC
+ {
+ DWORD dwSize;
+ DWORD dwFlags;
+ GUID guidDSFXClass;
+ DWORD_PTR dwReserved1;
+ DWORD_PTR dwReserved2;
+ } DSEFFECTDESC, *LPDSEFFECTDESC;
+ typedef const DSEFFECTDESC *LPCDSEFFECTDESC;
+
+ #define DSFX_LOCHARDWARE 0x00000001
+ #define DSFX_LOCSOFTWARE 0x00000002
+
+ enum
+ {
+ DSFXR_PRESENT, // 0
+ DSFXR_LOCHARDWARE, // 1
+ DSFXR_LOCSOFTWARE, // 2
+ DSFXR_UNALLOCATED, // 3
+ DSFXR_FAILED, // 4
+ DSFXR_UNKNOWN, // 5
+ DSFXR_SENDLOOP // 6
+ };
+
+ typedef struct _DSCEFFECTDESC
+ {
+ DWORD dwSize;
+ DWORD dwFlags;
+ GUID guidDSCFXClass;
+ GUID guidDSCFXInstance;
+ DWORD dwReserved1;
+ DWORD dwReserved2;
+ } DSCEFFECTDESC, *LPDSCEFFECTDESC;
+ typedef const DSCEFFECTDESC *LPCDSCEFFECTDESC;
+
+ #define DSCFX_LOCHARDWARE 0x00000001
+ #define DSCFX_LOCSOFTWARE 0x00000002
+
+ #define DSCFXR_LOCHARDWARE 0x00000010
+ #define DSCFXR_LOCSOFTWARE 0x00000020
+
+#endif // DIRECTSOUND_VERSION >= 0x0800
+
+typedef struct _DSBUFFERDESC
+{
+ DWORD dwSize;
+ DWORD dwFlags;
+ DWORD dwBufferBytes;
+ DWORD dwReserved;
+ LPWAVEFORMATEX lpwfxFormat;
+#if DIRECTSOUND_VERSION >= 0x0700
+ GUID guid3DAlgorithm;
+#endif
+} DSBUFFERDESC, *LPDSBUFFERDESC;
+
+typedef const DSBUFFERDESC *LPCDSBUFFERDESC;
+
+// Older version of this structure:
+
+typedef struct _DSBUFFERDESC1
+{
+ DWORD dwSize;
+ DWORD dwFlags;
+ DWORD dwBufferBytes;
+ DWORD dwReserved;
+ LPWAVEFORMATEX lpwfxFormat;
+} DSBUFFERDESC1, *LPDSBUFFERDESC1;
+
+typedef const DSBUFFERDESC1 *LPCDSBUFFERDESC1;
+
+typedef struct _DS3DBUFFER
+{
+ DWORD dwSize;
+ D3DVECTOR vPosition;
+ D3DVECTOR vVelocity;
+ DWORD dwInsideConeAngle;
+ DWORD dwOutsideConeAngle;
+ D3DVECTOR vConeOrientation;
+ LONG lConeOutsideVolume;
+ D3DVALUE flMinDistance;
+ D3DVALUE flMaxDistance;
+ DWORD dwMode;
+} DS3DBUFFER, *LPDS3DBUFFER;
+
+typedef const DS3DBUFFER *LPCDS3DBUFFER;
+
+typedef struct _DS3DLISTENER
+{
+ DWORD dwSize;
+ D3DVECTOR vPosition;
+ D3DVECTOR vVelocity;
+ D3DVECTOR vOrientFront;
+ D3DVECTOR vOrientTop;
+ D3DVALUE flDistanceFactor;
+ D3DVALUE flRolloffFactor;
+ D3DVALUE flDopplerFactor;
+} DS3DLISTENER, *LPDS3DLISTENER;
+
+typedef const DS3DLISTENER *LPCDS3DLISTENER;
+
+typedef struct _DSCCAPS
+{
+ DWORD dwSize;
+ DWORD dwFlags;
+ DWORD dwFormats;
+ DWORD dwChannels;
+} DSCCAPS, *LPDSCCAPS;
+
+typedef const DSCCAPS *LPCDSCCAPS;
+
+typedef struct _DSCBUFFERDESC1
+{
+ DWORD dwSize;
+ DWORD dwFlags;
+ DWORD dwBufferBytes;
+ DWORD dwReserved;
+ LPWAVEFORMATEX lpwfxFormat;
+} DSCBUFFERDESC1, *LPDSCBUFFERDESC1;
+
+typedef struct _DSCBUFFERDESC
+{
+ DWORD dwSize;
+ DWORD dwFlags;
+ DWORD dwBufferBytes;
+ DWORD dwReserved;
+ LPWAVEFORMATEX lpwfxFormat;
+#if DIRECTSOUND_VERSION >= 0x0800
+ DWORD dwFXCount;
+ LPDSCEFFECTDESC lpDSCFXDesc;
+#endif
+} DSCBUFFERDESC, *LPDSCBUFFERDESC;
+
+typedef const DSCBUFFERDESC *LPCDSCBUFFERDESC;
+
+typedef struct _DSCBCAPS
+{
+ DWORD dwSize;
+ DWORD dwFlags;
+ DWORD dwBufferBytes;
+ DWORD dwReserved;
+} DSCBCAPS, *LPDSCBCAPS;
+
+typedef const DSCBCAPS *LPCDSCBCAPS;
+
+typedef struct _DSBPOSITIONNOTIFY
+{
+ DWORD dwOffset;
+ HANDLE hEventNotify;
+} DSBPOSITIONNOTIFY, *LPDSBPOSITIONNOTIFY;
+
+typedef const DSBPOSITIONNOTIFY *LPCDSBPOSITIONNOTIFY;
+
+//
+// DirectSound API
+//
+
+typedef BOOL (CALLBACK *LPDSENUMCALLBACKA)(LPGUID, LPCSTR, LPCSTR, LPVOID);
+typedef BOOL (CALLBACK *LPDSENUMCALLBACKW)(LPGUID, LPCWSTR, LPCWSTR, LPVOID);
+
+extern HRESULT WINAPI DirectSoundCreate(LPCGUID pcGuidDevice, LPDIRECTSOUND *ppDS, LPUNKNOWN pUnkOuter);
+extern HRESULT WINAPI DirectSoundEnumerateA(LPDSENUMCALLBACKA pDSEnumCallback, LPVOID pContext);
+extern HRESULT WINAPI DirectSoundEnumerateW(LPDSENUMCALLBACKW pDSEnumCallback, LPVOID pContext);
+
+extern HRESULT WINAPI DirectSoundCaptureCreate(LPCGUID pcGuidDevice, LPDIRECTSOUNDCAPTURE *ppDSC, LPUNKNOWN pUnkOuter);
+extern HRESULT WINAPI DirectSoundCaptureEnumerateA(LPDSENUMCALLBACKA pDSEnumCallback, LPVOID pContext);
+extern HRESULT WINAPI DirectSoundCaptureEnumerateW(LPDSENUMCALLBACKW pDSEnumCallback, LPVOID pContext);
+
+#if DIRECTSOUND_VERSION >= 0x0800
+extern HRESULT WINAPI DirectSoundCreate8(LPCGUID pcGuidDevice, LPDIRECTSOUND8 *ppDS8, LPUNKNOWN pUnkOuter);
+extern HRESULT WINAPI DirectSoundCaptureCreate8(LPCGUID pcGuidDevice, LPDIRECTSOUNDCAPTURE8 *ppDSC8, LPUNKNOWN pUnkOuter);
+extern HRESULT WINAPI DirectSoundFullDuplexCreate(LPCGUID pcGuidCaptureDevice, LPCGUID pcGuidRenderDevice,
+ LPCDSCBUFFERDESC pcDSCBufferDesc, LPCDSBUFFERDESC pcDSBufferDesc, HWND hWnd,
+ DWORD dwLevel, LPDIRECTSOUNDFULLDUPLEX* ppDSFD, LPDIRECTSOUNDCAPTUREBUFFER8 *ppDSCBuffer8,
+ LPDIRECTSOUNDBUFFER8 *ppDSBuffer8, LPUNKNOWN pUnkOuter);
+#define DirectSoundFullDuplexCreate8 DirectSoundFullDuplexCreate
+
+extern HRESULT WINAPI GetDeviceID(LPCGUID pGuidSrc, LPGUID pGuidDest);
+#endif // DIRECTSOUND_VERSION >= 0x0800
+
+#ifdef UNICODE
+#define LPDSENUMCALLBACK LPDSENUMCALLBACKW
+#define DirectSoundEnumerate DirectSoundEnumerateW
+#define DirectSoundCaptureEnumerate DirectSoundCaptureEnumerateW
+#else // UNICODE
+#define LPDSENUMCALLBACK LPDSENUMCALLBACKA
+#define DirectSoundEnumerate DirectSoundEnumerateA
+#define DirectSoundCaptureEnumerate DirectSoundCaptureEnumerateA
+#endif // UNICODE
+
+//
+// IUnknown
+//
+
+#if !defined(__cplusplus) || defined(CINTERFACE)
+#ifndef IUnknown_QueryInterface
+#define IUnknown_QueryInterface(p,a,b) (p)->lpVtbl->QueryInterface(p,a,b)
+#endif // IUnknown_QueryInterface
+#ifndef IUnknown_AddRef
+#define IUnknown_AddRef(p) (p)->lpVtbl->AddRef(p)
+#endif // IUnknown_AddRef
+#ifndef IUnknown_Release
+#define IUnknown_Release(p) (p)->lpVtbl->Release(p)
+#endif // IUnknown_Release
+#else // !defined(__cplusplus) || defined(CINTERFACE)
+#ifndef IUnknown_QueryInterface
+#define IUnknown_QueryInterface(p,a,b) (p)->QueryInterface(a,b)
+#endif // IUnknown_QueryInterface
+#ifndef IUnknown_AddRef
+#define IUnknown_AddRef(p) (p)->AddRef()
+#endif // IUnknown_AddRef
+#ifndef IUnknown_Release
+#define IUnknown_Release(p) (p)->Release()
+#endif // IUnknown_Release
+#endif // !defined(__cplusplus) || defined(CINTERFACE)
+
+#ifndef __IReferenceClock_INTERFACE_DEFINED__
+#define __IReferenceClock_INTERFACE_DEFINED__
+
+typedef LONGLONG REFERENCE_TIME;
+typedef REFERENCE_TIME *LPREFERENCE_TIME;
+
+DEFINE_GUID(IID_IReferenceClock, 0x56a86897, 0x0ad4, 0x11ce, 0xb0, 0x3a, 0x00, 0x20, 0xaf, 0x0b, 0xa7, 0x70);
+
+#undef INTERFACE
+#define INTERFACE IReferenceClock
+
+DECLARE_INTERFACE_(IReferenceClock, IUnknown)
+{
+ // IUnknown methods
+ STDMETHOD(QueryInterface) (THIS_ REFIID, LPVOID *) PURE;
+ STDMETHOD_(ULONG,AddRef) (THIS) PURE;
+ STDMETHOD_(ULONG,Release) (THIS) PURE;
+
+ // IReferenceClock methods
+ STDMETHOD(GetTime) (THIS_ REFERENCE_TIME *pTime) PURE;
+ STDMETHOD(AdviseTime) (THIS_ REFERENCE_TIME rtBaseTime, REFERENCE_TIME rtStreamTime,
+ HANDLE hEvent, LPDWORD pdwAdviseCookie) PURE;
+ STDMETHOD(AdvisePeriodic) (THIS_ REFERENCE_TIME rtStartTime, REFERENCE_TIME rtPeriodTime,
+ HANDLE hSemaphore, LPDWORD pdwAdviseCookie) PURE;
+ STDMETHOD(Unadvise) (THIS_ DWORD dwAdviseCookie) PURE;
+};
+
+#endif // __IReferenceClock_INTERFACE_DEFINED__
+
+#ifndef IReferenceClock_QueryInterface
+
+#define IReferenceClock_QueryInterface(p,a,b) IUnknown_QueryInterface(p,a,b)
+#define IReferenceClock_AddRef(p) IUnknown_AddRef(p)
+#define IReferenceClock_Release(p) IUnknown_Release(p)
+
+#if !defined(__cplusplus) || defined(CINTERFACE)
+#define IReferenceClock_GetTime(p,a) (p)->lpVtbl->GetTime(p,a)
+#define IReferenceClock_AdviseTime(p,a,b,c,d) (p)->lpVtbl->AdviseTime(p,a,b,c,d)
+#define IReferenceClock_AdvisePeriodic(p,a,b,c,d) (p)->lpVtbl->AdvisePeriodic(p,a,b,c,d)
+#define IReferenceClock_Unadvise(p,a) (p)->lpVtbl->Unadvise(p,a)
+#else // !defined(__cplusplus) || defined(CINTERFACE)
+#define IReferenceClock_GetTime(p,a) (p)->GetTime(a)
+#define IReferenceClock_AdviseTime(p,a,b,c,d) (p)->AdviseTime(a,b,c,d)
+#define IReferenceClock_AdvisePeriodic(p,a,b,c,d) (p)->AdvisePeriodic(a,b,c,d)
+#define IReferenceClock_Unadvise(p,a) (p)->Unadvise(a)
+#endif // !defined(__cplusplus) || defined(CINTERFACE)
+
+#endif // IReferenceClock_QueryInterface
+
+//
+// IDirectSound
+//
+
+DEFINE_GUID(IID_IDirectSound, 0x279AFA83, 0x4981, 0x11CE, 0xA5, 0x21, 0x00, 0x20, 0xAF, 0x0B, 0xE5, 0x60);
+
+#undef INTERFACE
+#define INTERFACE IDirectSound
+
+DECLARE_INTERFACE_(IDirectSound, IUnknown)
+{
+ // IUnknown methods
+ STDMETHOD(QueryInterface) (THIS_ REFIID, LPVOID *) PURE;
+ STDMETHOD_(ULONG,AddRef) (THIS) PURE;
+ STDMETHOD_(ULONG,Release) (THIS) PURE;
+
+ // IDirectSound methods
+ STDMETHOD(CreateSoundBuffer) (THIS_ LPCDSBUFFERDESC pcDSBufferDesc, LPDIRECTSOUNDBUFFER *ppDSBuffer, LPUNKNOWN pUnkOuter) PURE;
+ STDMETHOD(GetCaps) (THIS_ LPDSCAPS pDSCaps) PURE;
+ STDMETHOD(DuplicateSoundBuffer) (THIS_ LPDIRECTSOUNDBUFFER pDSBufferOriginal, LPDIRECTSOUNDBUFFER *ppDSBufferDuplicate) PURE;
+ STDMETHOD(SetCooperativeLevel) (THIS_ HWND hwnd, DWORD dwLevel) PURE;
+ STDMETHOD(Compact) (THIS) PURE;
+ STDMETHOD(GetSpeakerConfig) (THIS_ LPDWORD pdwSpeakerConfig) PURE;
+ STDMETHOD(SetSpeakerConfig) (THIS_ DWORD dwSpeakerConfig) PURE;
+ STDMETHOD(Initialize) (THIS_ LPCGUID pcGuidDevice) PURE;
+};
+
+#define IDirectSound_QueryInterface(p,a,b) IUnknown_QueryInterface(p,a,b)
+#define IDirectSound_AddRef(p) IUnknown_AddRef(p)
+#define IDirectSound_Release(p) IUnknown_Release(p)
+
+#if !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSound_CreateSoundBuffer(p,a,b,c) (p)->lpVtbl->CreateSoundBuffer(p,a,b,c)
+#define IDirectSound_GetCaps(p,a) (p)->lpVtbl->GetCaps(p,a)
+#define IDirectSound_DuplicateSoundBuffer(p,a,b) (p)->lpVtbl->DuplicateSoundBuffer(p,a,b)
+#define IDirectSound_SetCooperativeLevel(p,a,b) (p)->lpVtbl->SetCooperativeLevel(p,a,b)
+#define IDirectSound_Compact(p) (p)->lpVtbl->Compact(p)
+#define IDirectSound_GetSpeakerConfig(p,a) (p)->lpVtbl->GetSpeakerConfig(p,a)
+#define IDirectSound_SetSpeakerConfig(p,b) (p)->lpVtbl->SetSpeakerConfig(p,b)
+#define IDirectSound_Initialize(p,a) (p)->lpVtbl->Initialize(p,a)
+#else // !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSound_CreateSoundBuffer(p,a,b,c) (p)->CreateSoundBuffer(a,b,c)
+#define IDirectSound_GetCaps(p,a) (p)->GetCaps(a)
+#define IDirectSound_DuplicateSoundBuffer(p,a,b) (p)->DuplicateSoundBuffer(a,b)
+#define IDirectSound_SetCooperativeLevel(p,a,b) (p)->SetCooperativeLevel(a,b)
+#define IDirectSound_Compact(p) (p)->Compact()
+#define IDirectSound_GetSpeakerConfig(p,a) (p)->GetSpeakerConfig(a)
+#define IDirectSound_SetSpeakerConfig(p,b) (p)->SetSpeakerConfig(b)
+#define IDirectSound_Initialize(p,a) (p)->Initialize(a)
+#endif // !defined(__cplusplus) || defined(CINTERFACE)
+
+#if DIRECTSOUND_VERSION >= 0x0800
+
+//
+// IDirectSound8
+//
+
+DEFINE_GUID(IID_IDirectSound8, 0xC50A7E93, 0xF395, 0x4834, 0x9E, 0xF6, 0x7F, 0xA9, 0x9D, 0xE5, 0x09, 0x66);
+
+#undef INTERFACE
+#define INTERFACE IDirectSound8
+
+DECLARE_INTERFACE_(IDirectSound8, IDirectSound)
+{
+ // IUnknown methods
+ STDMETHOD(QueryInterface) (THIS_ REFIID, LPVOID *) PURE;
+ STDMETHOD_(ULONG,AddRef) (THIS) PURE;
+ STDMETHOD_(ULONG,Release) (THIS) PURE;
+
+ // IDirectSound methods
+ STDMETHOD(CreateSoundBuffer) (THIS_ LPCDSBUFFERDESC pcDSBufferDesc, LPDIRECTSOUNDBUFFER *ppDSBuffer, LPUNKNOWN pUnkOuter) PURE;
+ STDMETHOD(GetCaps) (THIS_ LPDSCAPS pDSCaps) PURE;
+ STDMETHOD(DuplicateSoundBuffer) (THIS_ LPDIRECTSOUNDBUFFER pDSBufferOriginal, LPDIRECTSOUNDBUFFER *ppDSBufferDuplicate) PURE;
+ STDMETHOD(SetCooperativeLevel) (THIS_ HWND hwnd, DWORD dwLevel) PURE;
+ STDMETHOD(Compact) (THIS) PURE;
+ STDMETHOD(GetSpeakerConfig) (THIS_ LPDWORD pdwSpeakerConfig) PURE;
+ STDMETHOD(SetSpeakerConfig) (THIS_ DWORD dwSpeakerConfig) PURE;
+ STDMETHOD(Initialize) (THIS_ LPCGUID pcGuidDevice) PURE;
+
+ // IDirectSound8 methods
+ STDMETHOD(VerifyCertification) (THIS_ LPDWORD pdwCertified) PURE;
+};
+
+#define IDirectSound8_QueryInterface(p,a,b) IDirectSound_QueryInterface(p,a,b)
+#define IDirectSound8_AddRef(p) IDirectSound_AddRef(p)
+#define IDirectSound8_Release(p) IDirectSound_Release(p)
+#define IDirectSound8_CreateSoundBuffer(p,a,b,c) IDirectSound_CreateSoundBuffer(p,a,b,c)
+#define IDirectSound8_GetCaps(p,a) IDirectSound_GetCaps(p,a)
+#define IDirectSound8_DuplicateSoundBuffer(p,a,b) IDirectSound_DuplicateSoundBuffer(p,a,b)
+#define IDirectSound8_SetCooperativeLevel(p,a,b) IDirectSound_SetCooperativeLevel(p,a,b)
+#define IDirectSound8_Compact(p) IDirectSound_Compact(p)
+#define IDirectSound8_GetSpeakerConfig(p,a) IDirectSound_GetSpeakerConfig(p,a)
+#define IDirectSound8_SetSpeakerConfig(p,a) IDirectSound_SetSpeakerConfig(p,a)
+#define IDirectSound8_Initialize(p,a) IDirectSound_Initialize(p,a)
+
+#if !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSound8_VerifyCertification(p,a) (p)->lpVtbl->VerifyCertification(p,a)
+#else // !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSound8_VerifyCertification(p,a) (p)->VerifyCertification(a)
+#endif // !defined(__cplusplus) || defined(CINTERFACE)
+
+#endif // DIRECTSOUND_VERSION >= 0x0800
+
+//
+// IDirectSoundBuffer
+//
+
+DEFINE_GUID(IID_IDirectSoundBuffer, 0x279AFA85, 0x4981, 0x11CE, 0xA5, 0x21, 0x00, 0x20, 0xAF, 0x0B, 0xE5, 0x60);
+
+#undef INTERFACE
+#define INTERFACE IDirectSoundBuffer
+
+DECLARE_INTERFACE_(IDirectSoundBuffer, IUnknown)
+{
+ // IUnknown methods
+ STDMETHOD(QueryInterface) (THIS_ REFIID, LPVOID *) PURE;
+ STDMETHOD_(ULONG,AddRef) (THIS) PURE;
+ STDMETHOD_(ULONG,Release) (THIS) PURE;
+
+ // IDirectSoundBuffer methods
+ STDMETHOD(GetCaps) (THIS_ LPDSBCAPS pDSBufferCaps) PURE;
+ STDMETHOD(GetCurrentPosition) (THIS_ LPDWORD pdwCurrentPlayCursor, LPDWORD pdwCurrentWriteCursor) PURE;
+ STDMETHOD(GetFormat) (THIS_ LPWAVEFORMATEX pwfxFormat, DWORD dwSizeAllocated, LPDWORD pdwSizeWritten) PURE;
+ STDMETHOD(GetVolume) (THIS_ LPLONG plVolume) PURE;
+ STDMETHOD(GetPan) (THIS_ LPLONG plPan) PURE;
+ STDMETHOD(GetFrequency) (THIS_ LPDWORD pdwFrequency) PURE;
+ STDMETHOD(GetStatus) (THIS_ LPDWORD pdwStatus) PURE;
+ STDMETHOD(Initialize) (THIS_ LPDIRECTSOUND pDirectSound, LPCDSBUFFERDESC pcDSBufferDesc) PURE;
+ STDMETHOD(Lock) (THIS_ DWORD dwOffset, DWORD dwBytes, LPVOID *ppvAudioPtr1, LPDWORD pdwAudioBytes1,
+ LPVOID *ppvAudioPtr2, LPDWORD pdwAudioBytes2, DWORD dwFlags) PURE;
+ STDMETHOD(Play) (THIS_ DWORD dwReserved1, DWORD dwPriority, DWORD dwFlags) PURE;
+ STDMETHOD(SetCurrentPosition) (THIS_ DWORD dwNewPosition) PURE;
+ STDMETHOD(SetFormat) (THIS_ LPCWAVEFORMATEX pcfxFormat) PURE;
+ STDMETHOD(SetVolume) (THIS_ LONG lVolume) PURE;
+ STDMETHOD(SetPan) (THIS_ LONG lPan) PURE;
+ STDMETHOD(SetFrequency) (THIS_ DWORD dwFrequency) PURE;
+ STDMETHOD(Stop) (THIS) PURE;
+ STDMETHOD(Unlock) (THIS_ LPVOID pvAudioPtr1, DWORD dwAudioBytes1, LPVOID pvAudioPtr2, DWORD dwAudioBytes2) PURE;
+ STDMETHOD(Restore) (THIS) PURE;
+};
+
+#define IDirectSoundBuffer_QueryInterface(p,a,b) IUnknown_QueryInterface(p,a,b)
+#define IDirectSoundBuffer_AddRef(p) IUnknown_AddRef(p)
+#define IDirectSoundBuffer_Release(p) IUnknown_Release(p)
+
+#if !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundBuffer_GetCaps(p,a) (p)->lpVtbl->GetCaps(p,a)
+#define IDirectSoundBuffer_GetCurrentPosition(p,a,b) (p)->lpVtbl->GetCurrentPosition(p,a,b)
+#define IDirectSoundBuffer_GetFormat(p,a,b,c) (p)->lpVtbl->GetFormat(p,a,b,c)
+#define IDirectSoundBuffer_GetVolume(p,a) (p)->lpVtbl->GetVolume(p,a)
+#define IDirectSoundBuffer_GetPan(p,a) (p)->lpVtbl->GetPan(p,a)
+#define IDirectSoundBuffer_GetFrequency(p,a) (p)->lpVtbl->GetFrequency(p,a)
+#define IDirectSoundBuffer_GetStatus(p,a) (p)->lpVtbl->GetStatus(p,a)
+#define IDirectSoundBuffer_Initialize(p,a,b) (p)->lpVtbl->Initialize(p,a,b)
+#define IDirectSoundBuffer_Lock(p,a,b,c,d,e,f,g) (p)->lpVtbl->Lock(p,a,b,c,d,e,f,g)
+#define IDirectSoundBuffer_Play(p,a,b,c) (p)->lpVtbl->Play(p,a,b,c)
+#define IDirectSoundBuffer_SetCurrentPosition(p,a) (p)->lpVtbl->SetCurrentPosition(p,a)
+#define IDirectSoundBuffer_SetFormat(p,a) (p)->lpVtbl->SetFormat(p,a)
+#define IDirectSoundBuffer_SetVolume(p,a) (p)->lpVtbl->SetVolume(p,a)
+#define IDirectSoundBuffer_SetPan(p,a) (p)->lpVtbl->SetPan(p,a)
+#define IDirectSoundBuffer_SetFrequency(p,a) (p)->lpVtbl->SetFrequency(p,a)
+#define IDirectSoundBuffer_Stop(p) (p)->lpVtbl->Stop(p)
+#define IDirectSoundBuffer_Unlock(p,a,b,c,d) (p)->lpVtbl->Unlock(p,a,b,c,d)
+#define IDirectSoundBuffer_Restore(p) (p)->lpVtbl->Restore(p)
+#else // !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundBuffer_GetCaps(p,a) (p)->GetCaps(a)
+#define IDirectSoundBuffer_GetCurrentPosition(p,a,b) (p)->GetCurrentPosition(a,b)
+#define IDirectSoundBuffer_GetFormat(p,a,b,c) (p)->GetFormat(a,b,c)
+#define IDirectSoundBuffer_GetVolume(p,a) (p)->GetVolume(a)
+#define IDirectSoundBuffer_GetPan(p,a) (p)->GetPan(a)
+#define IDirectSoundBuffer_GetFrequency(p,a) (p)->GetFrequency(a)
+#define IDirectSoundBuffer_GetStatus(p,a) (p)->GetStatus(a)
+#define IDirectSoundBuffer_Initialize(p,a,b) (p)->Initialize(a,b)
+#define IDirectSoundBuffer_Lock(p,a,b,c,d,e,f,g) (p)->Lock(a,b,c,d,e,f,g)
+#define IDirectSoundBuffer_Play(p,a,b,c) (p)->Play(a,b,c)
+#define IDirectSoundBuffer_SetCurrentPosition(p,a) (p)->SetCurrentPosition(a)
+#define IDirectSoundBuffer_SetFormat(p,a) (p)->SetFormat(a)
+#define IDirectSoundBuffer_SetVolume(p,a) (p)->SetVolume(a)
+#define IDirectSoundBuffer_SetPan(p,a) (p)->SetPan(a)
+#define IDirectSoundBuffer_SetFrequency(p,a) (p)->SetFrequency(a)
+#define IDirectSoundBuffer_Stop(p) (p)->Stop()
+#define IDirectSoundBuffer_Unlock(p,a,b,c,d) (p)->Unlock(a,b,c,d)
+#define IDirectSoundBuffer_Restore(p) (p)->Restore()
+#endif // !defined(__cplusplus) || defined(CINTERFACE)
+
+#if DIRECTSOUND_VERSION >= 0x0800
+
+//
+// IDirectSoundBuffer8
+//
+
+DEFINE_GUID(IID_IDirectSoundBuffer8, 0x6825a449, 0x7524, 0x4d82, 0x92, 0x0f, 0x50, 0xe3, 0x6a, 0xb3, 0xab, 0x1e);
+
+#undef INTERFACE
+#define INTERFACE IDirectSoundBuffer8
+
+DECLARE_INTERFACE_(IDirectSoundBuffer8, IDirectSoundBuffer)
+{
+ // IUnknown methods
+ STDMETHOD(QueryInterface) (THIS_ REFIID, LPVOID *) PURE;
+ STDMETHOD_(ULONG,AddRef) (THIS) PURE;
+ STDMETHOD_(ULONG,Release) (THIS) PURE;
+
+ // IDirectSoundBuffer methods
+ STDMETHOD(GetCaps) (THIS_ LPDSBCAPS pDSBufferCaps) PURE;
+ STDMETHOD(GetCurrentPosition) (THIS_ LPDWORD pdwCurrentPlayCursor, LPDWORD pdwCurrentWriteCursor) PURE;
+ STDMETHOD(GetFormat) (THIS_ LPWAVEFORMATEX pwfxFormat, DWORD dwSizeAllocated, LPDWORD pdwSizeWritten) PURE;
+ STDMETHOD(GetVolume) (THIS_ LPLONG plVolume) PURE;
+ STDMETHOD(GetPan) (THIS_ LPLONG plPan) PURE;
+ STDMETHOD(GetFrequency) (THIS_ LPDWORD pdwFrequency) PURE;
+ STDMETHOD(GetStatus) (THIS_ LPDWORD pdwStatus) PURE;
+ STDMETHOD(Initialize) (THIS_ LPDIRECTSOUND pDirectSound, LPCDSBUFFERDESC pcDSBufferDesc) PURE;
+ STDMETHOD(Lock) (THIS_ DWORD dwOffset, DWORD dwBytes, LPVOID *ppvAudioPtr1, LPDWORD pdwAudioBytes1,
+ LPVOID *ppvAudioPtr2, LPDWORD pdwAudioBytes2, DWORD dwFlags) PURE;
+ STDMETHOD(Play) (THIS_ DWORD dwReserved1, DWORD dwPriority, DWORD dwFlags) PURE;
+ STDMETHOD(SetCurrentPosition) (THIS_ DWORD dwNewPosition) PURE;
+ STDMETHOD(SetFormat) (THIS_ LPCWAVEFORMATEX pcfxFormat) PURE;
+ STDMETHOD(SetVolume) (THIS_ LONG lVolume) PURE;
+ STDMETHOD(SetPan) (THIS_ LONG lPan) PURE;
+ STDMETHOD(SetFrequency) (THIS_ DWORD dwFrequency) PURE;
+ STDMETHOD(Stop) (THIS) PURE;
+ STDMETHOD(Unlock) (THIS_ LPVOID pvAudioPtr1, DWORD dwAudioBytes1, LPVOID pvAudioPtr2, DWORD dwAudioBytes2) PURE;
+ STDMETHOD(Restore) (THIS) PURE;
+
+ // IDirectSoundBuffer8 methods
+ STDMETHOD(SetFX) (THIS_ DWORD dwEffectsCount, LPDSEFFECTDESC pDSFXDesc, LPDWORD pdwResultCodes) PURE;
+ STDMETHOD(AcquireResources) (THIS_ DWORD dwFlags, DWORD dwEffectsCount, LPDWORD pdwResultCodes) PURE;
+ STDMETHOD(GetObjectInPath) (THIS_ REFGUID rguidObject, DWORD dwIndex, REFGUID rguidInterface, LPVOID *ppObject) PURE;
+};
+
+// Special GUID meaning "select all objects" for use in GetObjectInPath()
+DEFINE_GUID(GUID_All_Objects, 0xaa114de5, 0xc262, 0x4169, 0xa1, 0xc8, 0x23, 0xd6, 0x98, 0xcc, 0x73, 0xb5);
+
+#define IDirectSoundBuffer8_QueryInterface(p,a,b) IUnknown_QueryInterface(p,a,b)
+#define IDirectSoundBuffer8_AddRef(p) IUnknown_AddRef(p)
+#define IDirectSoundBuffer8_Release(p) IUnknown_Release(p)
+
+#define IDirectSoundBuffer8_GetCaps(p,a) IDirectSoundBuffer_GetCaps(p,a)
+#define IDirectSoundBuffer8_GetCurrentPosition(p,a,b) IDirectSoundBuffer_GetCurrentPosition(p,a,b)
+#define IDirectSoundBuffer8_GetFormat(p,a,b,c) IDirectSoundBuffer_GetFormat(p,a,b,c)
+#define IDirectSoundBuffer8_GetVolume(p,a) IDirectSoundBuffer_GetVolume(p,a)
+#define IDirectSoundBuffer8_GetPan(p,a) IDirectSoundBuffer_GetPan(p,a)
+#define IDirectSoundBuffer8_GetFrequency(p,a) IDirectSoundBuffer_GetFrequency(p,a)
+#define IDirectSoundBuffer8_GetStatus(p,a) IDirectSoundBuffer_GetStatus(p,a)
+#define IDirectSoundBuffer8_Initialize(p,a,b) IDirectSoundBuffer_Initialize(p,a,b)
+#define IDirectSoundBuffer8_Lock(p,a,b,c,d,e,f,g) IDirectSoundBuffer_Lock(p,a,b,c,d,e,f,g)
+#define IDirectSoundBuffer8_Play(p,a,b,c) IDirectSoundBuffer_Play(p,a,b,c)
+#define IDirectSoundBuffer8_SetCurrentPosition(p,a) IDirectSoundBuffer_SetCurrentPosition(p,a)
+#define IDirectSoundBuffer8_SetFormat(p,a) IDirectSoundBuffer_SetFormat(p,a)
+#define IDirectSoundBuffer8_SetVolume(p,a) IDirectSoundBuffer_SetVolume(p,a)
+#define IDirectSoundBuffer8_SetPan(p,a) IDirectSoundBuffer_SetPan(p,a)
+#define IDirectSoundBuffer8_SetFrequency(p,a) IDirectSoundBuffer_SetFrequency(p,a)
+#define IDirectSoundBuffer8_Stop(p) IDirectSoundBuffer_Stop(p)
+#define IDirectSoundBuffer8_Unlock(p,a,b,c,d) IDirectSoundBuffer_Unlock(p,a,b,c,d)
+#define IDirectSoundBuffer8_Restore(p) IDirectSoundBuffer_Restore(p)
+
+#if !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundBuffer8_SetFX(p,a,b,c) (p)->lpVtbl->SetFX(p,a,b,c)
+#define IDirectSoundBuffer8_AcquireResources(p,a,b,c) (p)->lpVtbl->AcquireResources(p,a,b,c)
+#define IDirectSoundBuffer8_GetObjectInPath(p,a,b,c,d) (p)->lpVtbl->GetObjectInPath(p,a,b,c,d)
+#else // !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundBuffer8_SetFX(p,a,b,c) (p)->SetFX(a,b,c)
+#define IDirectSoundBuffer8_AcquireResources(p,a,b,c) (p)->AcquireResources(a,b,c)
+#define IDirectSoundBuffer8_GetObjectInPath(p,a,b,c,d) (p)->GetObjectInPath(a,b,c,d)
+#endif // !defined(__cplusplus) || defined(CINTERFACE)
+
+#endif // DIRECTSOUND_VERSION >= 0x0800
+
+//
+// IDirectSound3DListener
+//
+
+DEFINE_GUID(IID_IDirectSound3DListener, 0x279AFA84, 0x4981, 0x11CE, 0xA5, 0x21, 0x00, 0x20, 0xAF, 0x0B, 0xE5, 0x60);
+
+#undef INTERFACE
+#define INTERFACE IDirectSound3DListener
+
+DECLARE_INTERFACE_(IDirectSound3DListener, IUnknown)
+{
+ // IUnknown methods
+ STDMETHOD(QueryInterface) (THIS_ REFIID, LPVOID *) PURE;
+ STDMETHOD_(ULONG,AddRef) (THIS) PURE;
+ STDMETHOD_(ULONG,Release) (THIS) PURE;
+
+ // IDirectSound3DListener methods
+ STDMETHOD(GetAllParameters) (THIS_ LPDS3DLISTENER pListener) PURE;
+ STDMETHOD(GetDistanceFactor) (THIS_ D3DVALUE* pflDistanceFactor) PURE;
+ STDMETHOD(GetDopplerFactor) (THIS_ D3DVALUE* pflDopplerFactor) PURE;
+ STDMETHOD(GetOrientation) (THIS_ D3DVECTOR* pvOrientFront, D3DVECTOR* pvOrientTop) PURE;
+ STDMETHOD(GetPosition) (THIS_ D3DVECTOR* pvPosition) PURE;
+ STDMETHOD(GetRolloffFactor) (THIS_ D3DVALUE* pflRolloffFactor) PURE;
+ STDMETHOD(GetVelocity) (THIS_ D3DVECTOR* pvVelocity) PURE;
+ STDMETHOD(SetAllParameters) (THIS_ LPCDS3DLISTENER pcListener, DWORD dwApply) PURE;
+ STDMETHOD(SetDistanceFactor) (THIS_ D3DVALUE flDistanceFactor, DWORD dwApply) PURE;
+ STDMETHOD(SetDopplerFactor) (THIS_ D3DVALUE flDopplerFactor, DWORD dwApply) PURE;
+ STDMETHOD(SetOrientation) (THIS_ D3DVALUE xFront, D3DVALUE yFront, D3DVALUE zFront,
+ D3DVALUE xTop, D3DVALUE yTop, D3DVALUE zTop, DWORD dwApply) PURE;
+ STDMETHOD(SetPosition) (THIS_ D3DVALUE x, D3DVALUE y, D3DVALUE z, DWORD dwApply) PURE;
+ STDMETHOD(SetRolloffFactor) (THIS_ D3DVALUE flRolloffFactor, DWORD dwApply) PURE;
+ STDMETHOD(SetVelocity) (THIS_ D3DVALUE x, D3DVALUE y, D3DVALUE z, DWORD dwApply) PURE;
+ STDMETHOD(CommitDeferredSettings) (THIS) PURE;
+};
+
+#define IDirectSound3DListener_QueryInterface(p,a,b) IUnknown_QueryInterface(p,a,b)
+#define IDirectSound3DListener_AddRef(p) IUnknown_AddRef(p)
+#define IDirectSound3DListener_Release(p) IUnknown_Release(p)
+
+#if !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSound3DListener_GetAllParameters(p,a) (p)->lpVtbl->GetAllParameters(p,a)
+#define IDirectSound3DListener_GetDistanceFactor(p,a) (p)->lpVtbl->GetDistanceFactor(p,a)
+#define IDirectSound3DListener_GetDopplerFactor(p,a) (p)->lpVtbl->GetDopplerFactor(p,a)
+#define IDirectSound3DListener_GetOrientation(p,a,b) (p)->lpVtbl->GetOrientation(p,a,b)
+#define IDirectSound3DListener_GetPosition(p,a) (p)->lpVtbl->GetPosition(p,a)
+#define IDirectSound3DListener_GetRolloffFactor(p,a) (p)->lpVtbl->GetRolloffFactor(p,a)
+#define IDirectSound3DListener_GetVelocity(p,a) (p)->lpVtbl->GetVelocity(p,a)
+#define IDirectSound3DListener_SetAllParameters(p,a,b) (p)->lpVtbl->SetAllParameters(p,a,b)
+#define IDirectSound3DListener_SetDistanceFactor(p,a,b) (p)->lpVtbl->SetDistanceFactor(p,a,b)
+#define IDirectSound3DListener_SetDopplerFactor(p,a,b) (p)->lpVtbl->SetDopplerFactor(p,a,b)
+#define IDirectSound3DListener_SetOrientation(p,a,b,c,d,e,f,g) (p)->lpVtbl->SetOrientation(p,a,b,c,d,e,f,g)
+#define IDirectSound3DListener_SetPosition(p,a,b,c,d) (p)->lpVtbl->SetPosition(p,a,b,c,d)
+#define IDirectSound3DListener_SetRolloffFactor(p,a,b) (p)->lpVtbl->SetRolloffFactor(p,a,b)
+#define IDirectSound3DListener_SetVelocity(p,a,b,c,d) (p)->lpVtbl->SetVelocity(p,a,b,c,d)
+#define IDirectSound3DListener_CommitDeferredSettings(p) (p)->lpVtbl->CommitDeferredSettings(p)
+#else // !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSound3DListener_GetAllParameters(p,a) (p)->GetAllParameters(a)
+#define IDirectSound3DListener_GetDistanceFactor(p,a) (p)->GetDistanceFactor(a)
+#define IDirectSound3DListener_GetDopplerFactor(p,a) (p)->GetDopplerFactor(a)
+#define IDirectSound3DListener_GetOrientation(p,a,b) (p)->GetOrientation(a,b)
+#define IDirectSound3DListener_GetPosition(p,a) (p)->GetPosition(a)
+#define IDirectSound3DListener_GetRolloffFactor(p,a) (p)->GetRolloffFactor(a)
+#define IDirectSound3DListener_GetVelocity(p,a) (p)->GetVelocity(a)
+#define IDirectSound3DListener_SetAllParameters(p,a,b) (p)->SetAllParameters(a,b)
+#define IDirectSound3DListener_SetDistanceFactor(p,a,b) (p)->SetDistanceFactor(a,b)
+#define IDirectSound3DListener_SetDopplerFactor(p,a,b) (p)->SetDopplerFactor(a,b)
+#define IDirectSound3DListener_SetOrientation(p,a,b,c,d,e,f,g) (p)->SetOrientation(a,b,c,d,e,f,g)
+#define IDirectSound3DListener_SetPosition(p,a,b,c,d) (p)->SetPosition(a,b,c,d)
+#define IDirectSound3DListener_SetRolloffFactor(p,a,b) (p)->SetRolloffFactor(a,b)
+#define IDirectSound3DListener_SetVelocity(p,a,b,c,d) (p)->SetVelocity(a,b,c,d)
+#define IDirectSound3DListener_CommitDeferredSettings(p) (p)->CommitDeferredSettings()
+#endif // !defined(__cplusplus) || defined(CINTERFACE)
+
+//
+// IDirectSound3DBuffer
+//
+
+DEFINE_GUID(IID_IDirectSound3DBuffer, 0x279AFA86, 0x4981, 0x11CE, 0xA5, 0x21, 0x00, 0x20, 0xAF, 0x0B, 0xE5, 0x60);
+
+#undef INTERFACE
+#define INTERFACE IDirectSound3DBuffer
+
+DECLARE_INTERFACE_(IDirectSound3DBuffer, IUnknown)
+{
+ // IUnknown methods
+ STDMETHOD(QueryInterface) (THIS_ REFIID, LPVOID *) PURE;
+ STDMETHOD_(ULONG,AddRef) (THIS) PURE;
+ STDMETHOD_(ULONG,Release) (THIS) PURE;
+
+ // IDirectSound3DBuffer methods
+ STDMETHOD(GetAllParameters) (THIS_ LPDS3DBUFFER pDs3dBuffer) PURE;
+ STDMETHOD(GetConeAngles) (THIS_ LPDWORD pdwInsideConeAngle, LPDWORD pdwOutsideConeAngle) PURE;
+ STDMETHOD(GetConeOrientation) (THIS_ D3DVECTOR* pvOrientation) PURE;
+ STDMETHOD(GetConeOutsideVolume) (THIS_ LPLONG plConeOutsideVolume) PURE;
+ STDMETHOD(GetMaxDistance) (THIS_ D3DVALUE* pflMaxDistance) PURE;
+ STDMETHOD(GetMinDistance) (THIS_ D3DVALUE* pflMinDistance) PURE;
+ STDMETHOD(GetMode) (THIS_ LPDWORD pdwMode) PURE;
+ STDMETHOD(GetPosition) (THIS_ D3DVECTOR* pvPosition) PURE;
+ STDMETHOD(GetVelocity) (THIS_ D3DVECTOR* pvVelocity) PURE;
+ STDMETHOD(SetAllParameters) (THIS_ LPCDS3DBUFFER pcDs3dBuffer, DWORD dwApply) PURE;
+ STDMETHOD(SetConeAngles) (THIS_ DWORD dwInsideConeAngle, DWORD dwOutsideConeAngle, DWORD dwApply) PURE;
+ STDMETHOD(SetConeOrientation) (THIS_ D3DVALUE x, D3DVALUE y, D3DVALUE z, DWORD dwApply) PURE;
+ STDMETHOD(SetConeOutsideVolume) (THIS_ LONG lConeOutsideVolume, DWORD dwApply) PURE;
+ STDMETHOD(SetMaxDistance) (THIS_ D3DVALUE flMaxDistance, DWORD dwApply) PURE;
+ STDMETHOD(SetMinDistance) (THIS_ D3DVALUE flMinDistance, DWORD dwApply) PURE;
+ STDMETHOD(SetMode) (THIS_ DWORD dwMode, DWORD dwApply) PURE;
+ STDMETHOD(SetPosition) (THIS_ D3DVALUE x, D3DVALUE y, D3DVALUE z, DWORD dwApply) PURE;
+ STDMETHOD(SetVelocity) (THIS_ D3DVALUE x, D3DVALUE y, D3DVALUE z, DWORD dwApply) PURE;
+};
+
+#define IDirectSound3DBuffer_QueryInterface(p,a,b) IUnknown_QueryInterface(p,a,b)
+#define IDirectSound3DBuffer_AddRef(p) IUnknown_AddRef(p)
+#define IDirectSound3DBuffer_Release(p) IUnknown_Release(p)
+
+#if !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSound3DBuffer_GetAllParameters(p,a) (p)->lpVtbl->GetAllParameters(p,a)
+#define IDirectSound3DBuffer_GetConeAngles(p,a,b) (p)->lpVtbl->GetConeAngles(p,a,b)
+#define IDirectSound3DBuffer_GetConeOrientation(p,a) (p)->lpVtbl->GetConeOrientation(p,a)
+#define IDirectSound3DBuffer_GetConeOutsideVolume(p,a) (p)->lpVtbl->GetConeOutsideVolume(p,a)
+#define IDirectSound3DBuffer_GetPosition(p,a) (p)->lpVtbl->GetPosition(p,a)
+#define IDirectSound3DBuffer_GetMinDistance(p,a) (p)->lpVtbl->GetMinDistance(p,a)
+#define IDirectSound3DBuffer_GetMaxDistance(p,a) (p)->lpVtbl->GetMaxDistance(p,a)
+#define IDirectSound3DBuffer_GetMode(p,a) (p)->lpVtbl->GetMode(p,a)
+#define IDirectSound3DBuffer_GetVelocity(p,a) (p)->lpVtbl->GetVelocity(p,a)
+#define IDirectSound3DBuffer_SetAllParameters(p,a,b) (p)->lpVtbl->SetAllParameters(p,a,b)
+#define IDirectSound3DBuffer_SetConeAngles(p,a,b,c) (p)->lpVtbl->SetConeAngles(p,a,b,c)
+#define IDirectSound3DBuffer_SetConeOrientation(p,a,b,c,d) (p)->lpVtbl->SetConeOrientation(p,a,b,c,d)
+#define IDirectSound3DBuffer_SetConeOutsideVolume(p,a,b) (p)->lpVtbl->SetConeOutsideVolume(p,a,b)
+#define IDirectSound3DBuffer_SetPosition(p,a,b,c,d) (p)->lpVtbl->SetPosition(p,a,b,c,d)
+#define IDirectSound3DBuffer_SetMinDistance(p,a,b) (p)->lpVtbl->SetMinDistance(p,a,b)
+#define IDirectSound3DBuffer_SetMaxDistance(p,a,b) (p)->lpVtbl->SetMaxDistance(p,a,b)
+#define IDirectSound3DBuffer_SetMode(p,a,b) (p)->lpVtbl->SetMode(p,a,b)
+#define IDirectSound3DBuffer_SetVelocity(p,a,b,c,d) (p)->lpVtbl->SetVelocity(p,a,b,c,d)
+#else // !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSound3DBuffer_GetAllParameters(p,a) (p)->GetAllParameters(a)
+#define IDirectSound3DBuffer_GetConeAngles(p,a,b) (p)->GetConeAngles(a,b)
+#define IDirectSound3DBuffer_GetConeOrientation(p,a) (p)->GetConeOrientation(a)
+#define IDirectSound3DBuffer_GetConeOutsideVolume(p,a) (p)->GetConeOutsideVolume(a)
+#define IDirectSound3DBuffer_GetPosition(p,a) (p)->GetPosition(a)
+#define IDirectSound3DBuffer_GetMinDistance(p,a) (p)->GetMinDistance(a)
+#define IDirectSound3DBuffer_GetMaxDistance(p,a) (p)->GetMaxDistance(a)
+#define IDirectSound3DBuffer_GetMode(p,a) (p)->GetMode(a)
+#define IDirectSound3DBuffer_GetVelocity(p,a) (p)->GetVelocity(a)
+#define IDirectSound3DBuffer_SetAllParameters(p,a,b) (p)->SetAllParameters(a,b)
+#define IDirectSound3DBuffer_SetConeAngles(p,a,b,c) (p)->SetConeAngles(a,b,c)
+#define IDirectSound3DBuffer_SetConeOrientation(p,a,b,c,d) (p)->SetConeOrientation(a,b,c,d)
+#define IDirectSound3DBuffer_SetConeOutsideVolume(p,a,b) (p)->SetConeOutsideVolume(a,b)
+#define IDirectSound3DBuffer_SetPosition(p,a,b,c,d) (p)->SetPosition(a,b,c,d)
+#define IDirectSound3DBuffer_SetMinDistance(p,a,b) (p)->SetMinDistance(a,b)
+#define IDirectSound3DBuffer_SetMaxDistance(p,a,b) (p)->SetMaxDistance(a,b)
+#define IDirectSound3DBuffer_SetMode(p,a,b) (p)->SetMode(a,b)
+#define IDirectSound3DBuffer_SetVelocity(p,a,b,c,d) (p)->SetVelocity(a,b,c,d)
+#endif // !defined(__cplusplus) || defined(CINTERFACE)
+
+//
+// IDirectSoundCapture
+//
+
+DEFINE_GUID(IID_IDirectSoundCapture, 0xb0210781, 0x89cd, 0x11d0, 0xaf, 0x8, 0x0, 0xa0, 0xc9, 0x25, 0xcd, 0x16);
+
+#undef INTERFACE
+#define INTERFACE IDirectSoundCapture
+
+DECLARE_INTERFACE_(IDirectSoundCapture, IUnknown)
+{
+ // IUnknown methods
+ STDMETHOD(QueryInterface) (THIS_ REFIID, LPVOID *) PURE;
+ STDMETHOD_(ULONG,AddRef) (THIS) PURE;
+ STDMETHOD_(ULONG,Release) (THIS) PURE;
+
+ // IDirectSoundCapture methods
+ STDMETHOD(CreateCaptureBuffer) (THIS_ LPCDSCBUFFERDESC pcDSCBufferDesc, LPDIRECTSOUNDCAPTUREBUFFER *ppDSCBuffer, LPUNKNOWN pUnkOuter) PURE;
+ STDMETHOD(GetCaps) (THIS_ LPDSCCAPS pDSCCaps) PURE;
+ STDMETHOD(Initialize) (THIS_ LPCGUID pcGuidDevice) PURE;
+};
+
+#define IDirectSoundCapture_QueryInterface(p,a,b) IUnknown_QueryInterface(p,a,b)
+#define IDirectSoundCapture_AddRef(p) IUnknown_AddRef(p)
+#define IDirectSoundCapture_Release(p) IUnknown_Release(p)
+
+#if !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundCapture_CreateCaptureBuffer(p,a,b,c) (p)->lpVtbl->CreateCaptureBuffer(p,a,b,c)
+#define IDirectSoundCapture_GetCaps(p,a) (p)->lpVtbl->GetCaps(p,a)
+#define IDirectSoundCapture_Initialize(p,a) (p)->lpVtbl->Initialize(p,a)
+#else // !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundCapture_CreateCaptureBuffer(p,a,b,c) (p)->CreateCaptureBuffer(a,b,c)
+#define IDirectSoundCapture_GetCaps(p,a) (p)->GetCaps(a)
+#define IDirectSoundCapture_Initialize(p,a) (p)->Initialize(a)
+#endif // !defined(__cplusplus) || defined(CINTERFACE)
+
+//
+// IDirectSoundCaptureBuffer
+//
+
+DEFINE_GUID(IID_IDirectSoundCaptureBuffer, 0xb0210782, 0x89cd, 0x11d0, 0xaf, 0x8, 0x0, 0xa0, 0xc9, 0x25, 0xcd, 0x16);
+
+#undef INTERFACE
+#define INTERFACE IDirectSoundCaptureBuffer
+
+DECLARE_INTERFACE_(IDirectSoundCaptureBuffer, IUnknown)
+{
+ // IUnknown methods
+ STDMETHOD(QueryInterface) (THIS_ REFIID, LPVOID *) PURE;
+ STDMETHOD_(ULONG,AddRef) (THIS) PURE;
+ STDMETHOD_(ULONG,Release) (THIS) PURE;
+
+ // IDirectSoundCaptureBuffer methods
+ STDMETHOD(GetCaps) (THIS_ LPDSCBCAPS pDSCBCaps) PURE;
+ STDMETHOD(GetCurrentPosition) (THIS_ LPDWORD pdwCapturePosition, LPDWORD pdwReadPosition) PURE;
+ STDMETHOD(GetFormat) (THIS_ LPWAVEFORMATEX pwfxFormat, DWORD dwSizeAllocated, LPDWORD pdwSizeWritten) PURE;
+ STDMETHOD(GetStatus) (THIS_ LPDWORD pdwStatus) PURE;
+ STDMETHOD(Initialize) (THIS_ LPDIRECTSOUNDCAPTURE pDirectSoundCapture, LPCDSCBUFFERDESC pcDSCBufferDesc) PURE;
+ STDMETHOD(Lock) (THIS_ DWORD dwOffset, DWORD dwBytes, LPVOID *ppvAudioPtr1, LPDWORD pdwAudioBytes1,
+ LPVOID *ppvAudioPtr2, LPDWORD pdwAudioBytes2, DWORD dwFlags) PURE;
+ STDMETHOD(Start) (THIS_ DWORD dwFlags) PURE;
+ STDMETHOD(Stop) (THIS) PURE;
+ STDMETHOD(Unlock) (THIS_ LPVOID pvAudioPtr1, DWORD dwAudioBytes1, LPVOID pvAudioPtr2, DWORD dwAudioBytes2) PURE;
+};
+
+#define IDirectSoundCaptureBuffer_QueryInterface(p,a,b) IUnknown_QueryInterface(p,a,b)
+#define IDirectSoundCaptureBuffer_AddRef(p) IUnknown_AddRef(p)
+#define IDirectSoundCaptureBuffer_Release(p) IUnknown_Release(p)
+
+#if !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundCaptureBuffer_GetCaps(p,a) (p)->lpVtbl->GetCaps(p,a)
+#define IDirectSoundCaptureBuffer_GetCurrentPosition(p,a,b) (p)->lpVtbl->GetCurrentPosition(p,a,b)
+#define IDirectSoundCaptureBuffer_GetFormat(p,a,b,c) (p)->lpVtbl->GetFormat(p,a,b,c)
+#define IDirectSoundCaptureBuffer_GetStatus(p,a) (p)->lpVtbl->GetStatus(p,a)
+#define IDirectSoundCaptureBuffer_Initialize(p,a,b) (p)->lpVtbl->Initialize(p,a,b)
+#define IDirectSoundCaptureBuffer_Lock(p,a,b,c,d,e,f,g) (p)->lpVtbl->Lock(p,a,b,c,d,e,f,g)
+#define IDirectSoundCaptureBuffer_Start(p,a) (p)->lpVtbl->Start(p,a)
+#define IDirectSoundCaptureBuffer_Stop(p) (p)->lpVtbl->Stop(p)
+#define IDirectSoundCaptureBuffer_Unlock(p,a,b,c,d) (p)->lpVtbl->Unlock(p,a,b,c,d)
+#else // !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundCaptureBuffer_GetCaps(p,a) (p)->GetCaps(a)
+#define IDirectSoundCaptureBuffer_GetCurrentPosition(p,a,b) (p)->GetCurrentPosition(a,b)
+#define IDirectSoundCaptureBuffer_GetFormat(p,a,b,c) (p)->GetFormat(a,b,c)
+#define IDirectSoundCaptureBuffer_GetStatus(p,a) (p)->GetStatus(a)
+#define IDirectSoundCaptureBuffer_Initialize(p,a,b) (p)->Initialize(a,b)
+#define IDirectSoundCaptureBuffer_Lock(p,a,b,c,d,e,f,g) (p)->Lock(a,b,c,d,e,f,g)
+#define IDirectSoundCaptureBuffer_Start(p,a) (p)->Start(a)
+#define IDirectSoundCaptureBuffer_Stop(p) (p)->Stop()
+#define IDirectSoundCaptureBuffer_Unlock(p,a,b,c,d) (p)->Unlock(a,b,c,d)
+#endif // !defined(__cplusplus) || defined(CINTERFACE)
+
+
+#if DIRECTSOUND_VERSION >= 0x0800
+
+//
+// IDirectSoundCaptureBuffer8
+//
+
+DEFINE_GUID(IID_IDirectSoundCaptureBuffer8, 0x990df4, 0xdbb, 0x4872, 0x83, 0x3e, 0x6d, 0x30, 0x3e, 0x80, 0xae, 0xb6);
+
+#undef INTERFACE
+#define INTERFACE IDirectSoundCaptureBuffer8
+
+DECLARE_INTERFACE_(IDirectSoundCaptureBuffer8, IDirectSoundCaptureBuffer)
+{
+    // IUnknown methods
+    STDMETHOD(QueryInterface) (THIS_ REFIID, LPVOID *) PURE;
+    STDMETHOD_(ULONG,AddRef) (THIS) PURE;
+    STDMETHOD_(ULONG,Release) (THIS) PURE;
+
+    // IDirectSoundCaptureBuffer methods
+    STDMETHOD(GetCaps) (THIS_ LPDSCBCAPS pDSCBCaps) PURE;
+    STDMETHOD(GetCurrentPosition) (THIS_ LPDWORD pdwCapturePosition, LPDWORD pdwReadPosition) PURE;
+    STDMETHOD(GetFormat) (THIS_ LPWAVEFORMATEX pwfxFormat, DWORD dwSizeAllocated, LPDWORD pdwSizeWritten) PURE;
+    STDMETHOD(GetStatus) (THIS_ LPDWORD pdwStatus) PURE;
+    STDMETHOD(Initialize) (THIS_ LPDIRECTSOUNDCAPTURE pDirectSoundCapture, LPCDSCBUFFERDESC pcDSCBufferDesc) PURE;
+    STDMETHOD(Lock) (THIS_ DWORD dwOffset, DWORD dwBytes, LPVOID *ppvAudioPtr1, LPDWORD pdwAudioBytes1,
+                     LPVOID *ppvAudioPtr2, LPDWORD pdwAudioBytes2, DWORD dwFlags) PURE;
+    STDMETHOD(Start) (THIS_ DWORD dwFlags) PURE;
+    STDMETHOD(Stop) (THIS) PURE;
+    STDMETHOD(Unlock) (THIS_ LPVOID pvAudioPtr1, DWORD dwAudioBytes1, LPVOID pvAudioPtr2, DWORD dwAudioBytes2) PURE;
+
+    // IDirectSoundCaptureBuffer8 methods
+    STDMETHOD(GetObjectInPath) (THIS_ REFGUID rguidObject, DWORD dwIndex, REFGUID rguidInterface, LPVOID *ppObject) PURE;
+    STDMETHOD(GetFXStatus) (THIS_ DWORD dwFXCount, LPDWORD pdwFXStatus) PURE;
+};
+
+#define IDirectSoundCaptureBuffer8_QueryInterface(p,a,b) IUnknown_QueryInterface(p,a,b)
+#define IDirectSoundCaptureBuffer8_AddRef(p) IUnknown_AddRef(p)
+#define IDirectSoundCaptureBuffer8_Release(p) IUnknown_Release(p)
+
+#define IDirectSoundCaptureBuffer8_GetCaps(p,a) IDirectSoundCaptureBuffer_GetCaps(p,a)
+#define IDirectSoundCaptureBuffer8_GetCurrentPosition(p,a,b) IDirectSoundCaptureBuffer_GetCurrentPosition(p,a,b)
+#define IDirectSoundCaptureBuffer8_GetFormat(p,a,b,c) IDirectSoundCaptureBuffer_GetFormat(p,a,b,c)
+#define IDirectSoundCaptureBuffer8_GetStatus(p,a) IDirectSoundCaptureBuffer_GetStatus(p,a)
+#define IDirectSoundCaptureBuffer8_Initialize(p,a,b) IDirectSoundCaptureBuffer_Initialize(p,a,b)
+#define IDirectSoundCaptureBuffer8_Lock(p,a,b,c,d,e,f,g) IDirectSoundCaptureBuffer_Lock(p,a,b,c,d,e,f,g)
+#define IDirectSoundCaptureBuffer8_Start(p,a) IDirectSoundCaptureBuffer_Start(p,a)
+#define IDirectSoundCaptureBuffer8_Stop(p) IDirectSoundCaptureBuffer_Stop(p)
+#define IDirectSoundCaptureBuffer8_Unlock(p,a,b,c,d) IDirectSoundCaptureBuffer_Unlock(p,a,b,c,d)
+
+#if !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundCaptureBuffer8_GetObjectInPath(p,a,b,c,d) (p)->lpVtbl->GetObjectInPath(p,a,b,c,d)
+#define IDirectSoundCaptureBuffer8_GetFXStatus(p,a,b) (p)->lpVtbl->GetFXStatus(p,a,b)
+#else // !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundCaptureBuffer8_GetObjectInPath(p,a,b,c,d) (p)->GetObjectInPath(a,b,c,d)
+#define IDirectSoundCaptureBuffer8_GetFXStatus(p,a,b) (p)->GetFXStatus(a,b)
+#endif // !defined(__cplusplus) || defined(CINTERFACE)
+
+#endif // DIRECTSOUND_VERSION >= 0x0800
+
+//
+// IDirectSoundNotify
+//
+
+DEFINE_GUID(IID_IDirectSoundNotify, 0xb0210783, 0x89cd, 0x11d0, 0xaf, 0x8, 0x0, 0xa0, 0xc9, 0x25, 0xcd, 0x16);
+
+#undef INTERFACE
+#define INTERFACE IDirectSoundNotify
+
+DECLARE_INTERFACE_(IDirectSoundNotify, IUnknown)
+{
+ // IUnknown methods
+ STDMETHOD(QueryInterface) (THIS_ REFIID, LPVOID *) PURE;
+ STDMETHOD_(ULONG,AddRef) (THIS) PURE;
+ STDMETHOD_(ULONG,Release) (THIS) PURE;
+
+ // IDirectSoundNotify methods
+ STDMETHOD(SetNotificationPositions) (THIS_ DWORD dwPositionNotifies, LPCDSBPOSITIONNOTIFY pcPositionNotifies) PURE;
+};
+
+#define IDirectSoundNotify_QueryInterface(p,a,b) IUnknown_QueryInterface(p,a,b)
+#define IDirectSoundNotify_AddRef(p) IUnknown_AddRef(p)
+#define IDirectSoundNotify_Release(p) IUnknown_Release(p)
+
+#if !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundNotify_SetNotificationPositions(p,a,b) (p)->lpVtbl->SetNotificationPositions(p,a,b)
+#else // !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundNotify_SetNotificationPositions(p,a,b) (p)->SetNotificationPositions(a,b)
+#endif // !defined(__cplusplus) || defined(CINTERFACE)
+
+//
+// IKsPropertySet
+//
+
+#ifndef _IKsPropertySet_
+#define _IKsPropertySet_
+
+#ifdef __cplusplus
+// 'struct' not 'class' per the way DECLARE_INTERFACE_ is defined
+struct IKsPropertySet;
+#endif // __cplusplus
+
+typedef struct IKsPropertySet *LPKSPROPERTYSET;
+
+#define KSPROPERTY_SUPPORT_GET 0x00000001
+#define KSPROPERTY_SUPPORT_SET 0x00000002
+
+DEFINE_GUID(IID_IKsPropertySet, 0x31efac30, 0x515c, 0x11d0, 0xa9, 0xaa, 0x00, 0xaa, 0x00, 0x61, 0xbe, 0x93);
+
+#undef INTERFACE
+#define INTERFACE IKsPropertySet
+
+DECLARE_INTERFACE_(IKsPropertySet, IUnknown)
+{
+ // IUnknown methods
+ STDMETHOD(QueryInterface) (THIS_ REFIID, LPVOID *) PURE;
+ STDMETHOD_(ULONG,AddRef) (THIS) PURE;
+ STDMETHOD_(ULONG,Release) (THIS) PURE;
+
+ // IKsPropertySet methods
+ STDMETHOD(Get) (THIS_ REFGUID rguidPropSet, ULONG ulId, LPVOID pInstanceData, ULONG ulInstanceLength,
+ LPVOID pPropertyData, ULONG ulDataLength, PULONG pulBytesReturned) PURE;
+ STDMETHOD(Set) (THIS_ REFGUID rguidPropSet, ULONG ulId, LPVOID pInstanceData, ULONG ulInstanceLength,
+ LPVOID pPropertyData, ULONG ulDataLength) PURE;
+ STDMETHOD(QuerySupport) (THIS_ REFGUID rguidPropSet, ULONG ulId, PULONG pulTypeSupport) PURE;
+};
+
+#define IKsPropertySet_QueryInterface(p,a,b) IUnknown_QueryInterface(p,a,b)
+#define IKsPropertySet_AddRef(p) IUnknown_AddRef(p)
+#define IKsPropertySet_Release(p) IUnknown_Release(p)
+
+#if !defined(__cplusplus) || defined(CINTERFACE)
+#define IKsPropertySet_Get(p,a,b,c,d,e,f,g) (p)->lpVtbl->Get(p,a,b,c,d,e,f,g)
+#define IKsPropertySet_Set(p,a,b,c,d,e,f) (p)->lpVtbl->Set(p,a,b,c,d,e,f)
+#define IKsPropertySet_QuerySupport(p,a,b,c) (p)->lpVtbl->QuerySupport(p,a,b,c)
+#else // !defined(__cplusplus) || defined(CINTERFACE)
+#define IKsPropertySet_Get(p,a,b,c,d,e,f,g) (p)->Get(a,b,c,d,e,f,g)
+#define IKsPropertySet_Set(p,a,b,c,d,e,f) (p)->Set(a,b,c,d,e,f)
+#define IKsPropertySet_QuerySupport(p,a,b,c) (p)->QuerySupport(a,b,c)
+#endif // !defined(__cplusplus) || defined(CINTERFACE)
+
+#endif // _IKsPropertySet_
+
+#if DIRECTSOUND_VERSION >= 0x0800
+
+//
+// IDirectSoundFXGargle
+//
+
+DEFINE_GUID(IID_IDirectSoundFXGargle, 0xd616f352, 0xd622, 0x11ce, 0xaa, 0xc5, 0x00, 0x20, 0xaf, 0x0b, 0x99, 0xa3);
+
+typedef struct _DSFXGargle
+{
+ DWORD dwRateHz; // Rate of modulation in hz
+ DWORD dwWaveShape; // DSFXGARGLE_WAVE_xxx
+} DSFXGargle, *LPDSFXGargle;
+
+#define DSFXGARGLE_WAVE_TRIANGLE 0
+#define DSFXGARGLE_WAVE_SQUARE 1
+
+typedef const DSFXGargle *LPCDSFXGargle;
+
+#define DSFXGARGLE_RATEHZ_MIN 1
+#define DSFXGARGLE_RATEHZ_MAX 1000
+
+#undef INTERFACE
+#define INTERFACE IDirectSoundFXGargle
+
+DECLARE_INTERFACE_(IDirectSoundFXGargle, IUnknown)
+{
+ // IUnknown methods
+ STDMETHOD(QueryInterface) (THIS_ REFIID, LPVOID *) PURE;
+ STDMETHOD_(ULONG,AddRef) (THIS) PURE;
+ STDMETHOD_(ULONG,Release) (THIS) PURE;
+
+ // IDirectSoundFXGargle methods
+ STDMETHOD(SetAllParameters) (THIS_ LPCDSFXGargle pcDsFxGargle) PURE;
+ STDMETHOD(GetAllParameters) (THIS_ LPDSFXGargle pDsFxGargle) PURE;
+};
+
+#define IDirectSoundFXGargle_QueryInterface(p,a,b) IUnknown_QueryInterface(p,a,b)
+#define IDirectSoundFXGargle_AddRef(p) IUnknown_AddRef(p)
+#define IDirectSoundFXGargle_Release(p) IUnknown_Release(p)
+
+#if !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundFXGargle_SetAllParameters(p,a) (p)->lpVtbl->SetAllParameters(p,a)
+#define IDirectSoundFXGargle_GetAllParameters(p,a) (p)->lpVtbl->GetAllParameters(p,a)
+#else // !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundFXGargle_SetAllParameters(p,a) (p)->SetAllParameters(a)
+#define IDirectSoundFXGargle_GetAllParameters(p,a) (p)->GetAllParameters(a)
+#endif // !defined(__cplusplus) || defined(CINTERFACE)
+
+//
+// IDirectSoundFXChorus
+//
+
+DEFINE_GUID(IID_IDirectSoundFXChorus, 0x880842e3, 0x145f, 0x43e6, 0xa9, 0x34, 0xa7, 0x18, 0x06, 0xe5, 0x05, 0x47);
+
+typedef struct _DSFXChorus
+{
+ FLOAT fWetDryMix;
+ FLOAT fDepth;
+ FLOAT fFeedback;
+ FLOAT fFrequency;
+ LONG lWaveform; // LFO shape; DSFXCHORUS_WAVE_xxx
+ FLOAT fDelay;
+ LONG lPhase;
+} DSFXChorus, *LPDSFXChorus;
+
+typedef const DSFXChorus *LPCDSFXChorus;
+
+#define DSFXCHORUS_WAVE_TRIANGLE 0
+#define DSFXCHORUS_WAVE_SIN 1
+
+#define DSFXCHORUS_WETDRYMIX_MIN 0.0f
+#define DSFXCHORUS_WETDRYMIX_MAX 100.0f
+#define DSFXCHORUS_DEPTH_MIN 0.0f
+#define DSFXCHORUS_DEPTH_MAX 100.0f
+#define DSFXCHORUS_FEEDBACK_MIN -99.0f
+#define DSFXCHORUS_FEEDBACK_MAX 99.0f
+#define DSFXCHORUS_FREQUENCY_MIN 0.0f
+#define DSFXCHORUS_FREQUENCY_MAX 10.0f
+#define DSFXCHORUS_DELAY_MIN 0.0f
+#define DSFXCHORUS_DELAY_MAX 20.0f
+#define DSFXCHORUS_PHASE_MIN 0
+#define DSFXCHORUS_PHASE_MAX 4
+
+#define DSFXCHORUS_PHASE_NEG_180 0
+#define DSFXCHORUS_PHASE_NEG_90 1
+#define DSFXCHORUS_PHASE_ZERO 2
+#define DSFXCHORUS_PHASE_90 3
+#define DSFXCHORUS_PHASE_180 4
+
+#undef INTERFACE
+#define INTERFACE IDirectSoundFXChorus
+
+DECLARE_INTERFACE_(IDirectSoundFXChorus, IUnknown)
+{
+ // IUnknown methods
+ STDMETHOD(QueryInterface) (THIS_ REFIID, LPVOID *) PURE;
+ STDMETHOD_(ULONG,AddRef) (THIS) PURE;
+ STDMETHOD_(ULONG,Release) (THIS) PURE;
+
+ // IDirectSoundFXChorus methods
+ STDMETHOD(SetAllParameters) (THIS_ LPCDSFXChorus pcDsFxChorus) PURE;
+ STDMETHOD(GetAllParameters) (THIS_ LPDSFXChorus pDsFxChorus) PURE;
+};
+
+#define IDirectSoundFXChorus_QueryInterface(p,a,b) IUnknown_QueryInterface(p,a,b)
+#define IDirectSoundFXChorus_AddRef(p) IUnknown_AddRef(p)
+#define IDirectSoundFXChorus_Release(p) IUnknown_Release(p)
+
+#if !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundFXChorus_SetAllParameters(p,a) (p)->lpVtbl->SetAllParameters(p,a)
+#define IDirectSoundFXChorus_GetAllParameters(p,a) (p)->lpVtbl->GetAllParameters(p,a)
+#else // !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundFXChorus_SetAllParameters(p,a) (p)->SetAllParameters(a)
+#define IDirectSoundFXChorus_GetAllParameters(p,a) (p)->GetAllParameters(a)
+#endif // !defined(__cplusplus) || defined(CINTERFACE)
+
+//
+// IDirectSoundFXFlanger
+//
+
+DEFINE_GUID(IID_IDirectSoundFXFlanger, 0x903e9878, 0x2c92, 0x4072, 0x9b, 0x2c, 0xea, 0x68, 0xf5, 0x39, 0x67, 0x83);
+
+typedef struct _DSFXFlanger
+{
+ FLOAT fWetDryMix;
+ FLOAT fDepth;
+ FLOAT fFeedback;
+ FLOAT fFrequency;
+ LONG lWaveform;
+ FLOAT fDelay;
+ LONG lPhase;
+} DSFXFlanger, *LPDSFXFlanger;
+
+typedef const DSFXFlanger *LPCDSFXFlanger;
+
+#define DSFXFLANGER_WAVE_TRIANGLE 0
+#define DSFXFLANGER_WAVE_SIN 1
+
+#define DSFXFLANGER_WETDRYMIX_MIN 0.0f
+#define DSFXFLANGER_WETDRYMIX_MAX 100.0f
+#define DSFXFLANGER_FREQUENCY_MIN 0.0f
+#define DSFXFLANGER_FREQUENCY_MAX 10.0f
+#define DSFXFLANGER_DEPTH_MIN 0.0f
+#define DSFXFLANGER_DEPTH_MAX 100.0f
+#define DSFXFLANGER_PHASE_MIN 0
+#define DSFXFLANGER_PHASE_MAX 4
+#define DSFXFLANGER_FEEDBACK_MIN -99.0f
+#define DSFXFLANGER_FEEDBACK_MAX 99.0f
+#define DSFXFLANGER_DELAY_MIN 0.0f
+#define DSFXFLANGER_DELAY_MAX 4.0f
+
+#define DSFXFLANGER_PHASE_NEG_180 0
+#define DSFXFLANGER_PHASE_NEG_90 1
+#define DSFXFLANGER_PHASE_ZERO 2
+#define DSFXFLANGER_PHASE_90 3
+#define DSFXFLANGER_PHASE_180 4
+
+#undef INTERFACE
+#define INTERFACE IDirectSoundFXFlanger
+
+DECLARE_INTERFACE_(IDirectSoundFXFlanger, IUnknown)
+{
+ // IUnknown methods
+ STDMETHOD(QueryInterface) (THIS_ REFIID, LPVOID *) PURE;
+ STDMETHOD_(ULONG,AddRef) (THIS) PURE;
+ STDMETHOD_(ULONG,Release) (THIS) PURE;
+
+ // IDirectSoundFXFlanger methods
+ STDMETHOD(SetAllParameters) (THIS_ LPCDSFXFlanger pcDsFxFlanger) PURE;
+ STDMETHOD(GetAllParameters) (THIS_ LPDSFXFlanger pDsFxFlanger) PURE;
+};
+
+#define IDirectSoundFXFlanger_QueryInterface(p,a,b) IUnknown_QueryInterface(p,a,b)
+#define IDirectSoundFXFlanger_AddRef(p) IUnknown_AddRef(p)
+#define IDirectSoundFXFlanger_Release(p) IUnknown_Release(p)
+
+#if !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundFXFlanger_SetAllParameters(p,a) (p)->lpVtbl->SetAllParameters(p,a)
+#define IDirectSoundFXFlanger_GetAllParameters(p,a) (p)->lpVtbl->GetAllParameters(p,a)
+#else // !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundFXFlanger_SetAllParameters(p,a) (p)->SetAllParameters(a)
+#define IDirectSoundFXFlanger_GetAllParameters(p,a) (p)->GetAllParameters(a)
+#endif // !defined(__cplusplus) || defined(CINTERFACE)
+
+//
+// IDirectSoundFXEcho
+//
+
+DEFINE_GUID(IID_IDirectSoundFXEcho, 0x8bd28edf, 0x50db, 0x4e92, 0xa2, 0xbd, 0x44, 0x54, 0x88, 0xd1, 0xed, 0x42);
+
+typedef struct _DSFXEcho
+{
+ FLOAT fWetDryMix;
+ FLOAT fFeedback;
+ FLOAT fLeftDelay;
+ FLOAT fRightDelay;
+ LONG lPanDelay;
+} DSFXEcho, *LPDSFXEcho;
+
+typedef const DSFXEcho *LPCDSFXEcho;
+
+#define DSFXECHO_WETDRYMIX_MIN 0.0f
+#define DSFXECHO_WETDRYMIX_MAX 100.0f
+#define DSFXECHO_FEEDBACK_MIN 0.0f
+#define DSFXECHO_FEEDBACK_MAX 100.0f
+#define DSFXECHO_LEFTDELAY_MIN 1.0f
+#define DSFXECHO_LEFTDELAY_MAX 2000.0f
+#define DSFXECHO_RIGHTDELAY_MIN 1.0f
+#define DSFXECHO_RIGHTDELAY_MAX 2000.0f
+#define DSFXECHO_PANDELAY_MIN 0
+#define DSFXECHO_PANDELAY_MAX 1
+
+#undef INTERFACE
+#define INTERFACE IDirectSoundFXEcho
+
+DECLARE_INTERFACE_(IDirectSoundFXEcho, IUnknown)
+{
+ // IUnknown methods
+ STDMETHOD(QueryInterface) (THIS_ REFIID, LPVOID *) PURE;
+ STDMETHOD_(ULONG,AddRef) (THIS) PURE;
+ STDMETHOD_(ULONG,Release) (THIS) PURE;
+
+ // IDirectSoundFXEcho methods
+ STDMETHOD(SetAllParameters) (THIS_ LPCDSFXEcho pcDsFxEcho) PURE;
+ STDMETHOD(GetAllParameters) (THIS_ LPDSFXEcho pDsFxEcho) PURE;
+};
+
+#define IDirectSoundFXEcho_QueryInterface(p,a,b) IUnknown_QueryInterface(p,a,b)
+#define IDirectSoundFXEcho_AddRef(p) IUnknown_AddRef(p)
+#define IDirectSoundFXEcho_Release(p) IUnknown_Release(p)
+
+#if !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundFXEcho_SetAllParameters(p,a) (p)->lpVtbl->SetAllParameters(p,a)
+#define IDirectSoundFXEcho_GetAllParameters(p,a) (p)->lpVtbl->GetAllParameters(p,a)
+#else // !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundFXEcho_SetAllParameters(p,a) (p)->SetAllParameters(a)
+#define IDirectSoundFXEcho_GetAllParameters(p,a) (p)->GetAllParameters(a)
+#endif // !defined(__cplusplus) || defined(CINTERFACE)
+
+//
+// IDirectSoundFXDistortion
+//
+
+DEFINE_GUID(IID_IDirectSoundFXDistortion, 0x8ecf4326, 0x455f, 0x4d8b, 0xbd, 0xa9, 0x8d, 0x5d, 0x3e, 0x9e, 0x3e, 0x0b);
+
+typedef struct _DSFXDistortion
+{
+ FLOAT fGain;
+ FLOAT fEdge;
+ FLOAT fPostEQCenterFrequency;
+ FLOAT fPostEQBandwidth;
+ FLOAT fPreLowpassCutoff;
+} DSFXDistortion, *LPDSFXDistortion;
+
+typedef const DSFXDistortion *LPCDSFXDistortion;
+
+#define DSFXDISTORTION_GAIN_MIN -60.0f
+#define DSFXDISTORTION_GAIN_MAX 0.0f
+#define DSFXDISTORTION_EDGE_MIN 0.0f
+#define DSFXDISTORTION_EDGE_MAX 100.0f
+#define DSFXDISTORTION_POSTEQCENTERFREQUENCY_MIN 100.0f
+#define DSFXDISTORTION_POSTEQCENTERFREQUENCY_MAX 8000.0f
+#define DSFXDISTORTION_POSTEQBANDWIDTH_MIN 100.0f
+#define DSFXDISTORTION_POSTEQBANDWIDTH_MAX 8000.0f
+#define DSFXDISTORTION_PRELOWPASSCUTOFF_MIN 100.0f
+#define DSFXDISTORTION_PRELOWPASSCUTOFF_MAX 8000.0f
+
+#undef INTERFACE
+#define INTERFACE IDirectSoundFXDistortion
+
+DECLARE_INTERFACE_(IDirectSoundFXDistortion, IUnknown)
+{
+ // IUnknown methods
+ STDMETHOD(QueryInterface) (THIS_ REFIID, LPVOID *) PURE;
+ STDMETHOD_(ULONG,AddRef) (THIS) PURE;
+ STDMETHOD_(ULONG,Release) (THIS) PURE;
+
+ // IDirectSoundFXDistortion methods
+ STDMETHOD(SetAllParameters) (THIS_ LPCDSFXDistortion pcDsFxDistortion) PURE;
+ STDMETHOD(GetAllParameters) (THIS_ LPDSFXDistortion pDsFxDistortion) PURE;
+};
+
+#define IDirectSoundFXDistortion_QueryInterface(p,a,b) IUnknown_QueryInterface(p,a,b)
+#define IDirectSoundFXDistortion_AddRef(p) IUnknown_AddRef(p)
+#define IDirectSoundFXDistortion_Release(p) IUnknown_Release(p)
+
+#if !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundFXDistortion_SetAllParameters(p,a) (p)->lpVtbl->SetAllParameters(p,a)
+#define IDirectSoundFXDistortion_GetAllParameters(p,a) (p)->lpVtbl->GetAllParameters(p,a)
+#else // !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundFXDistortion_SetAllParameters(p,a) (p)->SetAllParameters(a)
+#define IDirectSoundFXDistortion_GetAllParameters(p,a) (p)->GetAllParameters(a)
+#endif // !defined(__cplusplus) || defined(CINTERFACE)
+
+//
+// IDirectSoundFXCompressor
+//
+
+DEFINE_GUID(IID_IDirectSoundFXCompressor, 0x4bbd1154, 0x62f6, 0x4e2c, 0xa1, 0x5c, 0xd3, 0xb6, 0xc4, 0x17, 0xf7, 0xa0);
+
+typedef struct _DSFXCompressor
+{
+ FLOAT fGain;
+ FLOAT fAttack;
+ FLOAT fRelease;
+ FLOAT fThreshold;
+ FLOAT fRatio;
+ FLOAT fPredelay;
+} DSFXCompressor, *LPDSFXCompressor;
+
+typedef const DSFXCompressor *LPCDSFXCompressor;
+
+#define DSFXCOMPRESSOR_GAIN_MIN -60.0f
+#define DSFXCOMPRESSOR_GAIN_MAX 60.0f
+#define DSFXCOMPRESSOR_ATTACK_MIN 0.01f
+#define DSFXCOMPRESSOR_ATTACK_MAX 500.0f
+#define DSFXCOMPRESSOR_RELEASE_MIN 50.0f
+#define DSFXCOMPRESSOR_RELEASE_MAX 3000.0f
+#define DSFXCOMPRESSOR_THRESHOLD_MIN -60.0f
+#define DSFXCOMPRESSOR_THRESHOLD_MAX 0.0f
+#define DSFXCOMPRESSOR_RATIO_MIN 1.0f
+#define DSFXCOMPRESSOR_RATIO_MAX 100.0f
+#define DSFXCOMPRESSOR_PREDELAY_MIN 0.0f
+#define DSFXCOMPRESSOR_PREDELAY_MAX 4.0f
+
+#undef INTERFACE
+#define INTERFACE IDirectSoundFXCompressor
+
+DECLARE_INTERFACE_(IDirectSoundFXCompressor, IUnknown)
+{
+ // IUnknown methods
+ STDMETHOD(QueryInterface) (THIS_ REFIID, LPVOID *) PURE;
+ STDMETHOD_(ULONG,AddRef) (THIS) PURE;
+ STDMETHOD_(ULONG,Release) (THIS) PURE;
+
+ // IDirectSoundFXCompressor methods
+ STDMETHOD(SetAllParameters) (THIS_ LPCDSFXCompressor pcDsFxCompressor) PURE;
+ STDMETHOD(GetAllParameters) (THIS_ LPDSFXCompressor pDsFxCompressor) PURE;
+};
+
+#define IDirectSoundFXCompressor_QueryInterface(p,a,b) IUnknown_QueryInterface(p,a,b)
+#define IDirectSoundFXCompressor_AddRef(p) IUnknown_AddRef(p)
+#define IDirectSoundFXCompressor_Release(p) IUnknown_Release(p)
+
+#if !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundFXCompressor_SetAllParameters(p,a) (p)->lpVtbl->SetAllParameters(p,a)
+#define IDirectSoundFXCompressor_GetAllParameters(p,a) (p)->lpVtbl->GetAllParameters(p,a)
+#else // !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundFXCompressor_SetAllParameters(p,a) (p)->SetAllParameters(a)
+#define IDirectSoundFXCompressor_GetAllParameters(p,a) (p)->GetAllParameters(a)
+#endif // !defined(__cplusplus) || defined(CINTERFACE)
+
+//
+// IDirectSoundFXParamEq
+//
+
+DEFINE_GUID(IID_IDirectSoundFXParamEq, 0xc03ca9fe, 0xfe90, 0x4204, 0x80, 0x78, 0x82, 0x33, 0x4c, 0xd1, 0x77, 0xda);
+
+typedef struct _DSFXParamEq
+{
+ FLOAT fCenter;
+ FLOAT fBandwidth;
+ FLOAT fGain;
+} DSFXParamEq, *LPDSFXParamEq;
+
+typedef const DSFXParamEq *LPCDSFXParamEq;
+
+#define DSFXPARAMEQ_CENTER_MIN 80.0f
+#define DSFXPARAMEQ_CENTER_MAX 16000.0f
+#define DSFXPARAMEQ_BANDWIDTH_MIN 1.0f
+#define DSFXPARAMEQ_BANDWIDTH_MAX 36.0f
+#define DSFXPARAMEQ_GAIN_MIN -15.0f
+#define DSFXPARAMEQ_GAIN_MAX 15.0f
+
+#undef INTERFACE
+#define INTERFACE IDirectSoundFXParamEq
+
+DECLARE_INTERFACE_(IDirectSoundFXParamEq, IUnknown)
+{
+ // IUnknown methods
+ STDMETHOD(QueryInterface) (THIS_ REFIID, LPVOID *) PURE;
+ STDMETHOD_(ULONG,AddRef) (THIS) PURE;
+ STDMETHOD_(ULONG,Release) (THIS) PURE;
+
+ // IDirectSoundFXParamEq methods
+ STDMETHOD(SetAllParameters) (THIS_ LPCDSFXParamEq pcDsFxParamEq) PURE;
+ STDMETHOD(GetAllParameters) (THIS_ LPDSFXParamEq pDsFxParamEq) PURE;
+};
+
+#define IDirectSoundFXParamEq_QueryInterface(p,a,b) IUnknown_QueryInterface(p,a,b)
+#define IDirectSoundFXParamEq_AddRef(p) IUnknown_AddRef(p)
+#define IDirectSoundFXParamEq_Release(p) IUnknown_Release(p)
+
+#if !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundFXParamEq_SetAllParameters(p,a) (p)->lpVtbl->SetAllParameters(p,a)
+#define IDirectSoundFXParamEq_GetAllParameters(p,a) (p)->lpVtbl->GetAllParameters(p,a)
+#else // !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundFXParamEq_SetAllParameters(p,a) (p)->SetAllParameters(a)
+#define IDirectSoundFXParamEq_GetAllParameters(p,a) (p)->GetAllParameters(a)
+#endif // !defined(__cplusplus) || defined(CINTERFACE)
+
+//
+// IDirectSoundFXI3DL2Reverb
+//
+
+DEFINE_GUID(IID_IDirectSoundFXI3DL2Reverb, 0x4b166a6a, 0x0d66, 0x43f3, 0x80, 0xe3, 0xee, 0x62, 0x80, 0xde, 0xe1, 0xa4);
+
+typedef struct _DSFXI3DL2Reverb
+{
+ LONG lRoom; // [-10000, 0] default: -1000 mB
+ LONG lRoomHF; // [-10000, 0] default: 0 mB
+ FLOAT flRoomRolloffFactor; // [0.0, 10.0] default: 0.0
+ FLOAT flDecayTime; // [0.1, 20.0] default: 1.49s
+ FLOAT flDecayHFRatio; // [0.1, 2.0] default: 0.83
+ LONG lReflections; // [-10000, 1000] default: -2602 mB
+ FLOAT flReflectionsDelay; // [0.0, 0.3] default: 0.007 s
+ LONG lReverb; // [-10000, 2000] default: 200 mB
+ FLOAT flReverbDelay; // [0.0, 0.1] default: 0.011 s
+ FLOAT flDiffusion; // [0.0, 100.0] default: 100.0 %
+ FLOAT flDensity; // [0.0, 100.0] default: 100.0 %
+ FLOAT flHFReference; // [20.0, 20000.0] default: 5000.0 Hz
+} DSFXI3DL2Reverb, *LPDSFXI3DL2Reverb;
+
+typedef const DSFXI3DL2Reverb *LPCDSFXI3DL2Reverb;
+
+#define DSFX_I3DL2REVERB_ROOM_MIN (-10000)
+#define DSFX_I3DL2REVERB_ROOM_MAX 0
+#define DSFX_I3DL2REVERB_ROOM_DEFAULT (-1000)
+
+#define DSFX_I3DL2REVERB_ROOMHF_MIN (-10000)
+#define DSFX_I3DL2REVERB_ROOMHF_MAX 0
+#define DSFX_I3DL2REVERB_ROOMHF_DEFAULT (-100)
+
+#define DSFX_I3DL2REVERB_ROOMROLLOFFFACTOR_MIN 0.0f
+#define DSFX_I3DL2REVERB_ROOMROLLOFFFACTOR_MAX 10.0f
+#define DSFX_I3DL2REVERB_ROOMROLLOFFFACTOR_DEFAULT 0.0f
+
+#define DSFX_I3DL2REVERB_DECAYTIME_MIN 0.1f
+#define DSFX_I3DL2REVERB_DECAYTIME_MAX 20.0f
+#define DSFX_I3DL2REVERB_DECAYTIME_DEFAULT 1.49f
+
+#define DSFX_I3DL2REVERB_DECAYHFRATIO_MIN 0.1f
+#define DSFX_I3DL2REVERB_DECAYHFRATIO_MAX 2.0f
+#define DSFX_I3DL2REVERB_DECAYHFRATIO_DEFAULT 0.83f
+
+#define DSFX_I3DL2REVERB_REFLECTIONS_MIN (-10000)
+#define DSFX_I3DL2REVERB_REFLECTIONS_MAX 1000
+#define DSFX_I3DL2REVERB_REFLECTIONS_DEFAULT (-2602)
+
+#define DSFX_I3DL2REVERB_REFLECTIONSDELAY_MIN 0.0f
+#define DSFX_I3DL2REVERB_REFLECTIONSDELAY_MAX 0.3f
+#define DSFX_I3DL2REVERB_REFLECTIONSDELAY_DEFAULT 0.007f
+
+#define DSFX_I3DL2REVERB_REVERB_MIN (-10000)
+#define DSFX_I3DL2REVERB_REVERB_MAX 2000
+#define DSFX_I3DL2REVERB_REVERB_DEFAULT (200)
+
+#define DSFX_I3DL2REVERB_REVERBDELAY_MIN 0.0f
+#define DSFX_I3DL2REVERB_REVERBDELAY_MAX 0.1f
+#define DSFX_I3DL2REVERB_REVERBDELAY_DEFAULT 0.011f
+
+#define DSFX_I3DL2REVERB_DIFFUSION_MIN 0.0f
+#define DSFX_I3DL2REVERB_DIFFUSION_MAX 100.0f
+#define DSFX_I3DL2REVERB_DIFFUSION_DEFAULT 100.0f
+
+#define DSFX_I3DL2REVERB_DENSITY_MIN 0.0f
+#define DSFX_I3DL2REVERB_DENSITY_MAX 100.0f
+#define DSFX_I3DL2REVERB_DENSITY_DEFAULT 100.0f
+
+#define DSFX_I3DL2REVERB_HFREFERENCE_MIN 20.0f
+#define DSFX_I3DL2REVERB_HFREFERENCE_MAX 20000.0f
+#define DSFX_I3DL2REVERB_HFREFERENCE_DEFAULT 5000.0f
+
+#define DSFX_I3DL2REVERB_QUALITY_MIN 0
+#define DSFX_I3DL2REVERB_QUALITY_MAX 3
+#define DSFX_I3DL2REVERB_QUALITY_DEFAULT 2
+
+#undef INTERFACE
+#define INTERFACE IDirectSoundFXI3DL2Reverb
+
+DECLARE_INTERFACE_(IDirectSoundFXI3DL2Reverb, IUnknown)
+{
+ // IUnknown methods
+ STDMETHOD(QueryInterface) (THIS_ REFIID, LPVOID *) PURE;
+ STDMETHOD_(ULONG,AddRef) (THIS) PURE;
+ STDMETHOD_(ULONG,Release) (THIS) PURE;
+
+ // IDirectSoundFXI3DL2Reverb methods
+ STDMETHOD(SetAllParameters) (THIS_ LPCDSFXI3DL2Reverb pcDsFxI3DL2Reverb) PURE;
+ STDMETHOD(GetAllParameters) (THIS_ LPDSFXI3DL2Reverb pDsFxI3DL2Reverb) PURE;
+ STDMETHOD(SetPreset) (THIS_ DWORD dwPreset) PURE;
+ STDMETHOD(GetPreset) (THIS_ LPDWORD pdwPreset) PURE;
+ STDMETHOD(SetQuality) (THIS_ LONG lQuality) PURE;
+ STDMETHOD(GetQuality) (THIS_ LONG *plQuality) PURE;
+};
+
+#define IDirectSoundFXI3DL2Reverb_QueryInterface(p,a,b) IUnknown_QueryInterface(p,a,b)
+#define IDirectSoundFXI3DL2Reverb_AddRef(p) IUnknown_AddRef(p)
+#define IDirectSoundFXI3DL2Reverb_Release(p) IUnknown_Release(p)
+
+#if !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundFXI3DL2Reverb_SetAllParameters(p,a) (p)->lpVtbl->SetAllParameters(p,a)
+#define IDirectSoundFXI3DL2Reverb_GetAllParameters(p,a) (p)->lpVtbl->GetAllParameters(p,a)
+#define IDirectSoundFXI3DL2Reverb_SetPreset(p,a) (p)->lpVtbl->SetPreset(p,a)
+#define IDirectSoundFXI3DL2Reverb_GetPreset(p,a) (p)->lpVtbl->GetPreset(p,a)
+#else // !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundFXI3DL2Reverb_SetAllParameters(p,a) (p)->SetAllParameters(a)
+#define IDirectSoundFXI3DL2Reverb_GetAllParameters(p,a) (p)->GetAllParameters(a)
+#define IDirectSoundFXI3DL2Reverb_SetPreset(p,a) (p)->SetPreset(a)
+#define IDirectSoundFXI3DL2Reverb_GetPreset(p,a) (p)->GetPreset(a)
+#endif // !defined(__cplusplus) || defined(CINTERFACE)
+
+//
+// IDirectSoundFXWavesReverb
+//
+
+DEFINE_GUID(IID_IDirectSoundFXWavesReverb,0x46858c3a,0x0dc6,0x45e3,0xb7,0x60,0xd4,0xee,0xf1,0x6c,0xb3,0x25);
+
+typedef struct _DSFXWavesReverb
+{
+ FLOAT fInGain; // [-96.0,0.0] default: 0.0 dB
+ FLOAT fReverbMix; // [-96.0,0.0] default: 0.0 db
+ FLOAT fReverbTime; // [0.001,3000.0] default: 1000.0 ms
+ FLOAT fHighFreqRTRatio; // [0.001,0.999] default: 0.001
+} DSFXWavesReverb, *LPDSFXWavesReverb;
+
+typedef const DSFXWavesReverb *LPCDSFXWavesReverb;
+
+#define DSFX_WAVESREVERB_INGAIN_MIN -96.0f
+#define DSFX_WAVESREVERB_INGAIN_MAX 0.0f
+#define DSFX_WAVESREVERB_INGAIN_DEFAULT 0.0f
+#define DSFX_WAVESREVERB_REVERBMIX_MIN -96.0f
+#define DSFX_WAVESREVERB_REVERBMIX_MAX 0.0f
+#define DSFX_WAVESREVERB_REVERBMIX_DEFAULT 0.0f
+#define DSFX_WAVESREVERB_REVERBTIME_MIN 0.001f
+#define DSFX_WAVESREVERB_REVERBTIME_MAX 3000.0f
+#define DSFX_WAVESREVERB_REVERBTIME_DEFAULT 1000.0f
+#define DSFX_WAVESREVERB_HIGHFREQRTRATIO_MIN 0.001f
+#define DSFX_WAVESREVERB_HIGHFREQRTRATIO_MAX 0.999f
+#define DSFX_WAVESREVERB_HIGHFREQRTRATIO_DEFAULT 0.001f
+
+#undef INTERFACE
+#define INTERFACE IDirectSoundFXWavesReverb
+
+DECLARE_INTERFACE_(IDirectSoundFXWavesReverb, IUnknown)
+{
+ // IUnknown methods
+ STDMETHOD(QueryInterface) (THIS_ REFIID, LPVOID *) PURE;
+ STDMETHOD_(ULONG,AddRef) (THIS) PURE;
+ STDMETHOD_(ULONG,Release) (THIS) PURE;
+
+ // IDirectSoundFXWavesReverb methods
+ STDMETHOD(SetAllParameters) (THIS_ LPCDSFXWavesReverb pcDsFxWavesReverb) PURE;
+ STDMETHOD(GetAllParameters) (THIS_ LPDSFXWavesReverb pDsFxWavesReverb) PURE;
+};
+
+#define IDirectSoundFXWavesReverb_QueryInterface(p,a,b) IUnknown_QueryInterface(p,a,b)
+#define IDirectSoundFXWavesReverb_AddRef(p) IUnknown_AddRef(p)
+#define IDirectSoundFXWavesReverb_Release(p) IUnknown_Release(p)
+
+#if !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundFXWavesReverb_SetAllParameters(p,a) (p)->lpVtbl->SetAllParameters(p,a)
+#define IDirectSoundFXWavesReverb_GetAllParameters(p,a) (p)->lpVtbl->GetAllParameters(p,a)
+#else // !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundFXWavesReverb_SetAllParameters(p,a) (p)->SetAllParameters(a)
+#define IDirectSoundFXWavesReverb_GetAllParameters(p,a) (p)->GetAllParameters(a)
+#endif // !defined(__cplusplus) || defined(CINTERFACE)
+
+//
+// IDirectSoundCaptureFXAec
+//
+
+DEFINE_GUID(IID_IDirectSoundCaptureFXAec, 0xad74143d, 0x903d, 0x4ab7, 0x80, 0x66, 0x28, 0xd3, 0x63, 0x03, 0x6d, 0x65);
+
+typedef struct _DSCFXAec
+{
+ BOOL fEnable;
+ BOOL fNoiseFill;
+ DWORD dwMode;
+} DSCFXAec, *LPDSCFXAec;
+
+typedef const DSCFXAec *LPCDSCFXAec;
+
+// These match the AEC_MODE_* constants in the DDK's ksmedia.h file
+#define DSCFX_AEC_MODE_PASS_THROUGH 0x0
+#define DSCFX_AEC_MODE_HALF_DUPLEX 0x1
+#define DSCFX_AEC_MODE_FULL_DUPLEX 0x2
+
+// These match the AEC_STATUS_* constants in ksmedia.h
+#define DSCFX_AEC_STATUS_HISTORY_UNINITIALIZED 0x0
+#define DSCFX_AEC_STATUS_HISTORY_CONTINUOUSLY_CONVERGED 0x1
+#define DSCFX_AEC_STATUS_HISTORY_PREVIOUSLY_DIVERGED 0x2
+#define DSCFX_AEC_STATUS_CURRENTLY_CONVERGED 0x8
+
+#undef INTERFACE
+#define INTERFACE IDirectSoundCaptureFXAec
+
+DECLARE_INTERFACE_(IDirectSoundCaptureFXAec, IUnknown)
+{
+ // IUnknown methods
+ STDMETHOD(QueryInterface) (THIS_ REFIID, LPVOID *) PURE;
+ STDMETHOD_(ULONG,AddRef) (THIS) PURE;
+ STDMETHOD_(ULONG,Release) (THIS) PURE;
+
+ // IDirectSoundCaptureFXAec methods
+ STDMETHOD(SetAllParameters) (THIS_ LPCDSCFXAec pDscFxAec) PURE;
+ STDMETHOD(GetAllParameters) (THIS_ LPDSCFXAec pDscFxAec) PURE;
+ STDMETHOD(GetStatus) (THIS_ PDWORD pdwStatus) PURE;
+ STDMETHOD(Reset) (THIS) PURE;
+};
+
+#define IDirectSoundCaptureFXAec_QueryInterface(p,a,b) IUnknown_QueryInterface(p,a,b)
+#define IDirectSoundCaptureFXAec_AddRef(p) IUnknown_AddRef(p)
+#define IDirectSoundCaptureFXAec_Release(p) IUnknown_Release(p)
+
+#if !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundCaptureFXAec_SetAllParameters(p,a) (p)->lpVtbl->SetAllParameters(p,a)
+#define IDirectSoundCaptureFXAec_GetAllParameters(p,a) (p)->lpVtbl->GetAllParameters(p,a)
+#else // !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundCaptureFXAec_SetAllParameters(p,a) (p)->SetAllParameters(a)
+#define IDirectSoundCaptureFXAec_GetAllParameters(p,a) (p)->GetAllParameters(a)
+#endif // !defined(__cplusplus) || defined(CINTERFACE)
+
+
+//
+// IDirectSoundCaptureFXNoiseSuppress
+//
+
+DEFINE_GUID(IID_IDirectSoundCaptureFXNoiseSuppress, 0xed311e41, 0xfbae, 0x4175, 0x96, 0x25, 0xcd, 0x8, 0x54, 0xf6, 0x93, 0xca);
+
+typedef struct _DSCFXNoiseSuppress
+{
+ BOOL fEnable;
+} DSCFXNoiseSuppress, *LPDSCFXNoiseSuppress;
+
+typedef const DSCFXNoiseSuppress *LPCDSCFXNoiseSuppress;
+
+#undef INTERFACE
+#define INTERFACE IDirectSoundCaptureFXNoiseSuppress
+
+DECLARE_INTERFACE_(IDirectSoundCaptureFXNoiseSuppress, IUnknown)
+{
+ // IUnknown methods
+ STDMETHOD(QueryInterface) (THIS_ REFIID, LPVOID *) PURE;
+ STDMETHOD_(ULONG,AddRef) (THIS) PURE;
+ STDMETHOD_(ULONG,Release) (THIS) PURE;
+
+ // IDirectSoundCaptureFXNoiseSuppress methods
+ STDMETHOD(SetAllParameters) (THIS_ LPCDSCFXNoiseSuppress pcDscFxNoiseSuppress) PURE;
+ STDMETHOD(GetAllParameters) (THIS_ LPDSCFXNoiseSuppress pDscFxNoiseSuppress) PURE;
+ STDMETHOD(Reset) (THIS) PURE;
+};
+
+#define IDirectSoundCaptureFXNoiseSuppress_QueryInterface(p,a,b) IUnknown_QueryInterface(p,a,b)
+#define IDirectSoundCaptureFXNoiseSuppress_AddRef(p) IUnknown_AddRef(p)
+#define IDirectSoundCaptureFXNoiseSuppress_Release(p) IUnknown_Release(p)
+
+#if !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundCaptureFXNoiseSuppress_SetAllParameters(p,a) (p)->lpVtbl->SetAllParameters(p,a)
+#define IDirectSoundCaptureFXNoiseSuppress_GetAllParameters(p,a) (p)->lpVtbl->GetAllParameters(p,a)
+#else // !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundCaptureFXNoiseSuppress_SetAllParameters(p,a) (p)->SetAllParameters(a)
+#define IDirectSoundCaptureFXNoiseSuppress_GetAllParameters(p,a) (p)->GetAllParameters(a)
+#endif // !defined(__cplusplus) || defined(CINTERFACE)
+
+
+//
+// IDirectSoundFullDuplex
+//
+
+#ifndef _IDirectSoundFullDuplex_
+#define _IDirectSoundFullDuplex_
+
+#ifdef __cplusplus
+// 'struct' not 'class' per the way DECLARE_INTERFACE_ is defined
+struct IDirectSoundFullDuplex;
+#endif // __cplusplus
+
+typedef struct IDirectSoundFullDuplex *LPDIRECTSOUNDFULLDUPLEX;
+
+DEFINE_GUID(IID_IDirectSoundFullDuplex, 0xedcb4c7a, 0xdaab, 0x4216, 0xa4, 0x2e, 0x6c, 0x50, 0x59, 0x6d, 0xdc, 0x1d);
+
+#undef INTERFACE
+#define INTERFACE IDirectSoundFullDuplex
+
+DECLARE_INTERFACE_(IDirectSoundFullDuplex, IUnknown)
+{
+ // IUnknown methods
+ STDMETHOD(QueryInterface) (THIS_ REFIID, LPVOID *) PURE;
+ STDMETHOD_(ULONG,AddRef) (THIS) PURE;
+ STDMETHOD_(ULONG,Release) (THIS) PURE;
+
+ // IDirectSoundFullDuplex methods
+ STDMETHOD(Initialize) (THIS_ LPCGUID pCaptureGuid, LPCGUID pRenderGuid, LPCDSCBUFFERDESC lpDscBufferDesc, LPCDSBUFFERDESC lpDsBufferDesc, HWND hWnd, DWORD dwLevel, LPLPDIRECTSOUNDCAPTUREBUFFER8 lplpDirectSoundCaptureBuffer8, LPLPDIRECTSOUNDBUFFER8 lplpDirectSoundBuffer8) PURE;
+};
+
+#define IDirectSoundFullDuplex_QueryInterface(p,a,b) IUnknown_QueryInterface(p,a,b)
+#define IDirectSoundFullDuplex_AddRef(p) IUnknown_AddRef(p)
+#define IDirectSoundFullDuplex_Release(p) IUnknown_Release(p)
+
+#if !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundFullDuplex_Initialize(p,a,b,c,d,e,f,g,h) (p)->lpVtbl->Initialize(p,a,b,c,d,e,f,g,h)
+#else // !defined(__cplusplus) || defined(CINTERFACE)
+#define IDirectSoundFullDuplex_Initialize(p,a,b,c,d,e,f,g,h) (p)->Initialize(a,b,c,d,e,f,g,h)
+#endif // !defined(__cplusplus) || defined(CINTERFACE)
+
+#endif // _IDirectSoundFullDuplex_
+
+#endif // DIRECTSOUND_VERSION >= 0x0800
+
+//
+// Return Codes
+//
+
+// The function completed successfully
+#define DS_OK S_OK
+
+// The call succeeded, but we had to substitute the 3D algorithm
+#define DS_NO_VIRTUALIZATION MAKE_HRESULT(0, _FACDS, 10)
+
+// The call failed because resources (such as a priority level)
+// were already being used by another caller
+#define DSERR_ALLOCATED MAKE_DSHRESULT(10)
+
+// The control (vol, pan, etc.) requested by the caller is not available
+#define DSERR_CONTROLUNAVAIL MAKE_DSHRESULT(30)
+
+// An invalid parameter was passed to the returning function
+#define DSERR_INVALIDPARAM E_INVALIDARG
+
+// This call is not valid for the current state of this object
+#define DSERR_INVALIDCALL MAKE_DSHRESULT(50)
+
+// An undetermined error occurred inside the DirectSound subsystem
+#define DSERR_GENERIC E_FAIL
+
+// The caller does not have the priority level required for the function to
+// succeed
+#define DSERR_PRIOLEVELNEEDED MAKE_DSHRESULT(70)
+
+// Not enough free memory is available to complete the operation
+#define DSERR_OUTOFMEMORY E_OUTOFMEMORY
+
+// The specified WAVE format is not supported
+#define DSERR_BADFORMAT MAKE_DSHRESULT(100)
+
+// The function called is not supported at this time
+#define DSERR_UNSUPPORTED E_NOTIMPL
+
+// No sound driver is available for use
+#define DSERR_NODRIVER MAKE_DSHRESULT(120)
+// This object is already initialized
+#define DSERR_ALREADYINITIALIZED MAKE_DSHRESULT(130)
+
+// This object does not support aggregation
+#define DSERR_NOAGGREGATION CLASS_E_NOAGGREGATION
+
+// The buffer memory has been lost, and must be restored
+#define DSERR_BUFFERLOST MAKE_DSHRESULT(150)
+
+// Another app has a higher priority level, preventing this call from
+// succeeding
+#define DSERR_OTHERAPPHASPRIO MAKE_DSHRESULT(160)
+
+// This object has not been initialized
+#define DSERR_UNINITIALIZED MAKE_DSHRESULT(170)
+
+// The requested COM interface is not available
+#define DSERR_NOINTERFACE E_NOINTERFACE
+
+// Access is denied
+#define DSERR_ACCESSDENIED E_ACCESSDENIED
+
+// Tried to create a DSBCAPS_CTRLFX buffer shorter than DSBSIZE_FX_MIN milliseconds
+#define DSERR_BUFFERTOOSMALL MAKE_DSHRESULT(180)
+
+// Attempt to use DirectSound 8 functionality on an older DirectSound object
+#define DSERR_DS8_REQUIRED MAKE_DSHRESULT(190)
+
+// A circular loop of send effects was detected
+#define DSERR_SENDLOOP MAKE_DSHRESULT(200)
+
+// The GUID specified in an audiopath file does not match a valid MIXIN buffer
+#define DSERR_BADSENDBUFFERGUID MAKE_DSHRESULT(210)
+
+// The object requested was not found (numerically equal to DMUS_E_NOT_FOUND)
+#define DSERR_OBJECTNOTFOUND MAKE_DSHRESULT(4449)
+
+// The effects requested could not be found on the system, or they were found
+// but in the wrong order, or in the wrong hardware/software locations.
+#define DSERR_FXUNAVAILABLE MAKE_DSHRESULT(220)
+
+//
+// Flags
+//
+
+#define DSCAPS_PRIMARYMONO 0x00000001
+#define DSCAPS_PRIMARYSTEREO 0x00000002
+#define DSCAPS_PRIMARY8BIT 0x00000004
+#define DSCAPS_PRIMARY16BIT 0x00000008
+#define DSCAPS_CONTINUOUSRATE 0x00000010
+#define DSCAPS_EMULDRIVER 0x00000020
+#define DSCAPS_CERTIFIED 0x00000040
+#define DSCAPS_SECONDARYMONO 0x00000100
+#define DSCAPS_SECONDARYSTEREO 0x00000200
+#define DSCAPS_SECONDARY8BIT 0x00000400
+#define DSCAPS_SECONDARY16BIT 0x00000800
+
+#define DSSCL_NORMAL 0x00000001
+#define DSSCL_PRIORITY 0x00000002
+#define DSSCL_EXCLUSIVE 0x00000003
+#define DSSCL_WRITEPRIMARY 0x00000004
+
+#define DSSPEAKER_DIRECTOUT 0x00000000
+#define DSSPEAKER_HEADPHONE 0x00000001
+#define DSSPEAKER_MONO 0x00000002
+#define DSSPEAKER_QUAD 0x00000003
+#define DSSPEAKER_STEREO 0x00000004
+#define DSSPEAKER_SURROUND 0x00000005
+#define DSSPEAKER_5POINT1 0x00000006 // obsolete 5.1 setting
+#define DSSPEAKER_7POINT1 0x00000007 // obsolete 7.1 setting
+#define DSSPEAKER_7POINT1_SURROUND 0x00000008 // correct 7.1 Home Theater setting
+#define DSSPEAKER_7POINT1_WIDE DSSPEAKER_7POINT1
+#if (DIRECTSOUND_VERSION >= 0x1000)
+ #define DSSPEAKER_5POINT1_SURROUND 0x00000009 // correct 5.1 setting
+ #define DSSPEAKER_5POINT1_BACK DSSPEAKER_5POINT1
+#endif
+
+#define DSSPEAKER_GEOMETRY_MIN 0x00000005 // 5 degrees
+#define DSSPEAKER_GEOMETRY_NARROW 0x0000000A // 10 degrees
+#define DSSPEAKER_GEOMETRY_WIDE 0x00000014 // 20 degrees
+#define DSSPEAKER_GEOMETRY_MAX 0x000000B4 // 180 degrees
+
+#define DSSPEAKER_COMBINED(c, g) ((DWORD)(((BYTE)(c)) | ((DWORD)((BYTE)(g))) << 16))
+#define DSSPEAKER_CONFIG(a) ((BYTE)(a))
+#define DSSPEAKER_GEOMETRY(a) ((BYTE)(((DWORD)(a) >> 16) & 0x00FF))
+
+#define DSBCAPS_PRIMARYBUFFER 0x00000001
+#define DSBCAPS_STATIC 0x00000002
+#define DSBCAPS_LOCHARDWARE 0x00000004
+#define DSBCAPS_LOCSOFTWARE 0x00000008
+#define DSBCAPS_CTRL3D 0x00000010
+#define DSBCAPS_CTRLFREQUENCY 0x00000020
+#define DSBCAPS_CTRLPAN 0x00000040
+#define DSBCAPS_CTRLVOLUME 0x00000080
+#define DSBCAPS_CTRLPOSITIONNOTIFY 0x00000100
+#define DSBCAPS_CTRLFX 0x00000200
+#define DSBCAPS_STICKYFOCUS 0x00004000
+#define DSBCAPS_GLOBALFOCUS 0x00008000
+#define DSBCAPS_GETCURRENTPOSITION2 0x00010000
+#define DSBCAPS_MUTE3DATMAXDISTANCE 0x00020000
+#define DSBCAPS_LOCDEFER 0x00040000
+#if (DIRECTSOUND_VERSION >= 0x1000)
+ // Force GetCurrentPosition() to return a buffer's true play position;
+ // unmodified by aids to enhance backward compatibility.
+ #define DSBCAPS_TRUEPLAYPOSITION 0x00080000
+#endif
+
+#define DSBPLAY_LOOPING 0x00000001
+#define DSBPLAY_LOCHARDWARE 0x00000002
+#define DSBPLAY_LOCSOFTWARE 0x00000004
+#define DSBPLAY_TERMINATEBY_TIME 0x00000008
+#define DSBPLAY_TERMINATEBY_DISTANCE 0x000000010
+#define DSBPLAY_TERMINATEBY_PRIORITY 0x000000020
+
+#define DSBSTATUS_PLAYING 0x00000001
+#define DSBSTATUS_BUFFERLOST 0x00000002
+#define DSBSTATUS_LOOPING 0x00000004
+#define DSBSTATUS_LOCHARDWARE 0x00000008
+#define DSBSTATUS_LOCSOFTWARE 0x00000010
+#define DSBSTATUS_TERMINATED 0x00000020
+
+#define DSBLOCK_FROMWRITECURSOR 0x00000001
+#define DSBLOCK_ENTIREBUFFER 0x00000002
+
+#define DSBFREQUENCY_ORIGINAL 0
+#define DSBFREQUENCY_MIN 100
+#if DIRECTSOUND_VERSION >= 0x0900
+#define DSBFREQUENCY_MAX 200000
+#else
+#define DSBFREQUENCY_MAX 100000
+#endif
+
+#define DSBPAN_LEFT -10000
+#define DSBPAN_CENTER 0
+#define DSBPAN_RIGHT 10000
+
+#define DSBVOLUME_MIN -10000
+#define DSBVOLUME_MAX 0
+
+#define DSBSIZE_MIN 4
+#define DSBSIZE_MAX 0x0FFFFFFF
+#define DSBSIZE_FX_MIN 150 // NOTE: Milliseconds, not bytes
+
+#define DSBNOTIFICATIONS_MAX 100000UL
+
+#define DS3DMODE_NORMAL 0x00000000
+#define DS3DMODE_HEADRELATIVE 0x00000001
+#define DS3DMODE_DISABLE 0x00000002
+
+#define DS3D_IMMEDIATE 0x00000000
+#define DS3D_DEFERRED 0x00000001
+
+#define DS3D_MINDISTANCEFACTOR FLT_MIN
+#define DS3D_MAXDISTANCEFACTOR FLT_MAX
+#define DS3D_DEFAULTDISTANCEFACTOR 1.0f
+
+#define DS3D_MINROLLOFFFACTOR 0.0f
+#define DS3D_MAXROLLOFFFACTOR 10.0f
+#define DS3D_DEFAULTROLLOFFFACTOR 1.0f
+
+#define DS3D_MINDOPPLERFACTOR 0.0f
+#define DS3D_MAXDOPPLERFACTOR 10.0f
+#define DS3D_DEFAULTDOPPLERFACTOR 1.0f
+
+#define DS3D_DEFAULTMINDISTANCE 1.0f
+#define DS3D_DEFAULTMAXDISTANCE 1000000000.0f
+
+#define DS3D_MINCONEANGLE 0
+#define DS3D_MAXCONEANGLE 360
+#define DS3D_DEFAULTCONEANGLE 360
+
+#define DS3D_DEFAULTCONEOUTSIDEVOLUME DSBVOLUME_MAX
+
+// IDirectSoundCapture attributes
+
+#define DSCCAPS_EMULDRIVER DSCAPS_EMULDRIVER
+#define DSCCAPS_CERTIFIED DSCAPS_CERTIFIED
+#define DSCCAPS_MULTIPLECAPTURE 0x00000001
+
+// IDirectSoundCaptureBuffer attributes
+
+#define DSCBCAPS_WAVEMAPPED 0x80000000
+
+#if DIRECTSOUND_VERSION >= 0x0800
+#define DSCBCAPS_CTRLFX 0x00000200
+#endif
+
+
+#define DSCBLOCK_ENTIREBUFFER 0x00000001
+
+#define DSCBSTATUS_CAPTURING 0x00000001
+#define DSCBSTATUS_LOOPING 0x00000002
+
+#define DSCBSTART_LOOPING 0x00000001
+
+#define DSBPN_OFFSETSTOP 0xFFFFFFFF
+
+#define DS_CERTIFIED 0x00000000
+#define DS_UNCERTIFIED 0x00000001
+
+
+//
+// Flags for the I3DL2 effects
+//
+
+//
+// I3DL2 Material Presets
+//
+
+enum
+{
+ DSFX_I3DL2_MATERIAL_PRESET_SINGLEWINDOW,
+ DSFX_I3DL2_MATERIAL_PRESET_DOUBLEWINDOW,
+ DSFX_I3DL2_MATERIAL_PRESET_THINDOOR,
+ DSFX_I3DL2_MATERIAL_PRESET_THICKDOOR,
+ DSFX_I3DL2_MATERIAL_PRESET_WOODWALL,
+ DSFX_I3DL2_MATERIAL_PRESET_BRICKWALL,
+ DSFX_I3DL2_MATERIAL_PRESET_STONEWALL,
+ DSFX_I3DL2_MATERIAL_PRESET_CURTAIN
+};
+
+#define I3DL2_MATERIAL_PRESET_SINGLEWINDOW -2800,0.71f
+#define I3DL2_MATERIAL_PRESET_DOUBLEWINDOW -5000,0.40f
+#define I3DL2_MATERIAL_PRESET_THINDOOR -1800,0.66f
+#define I3DL2_MATERIAL_PRESET_THICKDOOR -4400,0.64f
+#define I3DL2_MATERIAL_PRESET_WOODWALL -4000,0.50f
+#define I3DL2_MATERIAL_PRESET_BRICKWALL -5000,0.60f
+#define I3DL2_MATERIAL_PRESET_STONEWALL -6000,0.68f
+#define I3DL2_MATERIAL_PRESET_CURTAIN -1200,0.15f
+
+enum
+{
+ DSFX_I3DL2_ENVIRONMENT_PRESET_DEFAULT,
+ DSFX_I3DL2_ENVIRONMENT_PRESET_GENERIC,
+ DSFX_I3DL2_ENVIRONMENT_PRESET_PADDEDCELL,
+ DSFX_I3DL2_ENVIRONMENT_PRESET_ROOM,
+ DSFX_I3DL2_ENVIRONMENT_PRESET_BATHROOM,
+ DSFX_I3DL2_ENVIRONMENT_PRESET_LIVINGROOM,
+ DSFX_I3DL2_ENVIRONMENT_PRESET_STONEROOM,
+ DSFX_I3DL2_ENVIRONMENT_PRESET_AUDITORIUM,
+ DSFX_I3DL2_ENVIRONMENT_PRESET_CONCERTHALL,
+ DSFX_I3DL2_ENVIRONMENT_PRESET_CAVE,
+ DSFX_I3DL2_ENVIRONMENT_PRESET_ARENA,
+ DSFX_I3DL2_ENVIRONMENT_PRESET_HANGAR,
+ DSFX_I3DL2_ENVIRONMENT_PRESET_CARPETEDHALLWAY,
+ DSFX_I3DL2_ENVIRONMENT_PRESET_HALLWAY,
+ DSFX_I3DL2_ENVIRONMENT_PRESET_STONECORRIDOR,
+ DSFX_I3DL2_ENVIRONMENT_PRESET_ALLEY,
+ DSFX_I3DL2_ENVIRONMENT_PRESET_FOREST,
+ DSFX_I3DL2_ENVIRONMENT_PRESET_CITY,
+ DSFX_I3DL2_ENVIRONMENT_PRESET_MOUNTAINS,
+ DSFX_I3DL2_ENVIRONMENT_PRESET_QUARRY,
+ DSFX_I3DL2_ENVIRONMENT_PRESET_PLAIN,
+ DSFX_I3DL2_ENVIRONMENT_PRESET_PARKINGLOT,
+ DSFX_I3DL2_ENVIRONMENT_PRESET_SEWERPIPE,
+ DSFX_I3DL2_ENVIRONMENT_PRESET_UNDERWATER,
+ DSFX_I3DL2_ENVIRONMENT_PRESET_SMALLROOM,
+ DSFX_I3DL2_ENVIRONMENT_PRESET_MEDIUMROOM,
+ DSFX_I3DL2_ENVIRONMENT_PRESET_LARGEROOM,
+ DSFX_I3DL2_ENVIRONMENT_PRESET_MEDIUMHALL,
+ DSFX_I3DL2_ENVIRONMENT_PRESET_LARGEHALL,
+ DSFX_I3DL2_ENVIRONMENT_PRESET_PLATE
+};
+
+//
+// I3DL2 Reverberation Presets Values
+//
+
+#define I3DL2_ENVIRONMENT_PRESET_DEFAULT -1000, -100, 0.0f, 1.49f, 0.83f, -2602, 0.007f, 200, 0.011f, 100.0f, 100.0f, 5000.0f
+#define I3DL2_ENVIRONMENT_PRESET_GENERIC -1000, -100, 0.0f, 1.49f, 0.83f, -2602, 0.007f, 200, 0.011f, 100.0f, 100.0f, 5000.0f
+#define I3DL2_ENVIRONMENT_PRESET_PADDEDCELL -1000,-6000, 0.0f, 0.17f, 0.10f, -1204, 0.001f, 207, 0.002f, 100.0f, 100.0f, 5000.0f
+#define I3DL2_ENVIRONMENT_PRESET_ROOM -1000, -454, 0.0f, 0.40f, 0.83f, -1646, 0.002f, 53, 0.003f, 100.0f, 100.0f, 5000.0f
+#define I3DL2_ENVIRONMENT_PRESET_BATHROOM -1000,-1200, 0.0f, 1.49f, 0.54f, -370, 0.007f, 1030, 0.011f, 100.0f, 60.0f, 5000.0f
+#define I3DL2_ENVIRONMENT_PRESET_LIVINGROOM -1000,-6000, 0.0f, 0.50f, 0.10f, -1376, 0.003f, -1104, 0.004f, 100.0f, 100.0f, 5000.0f
+#define I3DL2_ENVIRONMENT_PRESET_STONEROOM -1000, -300, 0.0f, 2.31f, 0.64f, -711, 0.012f, 83, 0.017f, 100.0f, 100.0f, 5000.0f
+#define I3DL2_ENVIRONMENT_PRESET_AUDITORIUM -1000, -476, 0.0f, 4.32f, 0.59f, -789, 0.020f, -289, 0.030f, 100.0f, 100.0f, 5000.0f
+#define I3DL2_ENVIRONMENT_PRESET_CONCERTHALL -1000, -500, 0.0f, 3.92f, 0.70f, -1230, 0.020f, -2, 0.029f, 100.0f, 100.0f, 5000.0f
+#define I3DL2_ENVIRONMENT_PRESET_CAVE -1000, 0, 0.0f, 2.91f, 1.30f, -602, 0.015f, -302, 0.022f, 100.0f, 100.0f, 5000.0f
+#define I3DL2_ENVIRONMENT_PRESET_ARENA -1000, -698, 0.0f, 7.24f, 0.33f, -1166, 0.020f, 16, 0.030f, 100.0f, 100.0f, 5000.0f
+#define I3DL2_ENVIRONMENT_PRESET_HANGAR -1000,-1000, 0.0f,10.05f, 0.23f, -602, 0.020f, 198, 0.030f, 100.0f, 100.0f, 5000.0f
+#define I3DL2_ENVIRONMENT_PRESET_CARPETEDHALLWAY -1000,-4000, 0.0f, 0.30f, 0.10f, -1831, 0.002f, -1630, 0.030f, 100.0f, 100.0f, 5000.0f
+#define I3DL2_ENVIRONMENT_PRESET_HALLWAY -1000, -300, 0.0f, 1.49f, 0.59f, -1219, 0.007f, 441, 0.011f, 100.0f, 100.0f, 5000.0f
+#define I3DL2_ENVIRONMENT_PRESET_STONECORRIDOR -1000, -237, 0.0f, 2.70f, 0.79f, -1214, 0.013f, 395, 0.020f, 100.0f, 100.0f, 5000.0f
+#define I3DL2_ENVIRONMENT_PRESET_ALLEY -1000, -270, 0.0f, 1.49f, 0.86f, -1204, 0.007f, -4, 0.011f, 100.0f, 100.0f, 5000.0f
+#define I3DL2_ENVIRONMENT_PRESET_FOREST -1000,-3300, 0.0f, 1.49f, 0.54f, -2560, 0.162f, -613, 0.088f, 79.0f, 100.0f, 5000.0f
+#define I3DL2_ENVIRONMENT_PRESET_CITY -1000, -800, 0.0f, 1.49f, 0.67f, -2273, 0.007f, -2217, 0.011f, 50.0f, 100.0f, 5000.0f
+#define I3DL2_ENVIRONMENT_PRESET_MOUNTAINS -1000,-2500, 0.0f, 1.49f, 0.21f, -2780, 0.300f, -2014, 0.100f, 27.0f, 100.0f, 5000.0f
+#define I3DL2_ENVIRONMENT_PRESET_QUARRY -1000,-1000, 0.0f, 1.49f, 0.83f,-10000, 0.061f, 500, 0.025f, 100.0f, 100.0f, 5000.0f
+#define I3DL2_ENVIRONMENT_PRESET_PLAIN -1000,-2000, 0.0f, 1.49f, 0.50f, -2466, 0.179f, -2514, 0.100f, 21.0f, 100.0f, 5000.0f
+#define I3DL2_ENVIRONMENT_PRESET_PARKINGLOT -1000, 0, 0.0f, 1.65f, 1.50f, -1363, 0.008f, -1153, 0.012f, 100.0f, 100.0f, 5000.0f
+#define I3DL2_ENVIRONMENT_PRESET_SEWERPIPE -1000,-1000, 0.0f, 2.81f, 0.14f, 429, 0.014f, 648, 0.021f, 80.0f, 60.0f, 5000.0f
+#define I3DL2_ENVIRONMENT_PRESET_UNDERWATER -1000,-4000, 0.0f, 1.49f, 0.10f, -449, 0.007f, 1700, 0.011f, 100.0f, 100.0f, 5000.0f
+
+//
+// Examples simulating 'musical' reverb presets
+//
+// Name Decay time Description
+// Small Room 1.1s A small size room with a length of 5m or so.
+// Medium Room 1.3s A medium size room with a length of 10m or so.
+// Large Room 1.5s A large size room suitable for live performances.
+// Medium Hall 1.8s A medium size concert hall.
+// Large Hall 1.8s A large size concert hall suitable for a full orchestra.
+// Plate 1.3s A plate reverb simulation.
+//
+
+#define I3DL2_ENVIRONMENT_PRESET_SMALLROOM -1000, -600, 0.0f, 1.10f, 0.83f, -400, 0.005f, 500, 0.010f, 100.0f, 100.0f, 5000.0f
+#define I3DL2_ENVIRONMENT_PRESET_MEDIUMROOM -1000, -600, 0.0f, 1.30f, 0.83f, -1000, 0.010f, -200, 0.020f, 100.0f, 100.0f, 5000.0f
+#define I3DL2_ENVIRONMENT_PRESET_LARGEROOM -1000, -600, 0.0f, 1.50f, 0.83f, -1600, 0.020f, -1000, 0.040f, 100.0f, 100.0f, 5000.0f
+#define I3DL2_ENVIRONMENT_PRESET_MEDIUMHALL -1000, -600, 0.0f, 1.80f, 0.70f, -1300, 0.015f, -800, 0.030f, 100.0f, 100.0f, 5000.0f
+#define I3DL2_ENVIRONMENT_PRESET_LARGEHALL -1000, -600, 0.0f, 1.80f, 0.70f, -2000, 0.030f, -1400, 0.060f, 100.0f, 100.0f, 5000.0f
+#define I3DL2_ENVIRONMENT_PRESET_PLATE -1000, -200, 0.0f, 1.30f, 0.90f, 0, 0.002f, 0, 0.010f, 100.0f, 75.0f, 5000.0f
+
+//
+// DirectSound3D Algorithms
+//
+
+// Default DirectSound3D algorithm {00000000-0000-0000-0000-000000000000}
+#define DS3DALG_DEFAULT GUID_NULL
+
+// No virtualization (Pan3D) {C241333F-1C1B-11d2-94F5-00C04FC28ACA}
+DEFINE_GUID(DS3DALG_NO_VIRTUALIZATION, 0xc241333f, 0x1c1b, 0x11d2, 0x94, 0xf5, 0x0, 0xc0, 0x4f, 0xc2, 0x8a, 0xca);
+
+// High-quality HRTF algorithm {C2413340-1C1B-11d2-94F5-00C04FC28ACA}
+DEFINE_GUID(DS3DALG_HRTF_FULL, 0xc2413340, 0x1c1b, 0x11d2, 0x94, 0xf5, 0x0, 0xc0, 0x4f, 0xc2, 0x8a, 0xca);
+
+// Lower-quality HRTF algorithm {C2413342-1C1B-11d2-94F5-00C04FC28ACA}
+DEFINE_GUID(DS3DALG_HRTF_LIGHT, 0xc2413342, 0x1c1b, 0x11d2, 0x94, 0xf5, 0x0, 0xc0, 0x4f, 0xc2, 0x8a, 0xca);
+
+
+#if DIRECTSOUND_VERSION >= 0x0800
+
+//
+// DirectSound Internal Effect Algorithms
+//
+
+
+// Gargle {DAFD8210-5711-4B91-9FE3-F75B7AE279BF}
+DEFINE_GUID(GUID_DSFX_STANDARD_GARGLE, 0xdafd8210, 0x5711, 0x4b91, 0x9f, 0xe3, 0xf7, 0x5b, 0x7a, 0xe2, 0x79, 0xbf);
+
+// Chorus {EFE6629C-81F7-4281-BD91-C9D604A95AF6}
+DEFINE_GUID(GUID_DSFX_STANDARD_CHORUS, 0xefe6629c, 0x81f7, 0x4281, 0xbd, 0x91, 0xc9, 0xd6, 0x04, 0xa9, 0x5a, 0xf6);
+
+// Flanger {EFCA3D92-DFD8-4672-A603-7420894BAD98}
+DEFINE_GUID(GUID_DSFX_STANDARD_FLANGER, 0xefca3d92, 0xdfd8, 0x4672, 0xa6, 0x03, 0x74, 0x20, 0x89, 0x4b, 0xad, 0x98);
+
+// Echo/Delay {EF3E932C-D40B-4F51-8CCF-3F98F1B29D5D}
+DEFINE_GUID(GUID_DSFX_STANDARD_ECHO, 0xef3e932c, 0xd40b, 0x4f51, 0x8c, 0xcf, 0x3f, 0x98, 0xf1, 0xb2, 0x9d, 0x5d);
+
+// Distortion {EF114C90-CD1D-484E-96E5-09CFAF912A21}
+DEFINE_GUID(GUID_DSFX_STANDARD_DISTORTION, 0xef114c90, 0xcd1d, 0x484e, 0x96, 0xe5, 0x09, 0xcf, 0xaf, 0x91, 0x2a, 0x21);
+
+// Compressor/Limiter {EF011F79-4000-406D-87AF-BFFB3FC39D57}
+DEFINE_GUID(GUID_DSFX_STANDARD_COMPRESSOR, 0xef011f79, 0x4000, 0x406d, 0x87, 0xaf, 0xbf, 0xfb, 0x3f, 0xc3, 0x9d, 0x57);
+
+// Parametric Equalization {120CED89-3BF4-4173-A132-3CB406CF3231}
+DEFINE_GUID(GUID_DSFX_STANDARD_PARAMEQ, 0x120ced89, 0x3bf4, 0x4173, 0xa1, 0x32, 0x3c, 0xb4, 0x06, 0xcf, 0x32, 0x31);
+
+// I3DL2 Environmental Reverberation: Reverb (Listener) Effect {EF985E71-D5C7-42D4-BA4D-2D073E2E96F4}
+DEFINE_GUID(GUID_DSFX_STANDARD_I3DL2REVERB, 0xef985e71, 0xd5c7, 0x42d4, 0xba, 0x4d, 0x2d, 0x07, 0x3e, 0x2e, 0x96, 0xf4);
+
+// Waves Reverberation {87FC0268-9A55-4360-95AA-004A1D9DE26C}
+DEFINE_GUID(GUID_DSFX_WAVES_REVERB, 0x87fc0268, 0x9a55, 0x4360, 0x95, 0xaa, 0x00, 0x4a, 0x1d, 0x9d, 0xe2, 0x6c);
+
+//
+// DirectSound Capture Effect Algorithms
+//
+
+
+// Acoustic Echo Canceller {BF963D80-C559-11D0-8A2B-00A0C9255AC1}
+// Matches KSNODETYPE_ACOUSTIC_ECHO_CANCEL in ksmedia.h
+DEFINE_GUID(GUID_DSCFX_CLASS_AEC, 0xBF963D80L, 0xC559, 0x11D0, 0x8A, 0x2B, 0x00, 0xA0, 0xC9, 0x25, 0x5A, 0xC1);
+
+// Microsoft AEC {CDEBB919-379A-488a-8765-F53CFD36DE40}
+DEFINE_GUID(GUID_DSCFX_MS_AEC, 0xcdebb919, 0x379a, 0x488a, 0x87, 0x65, 0xf5, 0x3c, 0xfd, 0x36, 0xde, 0x40);
+
+// System AEC {1C22C56D-9879-4f5b-A389-27996DDC2810}
+DEFINE_GUID(GUID_DSCFX_SYSTEM_AEC, 0x1c22c56d, 0x9879, 0x4f5b, 0xa3, 0x89, 0x27, 0x99, 0x6d, 0xdc, 0x28, 0x10);
+
+// Noise Suppression {E07F903F-62FD-4e60-8CDD-DEA7236665B5}
+// Matches KSNODETYPE_NOISE_SUPPRESS in post Windows ME DDK's ksmedia.h
+DEFINE_GUID(GUID_DSCFX_CLASS_NS, 0xe07f903f, 0x62fd, 0x4e60, 0x8c, 0xdd, 0xde, 0xa7, 0x23, 0x66, 0x65, 0xb5);
+
+// Microsoft Noise Suppression {11C5C73B-66E9-4ba1-A0BA-E814C6EED92D}
+DEFINE_GUID(GUID_DSCFX_MS_NS, 0x11c5c73b, 0x66e9, 0x4ba1, 0xa0, 0xba, 0xe8, 0x14, 0xc6, 0xee, 0xd9, 0x2d);
+
+// System Noise Suppression {5AB0882E-7274-4516-877D-4EEE99BA4FD0}
+DEFINE_GUID(GUID_DSCFX_SYSTEM_NS, 0x5ab0882e, 0x7274, 0x4516, 0x87, 0x7d, 0x4e, 0xee, 0x99, 0xba, 0x4f, 0xd0);
+
+#endif // DIRECTSOUND_VERSION >= 0x0800
+
+#endif // __DSOUND_INCLUDED__
+
+
+
+#ifdef __cplusplus
+};
+#endif // __cplusplus
+
--- /dev/null
+#pragma once
+
+/*++
+
+Copyright (c) Microsoft Corporation. All rights reserved.
+
+Module Name:
+
+ devpkey.h
+
+Abstract:
+
+ Defines property keys for the Plug and Play Device Property API.
+
+Author:
+
+ Jim Cavalaris (jamesca) 10-14-2003
+
+Environment:
+
+ User-mode only.
+
+Revision History:
+
+ 14-October-2003 jamesca
+
+ Creation and initial implementation.
+
+ 20-June-2006 dougb
+
+ Copied Jim's version replaced "DEFINE_DEVPROPKEY(DEVPKEY_" with "DEFINE_PROPERTYKEY(PKEY_"
+
+--*/
+
+//#include <devpropdef.h>
+
+//
+// _NAME
+//
+
+DEFINE_PROPERTYKEY(PKEY_NAME, 0xb725f130, 0x47ef, 0x101a, 0xa5, 0xf1, 0x02, 0x60, 0x8c, 0x9e, 0xeb, 0xac, 10); // DEVPROP_TYPE_STRING
+
+//
+// Device properties
+// These PKEYs correspond to the old setupapi SPDRP_XXX properties
+//
+DEFINE_PROPERTYKEY(PKEY_Device_DeviceDesc, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 2); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_Device_HardwareIds, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 3); // DEVPROP_TYPE_STRING_LIST
+DEFINE_PROPERTYKEY(PKEY_Device_CompatibleIds, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 4); // DEVPROP_TYPE_STRING_LIST
+DEFINE_PROPERTYKEY(PKEY_Device_Service, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 6); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_Device_Class, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 9); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_Device_ClassGuid, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 10); // DEVPROP_TYPE_GUID
+DEFINE_PROPERTYKEY(PKEY_Device_Driver, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 11); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_Device_ConfigFlags, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 12); // DEVPROP_TYPE_UINT32
+DEFINE_PROPERTYKEY(PKEY_Device_Manufacturer, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 13); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_Device_FriendlyName, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 14); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_Device_LocationInfo, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 15); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_Device_PDOName, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 16); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_Device_Capabilities, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 17); // DEVPROP_TYPE_UINT32
+DEFINE_PROPERTYKEY(PKEY_Device_UINumber, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 18); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_Device_UpperFilters, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 19); // DEVPROP_TYPE_STRING_LIST
+DEFINE_PROPERTYKEY(PKEY_Device_LowerFilters, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 20); // DEVPROP_TYPE_STRING_LIST
+DEFINE_PROPERTYKEY(PKEY_Device_BusTypeGuid, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 21); // DEVPROP_TYPE_GUID
+DEFINE_PROPERTYKEY(PKEY_Device_LegacyBusType, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 22); // DEVPROP_TYPE_UINT32
+DEFINE_PROPERTYKEY(PKEY_Device_BusNumber, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 23); // DEVPROP_TYPE_UINT32
+DEFINE_PROPERTYKEY(PKEY_Device_EnumeratorName, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 24); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_Device_Security, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 25); // DEVPROP_TYPE_SECURITY_DESCRIPTOR
+DEFINE_PROPERTYKEY(PKEY_Device_SecuritySDS, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 26); // DEVPROP_TYPE_SECURITY_DESCRIPTOR_STRING
+DEFINE_PROPERTYKEY(PKEY_Device_DevType, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 27); // DEVPROP_TYPE_UINT32
+DEFINE_PROPERTYKEY(PKEY_Device_Exclusive, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 28); // DEVPROP_TYPE_UINT32
+DEFINE_PROPERTYKEY(PKEY_Device_Characteristics, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 29); // DEVPROP_TYPE_UINT32
+DEFINE_PROPERTYKEY(PKEY_Device_Address, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 30); // DEVPROP_TYPE_UINT32
+DEFINE_PROPERTYKEY(PKEY_Device_UINumberDescFormat, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 31); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_Device_PowerData, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 32); // DEVPROP_TYPE_BINARY
+DEFINE_PROPERTYKEY(PKEY_Device_RemovalPolicy, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 33); // DEVPROP_TYPE_UINT32
+DEFINE_PROPERTYKEY(PKEY_Device_RemovalPolicyDefault, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 34); // DEVPROP_TYPE_UINT32
+DEFINE_PROPERTYKEY(PKEY_Device_RemovalPolicyOverride, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 35); // DEVPROP_TYPE_UINT32
+DEFINE_PROPERTYKEY(PKEY_Device_InstallState, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 36); // DEVPROP_TYPE_UINT32
+DEFINE_PROPERTYKEY(PKEY_Device_LocationPaths, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 37); // DEVPROP_TYPE_STRING_LIST
+DEFINE_PROPERTYKEY(PKEY_Device_BaseContainerId, 0xa45c254e, 0xdf1c, 0x4efd, 0x80, 0x20, 0x67, 0xd1, 0x46, 0xa8, 0x50, 0xe0, 38); // DEVPROP_TYPE_GUID
+
+//
+// Device properties
+// These PKEYs correspond to a device's status and problem code
+//
+DEFINE_PROPERTYKEY(PKEY_Device_DevNodeStatus, 0x4340a6c5, 0x93fa, 0x4706, 0x97, 0x2c, 0x7b, 0x64, 0x80, 0x08, 0xa5, 0xa7, 2); // DEVPROP_TYPE_UINT32
+DEFINE_PROPERTYKEY(PKEY_Device_ProblemCode, 0x4340a6c5, 0x93fa, 0x4706, 0x97, 0x2c, 0x7b, 0x64, 0x80, 0x08, 0xa5, 0xa7, 3); // DEVPROP_TYPE_UINT32
+
+//
+// Device properties
+// These PKEYs correspond to device relations
+//
+DEFINE_PROPERTYKEY(PKEY_Device_EjectionRelations, 0x4340a6c5, 0x93fa, 0x4706, 0x97, 0x2c, 0x7b, 0x64, 0x80, 0x08, 0xa5, 0xa7, 4); // DEVPROP_TYPE_STRING_LIST
+DEFINE_PROPERTYKEY(PKEY_Device_RemovalRelations, 0x4340a6c5, 0x93fa, 0x4706, 0x97, 0x2c, 0x7b, 0x64, 0x80, 0x08, 0xa5, 0xa7, 5); // DEVPROP_TYPE_STRING_LIST
+DEFINE_PROPERTYKEY(PKEY_Device_PowerRelations, 0x4340a6c5, 0x93fa, 0x4706, 0x97, 0x2c, 0x7b, 0x64, 0x80, 0x08, 0xa5, 0xa7, 6); // DEVPROP_TYPE_STRING_LIST
+DEFINE_PROPERTYKEY(PKEY_Device_BusRelations, 0x4340a6c5, 0x93fa, 0x4706, 0x97, 0x2c, 0x7b, 0x64, 0x80, 0x08, 0xa5, 0xa7, 7); // DEVPROP_TYPE_STRING_LIST
+DEFINE_PROPERTYKEY(PKEY_Device_Parent, 0x4340a6c5, 0x93fa, 0x4706, 0x97, 0x2c, 0x7b, 0x64, 0x80, 0x08, 0xa5, 0xa7, 8); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_Device_Children, 0x4340a6c5, 0x93fa, 0x4706, 0x97, 0x2c, 0x7b, 0x64, 0x80, 0x08, 0xa5, 0xa7, 9); // DEVPROP_TYPE_STRING_LIST
+DEFINE_PROPERTYKEY(PKEY_Device_Siblings, 0x4340a6c5, 0x93fa, 0x4706, 0x97, 0x2c, 0x7b, 0x64, 0x80, 0x08, 0xa5, 0xa7, 10); // DEVPROP_TYPE_STRING_LIST
+DEFINE_PROPERTYKEY(PKEY_Device_TransportRelations, 0x4340a6c5, 0x93fa, 0x4706, 0x97, 0x2c, 0x7b, 0x64, 0x80, 0x08, 0xa5, 0xa7, 11); // DEVPROP_TYPE_STRING_LIST
+
+//
+// Other Device properties
+//
+DEFINE_PROPERTYKEY(PKEY_Device_Reported, 0x80497100, 0x8c73, 0x48b9, 0xaa, 0xd9, 0xce, 0x38, 0x7e, 0x19, 0xc5, 0x6e, 2); // DEVPROP_TYPE_BOOLEAN
+DEFINE_PROPERTYKEY(PKEY_Device_Legacy, 0x80497100, 0x8c73, 0x48b9, 0xaa, 0xd9, 0xce, 0x38, 0x7e, 0x19, 0xc5, 0x6e, 3); // DEVPROP_TYPE_BOOLEAN
+DEFINE_PROPERTYKEY(PKEY_Device_InstanceId, 0x78c34fc8, 0x104a, 0x4aca, 0x9e, 0xa4, 0x52, 0x4d, 0x52, 0x99, 0x6e, 0x57, 256); // DEVPROP_TYPE_STRING
+
+DEFINE_PROPERTYKEY(PKEY_Device_ContainerId, 0x8c7ed206, 0x3f8a, 0x4827, 0xb3, 0xab, 0xae, 0x9e, 0x1f, 0xae, 0xfc, 0x6c, 2); // DEVPROP_TYPE_GUID
+
+DEFINE_PROPERTYKEY(PKEY_Device_ModelId, 0x80d81ea6, 0x7473, 0x4b0c, 0x82, 0x16, 0xef, 0xc1, 0x1a, 0x2c, 0x4c, 0x8b, 2); // DEVPROP_TYPE_GUID
+
+DEFINE_PROPERTYKEY(PKEY_Device_FriendlyNameAttributes, 0x80d81ea6, 0x7473, 0x4b0c, 0x82, 0x16, 0xef, 0xc1, 0x1a, 0x2c, 0x4c, 0x8b, 3); // DEVPROP_TYPE_UINT32
+DEFINE_PROPERTYKEY(PKEY_Device_ManufacturerAttributes, 0x80d81ea6, 0x7473, 0x4b0c, 0x82, 0x16, 0xef, 0xc1, 0x1a, 0x2c, 0x4c, 0x8b, 4); // DEVPROP_TYPE_UINT32
+
+DEFINE_PROPERTYKEY(PKEY_Device_PresenceNotForDevice, 0x80d81ea6, 0x7473, 0x4b0c, 0x82, 0x16, 0xef, 0xc1, 0x1a, 0x2c, 0x4c, 0x8b, 5); // DEVPROP_TYPE_BOOLEAN
+
+
+DEFINE_PROPERTYKEY(PKEY_Numa_Proximity_Domain, 0x540b947e, 0x8b40, 0x45bc, 0xa8, 0xa2, 0x6a, 0x0b, 0x89, 0x4c, 0xbd, 0xa2, 1); // DEVPROP_TYPE_UINT32
+DEFINE_PROPERTYKEY(PKEY_Device_DHP_Rebalance_Policy, 0x540b947e, 0x8b40, 0x45bc, 0xa8, 0xa2, 0x6a, 0x0b, 0x89, 0x4c, 0xbd, 0xa2, 2); // DEVPROP_TYPE_UINT32
+DEFINE_PROPERTYKEY(PKEY_Device_Numa_Node, 0x540b947e, 0x8b40, 0x45bc, 0xa8, 0xa2, 0x6a, 0x0b, 0x89, 0x4c, 0xbd, 0xa2, 3); // DEVPROP_TYPE_UINT32
+DEFINE_PROPERTYKEY(PKEY_Device_BusReportedDeviceDesc, 0x540b947e, 0x8b40, 0x45bc, 0xa8, 0xa2, 0x6a, 0x0b, 0x89, 0x4c, 0xbd, 0xa2, 4); // DEVPROP_TYPE_STRING
+
+DEFINE_PROPERTYKEY(PKEY_Device_InstallInProgress, 0x83da6326, 0x97a6, 0x4088, 0x94, 0x53, 0xa1, 0x92, 0x3f, 0x57, 0x3b, 0x29, 9); // DEVPROP_TYPE_BOOLEAN
+
+//
+// Device driver properties
+//
+DEFINE_PROPERTYKEY(PKEY_Device_DriverDate, 0xa8b865dd, 0x2e3d, 0x4094, 0xad, 0x97, 0xe5, 0x93, 0xa7, 0xc, 0x75, 0xd6, 2); // DEVPROP_TYPE_FILETIME
+DEFINE_PROPERTYKEY(PKEY_Device_DriverVersion, 0xa8b865dd, 0x2e3d, 0x4094, 0xad, 0x97, 0xe5, 0x93, 0xa7, 0xc, 0x75, 0xd6, 3); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_Device_DriverDesc, 0xa8b865dd, 0x2e3d, 0x4094, 0xad, 0x97, 0xe5, 0x93, 0xa7, 0xc, 0x75, 0xd6, 4); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_Device_DriverInfPath, 0xa8b865dd, 0x2e3d, 0x4094, 0xad, 0x97, 0xe5, 0x93, 0xa7, 0xc, 0x75, 0xd6, 5); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_Device_DriverInfSection, 0xa8b865dd, 0x2e3d, 0x4094, 0xad, 0x97, 0xe5, 0x93, 0xa7, 0xc, 0x75, 0xd6, 6); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_Device_DriverInfSectionExt, 0xa8b865dd, 0x2e3d, 0x4094, 0xad, 0x97, 0xe5, 0x93, 0xa7, 0xc, 0x75, 0xd6, 7); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_Device_MatchingDeviceId, 0xa8b865dd, 0x2e3d, 0x4094, 0xad, 0x97, 0xe5, 0x93, 0xa7, 0xc, 0x75, 0xd6, 8); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_Device_DriverProvider, 0xa8b865dd, 0x2e3d, 0x4094, 0xad, 0x97, 0xe5, 0x93, 0xa7, 0xc, 0x75, 0xd6, 9); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_Device_DriverPropPageProvider, 0xa8b865dd, 0x2e3d, 0x4094, 0xad, 0x97, 0xe5, 0x93, 0xa7, 0xc, 0x75, 0xd6, 10); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_Device_DriverCoInstallers, 0xa8b865dd, 0x2e3d, 0x4094, 0xad, 0x97, 0xe5, 0x93, 0xa7, 0xc, 0x75, 0xd6, 11); // DEVPROP_TYPE_STRING_LIST
+DEFINE_PROPERTYKEY(PKEY_Device_ResourcePickerTags, 0xa8b865dd, 0x2e3d, 0x4094, 0xad, 0x97, 0xe5, 0x93, 0xa7, 0xc, 0x75, 0xd6, 12); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_Device_ResourcePickerExceptions, 0xa8b865dd, 0x2e3d, 0x4094, 0xad, 0x97, 0xe5, 0x93, 0xa7, 0xc, 0x75, 0xd6, 13); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_Device_DriverRank, 0xa8b865dd, 0x2e3d, 0x4094, 0xad, 0x97, 0xe5, 0x93, 0xa7, 0xc, 0x75, 0xd6, 14); // DEVPROP_TYPE_UINT32
+DEFINE_PROPERTYKEY(PKEY_Device_DriverLogoLevel, 0xa8b865dd, 0x2e3d, 0x4094, 0xad, 0x97, 0xe5, 0x93, 0xa7, 0xc, 0x75, 0xd6, 15); // DEVPROP_TYPE_UINT32
+DEFINE_PROPERTYKEY(PKEY_Device_NoConnectSound, 0xa8b865dd, 0x2e3d, 0x4094, 0xad, 0x97, 0xe5, 0x93, 0xa7, 0xc, 0x75, 0xd6, 17); // DEVPROP_TYPE_BOOLEAN
+DEFINE_PROPERTYKEY(PKEY_Device_GenericDriverInstalled, 0xa8b865dd, 0x2e3d, 0x4094, 0xad, 0x97, 0xe5, 0x93, 0xa7, 0xc, 0x75, 0xd6, 18); // DEVPROP_TYPE_BOOLEAN
+DEFINE_PROPERTYKEY(PKEY_Device_AdditionalSoftwareRequested, 0xa8b865dd, 0x2e3d, 0x4094, 0xad, 0x97, 0xe5, 0x93, 0xa7, 0xc, 0x75, 0xd6, 19);// DEVPROP_TYPE_BOOLEAN
+
+//
+// Device safe-removal properties
+//
+DEFINE_PROPERTYKEY(PKEY_Device_SafeRemovalRequired, 0xafd97640, 0x86a3, 0x4210, 0xb6, 0x7c, 0x28, 0x9c, 0x41, 0xaa, 0xbe, 0x55, 2); // DEVPROP_TYPE_BOOLEAN
+DEFINE_PROPERTYKEY(PKEY_Device_SafeRemovalRequiredOverride, 0xafd97640, 0x86a3, 0x4210, 0xb6, 0x7c, 0x28, 0x9c, 0x41, 0xaa, 0xbe, 0x55, 3);// DEVPROP_TYPE_BOOLEAN
+
+
+//
+// Device properties that were set by the driver package that was installed
+// on the device.
+//
+DEFINE_PROPERTYKEY(PKEY_DrvPkg_Model, 0xcf73bb51, 0x3abf, 0x44a2, 0x85, 0xe0, 0x9a, 0x3d, 0xc7, 0xa1, 0x21, 0x32, 2); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_DrvPkg_VendorWebSite, 0xcf73bb51, 0x3abf, 0x44a2, 0x85, 0xe0, 0x9a, 0x3d, 0xc7, 0xa1, 0x21, 0x32, 3); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_DrvPkg_DetailedDescription, 0xcf73bb51, 0x3abf, 0x44a2, 0x85, 0xe0, 0x9a, 0x3d, 0xc7, 0xa1, 0x21, 0x32, 4); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_DrvPkg_DocumentationLink, 0xcf73bb51, 0x3abf, 0x44a2, 0x85, 0xe0, 0x9a, 0x3d, 0xc7, 0xa1, 0x21, 0x32, 5); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_DrvPkg_Icon, 0xcf73bb51, 0x3abf, 0x44a2, 0x85, 0xe0, 0x9a, 0x3d, 0xc7, 0xa1, 0x21, 0x32, 6); // DEVPROP_TYPE_STRING_LIST
+DEFINE_PROPERTYKEY(PKEY_DrvPkg_BrandingIcon, 0xcf73bb51, 0x3abf, 0x44a2, 0x85, 0xe0, 0x9a, 0x3d, 0xc7, 0xa1, 0x21, 0x32, 7); // DEVPROP_TYPE_STRING_LIST
+
+//
+// Device setup class properties
+// These PKEYs correspond to the old setupapi SPCRP_XXX properties
+//
+DEFINE_PROPERTYKEY(PKEY_DeviceClass_UpperFilters, 0x4321918b, 0xf69e, 0x470d, 0xa5, 0xde, 0x4d, 0x88, 0xc7, 0x5a, 0xd2, 0x4b, 19); // DEVPROP_TYPE_STRING_LIST
+DEFINE_PROPERTYKEY(PKEY_DeviceClass_LowerFilters, 0x4321918b, 0xf69e, 0x470d, 0xa5, 0xde, 0x4d, 0x88, 0xc7, 0x5a, 0xd2, 0x4b, 20); // DEVPROP_TYPE_STRING_LIST
+DEFINE_PROPERTYKEY(PKEY_DeviceClass_Security, 0x4321918b, 0xf69e, 0x470d, 0xa5, 0xde, 0x4d, 0x88, 0xc7, 0x5a, 0xd2, 0x4b, 25); // DEVPROP_TYPE_SECURITY_DESCRIPTOR
+DEFINE_PROPERTYKEY(PKEY_DeviceClass_SecuritySDS, 0x4321918b, 0xf69e, 0x470d, 0xa5, 0xde, 0x4d, 0x88, 0xc7, 0x5a, 0xd2, 0x4b, 26); // DEVPROP_TYPE_SECURITY_DESCRIPTOR_STRING
+DEFINE_PROPERTYKEY(PKEY_DeviceClass_DevType, 0x4321918b, 0xf69e, 0x470d, 0xa5, 0xde, 0x4d, 0x88, 0xc7, 0x5a, 0xd2, 0x4b, 27); // DEVPROP_TYPE_UINT32
+DEFINE_PROPERTYKEY(PKEY_DeviceClass_Exclusive, 0x4321918b, 0xf69e, 0x470d, 0xa5, 0xde, 0x4d, 0x88, 0xc7, 0x5a, 0xd2, 0x4b, 28); // DEVPROP_TYPE_UINT32
+DEFINE_PROPERTYKEY(PKEY_DeviceClass_Characteristics, 0x4321918b, 0xf69e, 0x470d, 0xa5, 0xde, 0x4d, 0x88, 0xc7, 0x5a, 0xd2, 0x4b, 29); // DEVPROP_TYPE_UINT32
+
+//
+// Device setup class properties
+// These PKEYs correspond to registry values under the device class GUID key
+//
+DEFINE_PROPERTYKEY(PKEY_DeviceClass_Name, 0x259abffc, 0x50a7, 0x47ce, 0xaf, 0x8, 0x68, 0xc9, 0xa7, 0xd7, 0x33, 0x66, 2); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_DeviceClass_ClassName, 0x259abffc, 0x50a7, 0x47ce, 0xaf, 0x8, 0x68, 0xc9, 0xa7, 0xd7, 0x33, 0x66, 3); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_DeviceClass_Icon, 0x259abffc, 0x50a7, 0x47ce, 0xaf, 0x8, 0x68, 0xc9, 0xa7, 0xd7, 0x33, 0x66, 4); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_DeviceClass_ClassInstaller, 0x259abffc, 0x50a7, 0x47ce, 0xaf, 0x8, 0x68, 0xc9, 0xa7, 0xd7, 0x33, 0x66, 5); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_DeviceClass_PropPageProvider, 0x259abffc, 0x50a7, 0x47ce, 0xaf, 0x8, 0x68, 0xc9, 0xa7, 0xd7, 0x33, 0x66, 6); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_DeviceClass_NoInstallClass, 0x259abffc, 0x50a7, 0x47ce, 0xaf, 0x8, 0x68, 0xc9, 0xa7, 0xd7, 0x33, 0x66, 7); // DEVPROP_TYPE_BOOLEAN
+DEFINE_PROPERTYKEY(PKEY_DeviceClass_NoDisplayClass, 0x259abffc, 0x50a7, 0x47ce, 0xaf, 0x8, 0x68, 0xc9, 0xa7, 0xd7, 0x33, 0x66, 8); // DEVPROP_TYPE_BOOLEAN
+DEFINE_PROPERTYKEY(PKEY_DeviceClass_SilentInstall, 0x259abffc, 0x50a7, 0x47ce, 0xaf, 0x8, 0x68, 0xc9, 0xa7, 0xd7, 0x33, 0x66, 9); // DEVPROP_TYPE_BOOLEAN
+DEFINE_PROPERTYKEY(PKEY_DeviceClass_NoUseClass, 0x259abffc, 0x50a7, 0x47ce, 0xaf, 0x8, 0x68, 0xc9, 0xa7, 0xd7, 0x33, 0x66, 10); // DEVPROP_TYPE_BOOLEAN
+DEFINE_PROPERTYKEY(PKEY_DeviceClass_DefaultService, 0x259abffc, 0x50a7, 0x47ce, 0xaf, 0x8, 0x68, 0xc9, 0xa7, 0xd7, 0x33, 0x66, 11); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_DeviceClass_IconPath, 0x259abffc, 0x50a7, 0x47ce, 0xaf, 0x8, 0x68, 0xc9, 0xa7, 0xd7, 0x33, 0x66, 12); // DEVPROP_TYPE_STRING_LIST
+
+//
+// Other Device setup class properties
+//
+DEFINE_PROPERTYKEY(PKEY_DeviceClass_ClassCoInstallers, 0x713d1703, 0xa2e2, 0x49f5, 0x92, 0x14, 0x56, 0x47, 0x2e, 0xf3, 0xda, 0x5c, 2); // DEVPROP_TYPE_STRING_LIST
+
+//
+// Device interface properties
+//
+DEFINE_PROPERTYKEY(PKEY_DeviceInterface_FriendlyName, 0x026e516e, 0xb814, 0x414b, 0x83, 0xcd, 0x85, 0x6d, 0x6f, 0xef, 0x48, 0x22, 2); // DEVPROP_TYPE_STRING
+DEFINE_PROPERTYKEY(PKEY_DeviceInterface_Enabled, 0x026e516e, 0xb814, 0x414b, 0x83, 0xcd, 0x85, 0x6d, 0x6f, 0xef, 0x48, 0x22, 3); // DEVPROP_TYPE_BOOLEAN
+DEFINE_PROPERTYKEY(PKEY_DeviceInterface_ClassGuid, 0x026e516e, 0xb814, 0x414b, 0x83, 0xcd, 0x85, 0x6d, 0x6f, 0xef, 0x48, 0x22, 4); // DEVPROP_TYPE_GUID
+
+//
+// Device interface class properties
+//
+DEFINE_PROPERTYKEY(PKEY_DeviceInterfaceClass_DefaultInterface, 0x14c83a99, 0x0b3f, 0x44b7, 0xbe, 0x4c, 0xa1, 0x78, 0xd3, 0x99, 0x05, 0x64, 2); // DEVPROP_TYPE_STRING
+
+
+
+
--- /dev/null
+#ifndef __gInclude__\r
+#define __gInclude__\r
+\r
+#if SGI \r
+ #undef BEOS \r
+ #undef MAC \r
+ #undef WINDOWS\r
+ //\r
+ #define ASIO_BIG_ENDIAN 1\r
+ #define ASIO_CPU_MIPS 1\r
+#elif defined(_WIN32) || defined(_WIN64)\r
+ #undef BEOS \r
+ #undef MAC \r
+ #undef SGI\r
+ #define WINDOWS 1\r
+ #define ASIO_LITTLE_ENDIAN 1\r
+ #define ASIO_CPU_X86 1\r
+#elif BEOS\r
+ #undef MAC \r
+ #undef SGI\r
+ #undef WINDOWS\r
+ #define ASIO_LITTLE_ENDIAN 1\r
+ #define ASIO_CPU_X86 1\r
+ //\r
+#else\r
+ #define MAC 1\r
+ #undef BEOS \r
+ #undef WINDOWS\r
+ #undef SGI\r
+ #define ASIO_BIG_ENDIAN 1\r
+ #define ASIO_CPU_PPC 1\r
+#endif\r
+\r
+// always\r
+#define NATIVE_INT64 0\r
+#define IEEE754_64FLOAT 1\r
+\r
+#endif // __gInclude__\r
--- /dev/null
+#include "asiosys.h"\r
+#include "asio.h"\r
+\r
+/* Forward Declarations */ \r
+\r
+#ifndef __ASIODRIVER_FWD_DEFINED__\r
+#define __ASIODRIVER_FWD_DEFINED__\r
+typedef interface IASIO IASIO;\r
+#endif /* __ASIODRIVER_FWD_DEFINED__ */\r
+\r
+interface IASIO : public IUnknown\r
+{\r
+\r
+ virtual ASIOBool init(void *sysHandle) = 0;\r
+ virtual void getDriverName(char *name) = 0; \r
+ virtual long getDriverVersion() = 0;\r
+ virtual void getErrorMessage(char *string) = 0; \r
+ virtual ASIOError start() = 0;\r
+ virtual ASIOError stop() = 0;\r
+ virtual ASIOError getChannels(long *numInputChannels, long *numOutputChannels) = 0;\r
+ virtual ASIOError getLatencies(long *inputLatency, long *outputLatency) = 0;\r
+ virtual ASIOError getBufferSize(long *minSize, long *maxSize,\r
+ long *preferredSize, long *granularity) = 0;\r
+ virtual ASIOError canSampleRate(ASIOSampleRate sampleRate) = 0;\r
+ virtual ASIOError getSampleRate(ASIOSampleRate *sampleRate) = 0;\r
+ virtual ASIOError setSampleRate(ASIOSampleRate sampleRate) = 0;\r
+ virtual ASIOError getClockSources(ASIOClockSource *clocks, long *numSources) = 0;\r
+ virtual ASIOError setClockSource(long reference) = 0;\r
+ virtual ASIOError getSamplePosition(ASIOSamples *sPos, ASIOTimeStamp *tStamp) = 0;\r
+ virtual ASIOError getChannelInfo(ASIOChannelInfo *info) = 0;\r
+ virtual ASIOError createBuffers(ASIOBufferInfo *bufferInfos, long numChannels,\r
+ long bufferSize, ASIOCallbacks *callbacks) = 0;\r
+ virtual ASIOError disposeBuffers() = 0;\r
+ virtual ASIOError controlPanel() = 0;\r
+ virtual ASIOError future(long selector,void *opt) = 0;\r
+ virtual ASIOError outputReady() = 0;\r
+};\r
--- /dev/null
+/*
+ IASIOThiscallResolver.cpp see the comments in iasiothiscallresolver.h for
+ the top level description - this comment describes the technical details of
+ the implementation.
+
+ The latest version of this file is available from:
+ http://www.audiomulch.com/~rossb/code/calliasio
+
+ please email comments to Ross Bencina <rossb@audiomulch.com>
+
+ BACKGROUND
+
+ The IASIO interface declared in the Steinberg ASIO 2 SDK declares
+ functions with no explicit calling convention. This causes MSVC++ to default
+ to using the thiscall convention, which is a proprietary convention not
+ implemented by some non-microsoft compilers - notably borland BCC,
+ C++Builder, and gcc. MSVC++ is the defacto standard compiler used by
+ Steinberg. As a result of this situation, the ASIO sdk will compile with
+ any compiler, however attempting to execute the compiled code will cause a
+ crash due to different default calling conventions on non-Microsoft
+ compilers.
+
+ IASIOThiscallResolver solves the problem by providing an adapter class that
+ delegates to the IASIO interface using the correct calling convention
+ (thiscall). Due to the lack of support for thiscall in the Borland and GCC
+ compilers, the calls have been implemented in assembly language.
+
+ A number of macros are defined for thiscall function calls with different
+ numbers of parameters, with and without return values - it may be possible
+ to modify the format of these macros to make them work with other inline
+ assemblers.
+
+
+ THISCALL DEFINITION
+
+ A number of definitions of the thiscall calling convention are floating
+ around the internet. The following definition has been validated against
+ output from the MSVC++ compiler:
+
+ For non-vararg functions, thiscall works as follows: the object (this)
+ pointer is passed in ECX. All arguments are passed on the stack in
+ right to left order. The return value is placed in EAX. The callee
+ clears the passed arguments from the stack.
+
+
+ FINDING FUNCTION POINTERS FROM AN IASIO POINTER
+
+ The first field of a COM object is a pointer to its vtbl. Thus a pointer
+ to an object implementing the IASIO interface also points to a pointer to
+ that object's vtbl. The vtbl is a table of function pointers for all of
+ the virtual functions exposed by the implemented interfaces.
+
+ If we consider a variable declared as a pointer to IASIO:
+
+ IASIO *theAsioDriver
+
+ theAsioDriver points to:
+
+ object implementing IASIO
+ {
+ IASIOvtbl *vtbl
+ other data
+ }
+
+ in other words, theAsioDriver points to a pointer to an IASIOvtbl
+
+ vtbl points to a table of function pointers:
+
+ IASIOvtbl ( interface IASIO : public IUnknown )
+ {
+ (IUnknown functions)
+ 0 virtual HRESULT STDMETHODCALLTYPE (*QueryInterface)(REFIID riid, void **ppv) = 0;
+ 4 virtual ULONG STDMETHODCALLTYPE (*AddRef)() = 0;
+ 8 virtual ULONG STDMETHODCALLTYPE (*Release)() = 0;
+
+ (IASIO functions)
+ 12 virtual ASIOBool (*init)(void *sysHandle) = 0;
+ 16 virtual void (*getDriverName)(char *name) = 0;
+ 20 virtual long (*getDriverVersion)() = 0;
+ 24 virtual void (*getErrorMessage)(char *string) = 0;
+ 28 virtual ASIOError (*start)() = 0;
+ 32 virtual ASIOError (*stop)() = 0;
+ 36 virtual ASIOError (*getChannels)(long *numInputChannels, long *numOutputChannels) = 0;
+ 40 virtual ASIOError (*getLatencies)(long *inputLatency, long *outputLatency) = 0;
+ 44 virtual ASIOError (*getBufferSize)(long *minSize, long *maxSize,
+ long *preferredSize, long *granularity) = 0;
+ 48 virtual ASIOError (*canSampleRate)(ASIOSampleRate sampleRate) = 0;
+ 52 virtual ASIOError (*getSampleRate)(ASIOSampleRate *sampleRate) = 0;
+ 56 virtual ASIOError (*setSampleRate)(ASIOSampleRate sampleRate) = 0;
+ 60 virtual ASIOError (*getClockSources)(ASIOClockSource *clocks, long *numSources) = 0;
+ 64 virtual ASIOError (*setClockSource)(long reference) = 0;
+ 68 virtual ASIOError (*getSamplePosition)(ASIOSamples *sPos, ASIOTimeStamp *tStamp) = 0;
+ 72 virtual ASIOError (*getChannelInfo)(ASIOChannelInfo *info) = 0;
+ 76 virtual ASIOError (*createBuffers)(ASIOBufferInfo *bufferInfos, long numChannels,
+ long bufferSize, ASIOCallbacks *callbacks) = 0;
+ 80 virtual ASIOError (*disposeBuffers)() = 0;
+ 84 virtual ASIOError (*controlPanel)() = 0;
+ 88 virtual ASIOError (*future)(long selector,void *opt) = 0;
+ 92 virtual ASIOError (*outputReady)() = 0;
+ };
+
+ The numbers in the left column show the byte offset of each function ptr
+ from the beginning of the vtbl. These numbers are used in the code below
+ to select different functions.
+
+ In order to find the address of a particular function, theAsioDriver
+ must first be dereferenced to find the value of the vtbl pointer:
+
+ mov eax, theAsioDriver
+ mov edx, [theAsioDriver] // edx now points to vtbl[0]
+
+ Then an offset must be added to the vtbl pointer to select a
+ particular function, for example vtbl+44 points to the slot containing
+ a pointer to the getBufferSize function.
+
+ Finally vtbl+x must be dereferenced to obtain the value of the function
+ pointer stored in that address:
+
+ call [edx+44] // call the function pointed to by
+ // the value in the getBufferSize field of the vtbl
+
+
+ SEE ALSO
+
+ Martin Fay's OpenASIO DLL at http://www.martinfay.com solves the same
+ problem by providing a new COM interface which wraps IASIO with an
+ interface that uses portable calling conventions. OpenASIO must be compiled
+ with MSVC, and requires that you ship the OpenASIO DLL with your
+ application.
+
+
+ ACKNOWLEDGEMENTS
+
+ Ross Bencina: worked out the thiscall details above, wrote the original
+ Borland asm macros, and a patch for asio.cpp (which is no longer needed).
+ Thanks to Martin Fay for introducing me to the issues discussed here,
+ and to Rene G. Ceballos for assisting with asm dumps from MSVC++.
+
+ Antti Silvast: converted the original calliasio to work with gcc and NASM
+ by implementing the asm code in a separate file.
+
+ Fraser Adams: modified the original calliasio containing the Borland inline
+ asm to add inline asm for gcc i.e. Intel syntax for Borland and AT&T syntax
+ for gcc. This seems a neater approach for gcc than to have a separate .asm
+ file and it means that we only need one version of the thiscall patch.
+
+ Fraser Adams: rewrote the original calliasio patch in the form of the
+ IASIOThiscallResolver class in order to avoid modifications to files from
+ the Steinberg SDK, which may have had potential licence issues.
+
+ Andrew Baldwin: contributed fixes for compatibility problems with more
+ recent versions of the gcc assembler.
+*/
+
+
+// We only need IASIOThiscallResolver at all if we are on Win32. For other
+// platforms we simply bypass the IASIOThiscallResolver definition to allow us
+// to be safely #include'd whatever the platform to keep client code portable
+#if (defined(WIN32) || defined(_WIN32) || defined(__WIN32__)) && !defined(_WIN64)
+
+
+// If microsoft compiler we can call IASIO directly so IASIOThiscallResolver
+// is not used.
+#if !defined(_MSC_VER)
+
+
+#include <new>
+#include <assert.h>
+
+// We have a mechanism in iasiothiscallresolver.h to ensure that asio.h is
+// #include'd before it in client code, we do NOT want to do this test here.
+#define iasiothiscallresolver_sourcefile 1
+#include "iasiothiscallresolver.h"
+#undef iasiothiscallresolver_sourcefile
+
+// iasiothiscallresolver.h redefines ASIOInit for clients, but we don't want
+// this macro defined in this translation unit.
+#undef ASIOInit
+
+
+// theAsioDriver is a global pointer to the current IASIO instance which the
+// ASIO SDK uses to perform all actions on the IASIO interface. We substitute
+// our own forwarding interface into this pointer.
+extern IASIO* theAsioDriver;
+
+
+// The following macros define the inline assembler for BORLAND first then gcc
+
+#if defined(__BCPLUSPLUS__) || defined(__BORLANDC__)
+
+
+#define CALL_THISCALL_0( resultName, thisPtr, funcOffset )\
+ void *this_ = (thisPtr); \
+ __asm { \
+ mov ecx, this_ ; \
+ mov eax, [ecx] ; \
+ call [eax+funcOffset] ; \
+ mov resultName, eax ; \
+ }
+
+
+#define CALL_VOID_THISCALL_1( thisPtr, funcOffset, param1 )\
+ void *this_ = (thisPtr); \
+ __asm { \
+ mov eax, param1 ; \
+ push eax ; \
+ mov ecx, this_ ; \
+ mov eax, [ecx] ; \
+ call [eax+funcOffset] ; \
+ }
+
+
+#define CALL_THISCALL_1( resultName, thisPtr, funcOffset, param1 )\
+ void *this_ = (thisPtr); \
+ __asm { \
+ mov eax, param1 ; \
+ push eax ; \
+ mov ecx, this_ ; \
+ mov eax, [ecx] ; \
+ call [eax+funcOffset] ; \
+ mov resultName, eax ; \
+ }
+
+
+#define CALL_THISCALL_1_DOUBLE( resultName, thisPtr, funcOffset, param1 )\
+ void *this_ = (thisPtr); \
+    void *doubleParamPtr_ (&param1);                                        \
+ __asm { \
+ mov eax, doubleParamPtr_ ; \
+ push [eax+4] ; \
+ push [eax] ; \
+ mov ecx, this_ ; \
+ mov eax, [ecx] ; \
+ call [eax+funcOffset] ; \
+ mov resultName, eax ; \
+ }
+
+
+#define CALL_THISCALL_2( resultName, thisPtr, funcOffset, param1, param2 )\
+ void *this_ = (thisPtr); \
+ __asm { \
+ mov eax, param2 ; \
+ push eax ; \
+ mov eax, param1 ; \
+ push eax ; \
+ mov ecx, this_ ; \
+ mov eax, [ecx] ; \
+ call [eax+funcOffset] ; \
+ mov resultName, eax ; \
+ }
+
+
+#define CALL_THISCALL_4( resultName, thisPtr, funcOffset, param1, param2, param3, param4 )\
+ void *this_ = (thisPtr); \
+ __asm { \
+ mov eax, param4 ; \
+ push eax ; \
+ mov eax, param3 ; \
+ push eax ; \
+ mov eax, param2 ; \
+ push eax ; \
+ mov eax, param1 ; \
+ push eax ; \
+ mov ecx, this_ ; \
+ mov eax, [ecx] ; \
+ call [eax+funcOffset] ; \
+ mov resultName, eax ; \
+ }
+
+
+#elif defined(__GNUC__)
+
+
+#define CALL_THISCALL_0( resultName, thisPtr, funcOffset ) \
+ __asm__ __volatile__ ("movl (%1), %%edx\n\t" \
+ "call *"#funcOffset"(%%edx)\n\t" \
+ :"=a"(resultName) /* Output Operands */ \
+ :"c"(thisPtr) /* Input Operands */ \
+ : "%edx" /* Clobbered Registers */ \
+ ); \
+
+
+#define CALL_VOID_THISCALL_1( thisPtr, funcOffset, param1 ) \
+ __asm__ __volatile__ ("pushl %0\n\t" \
+ "movl (%1), %%edx\n\t" \
+ "call *"#funcOffset"(%%edx)\n\t" \
+ : /* Output Operands */ \
+ :"r"(param1), /* Input Operands */ \
+ "c"(thisPtr) \
+ : "%edx" /* Clobbered Registers */ \
+ ); \
+
+
+#define CALL_THISCALL_1( resultName, thisPtr, funcOffset, param1 ) \
+ __asm__ __volatile__ ("pushl %1\n\t" \
+ "movl (%2), %%edx\n\t" \
+ "call *"#funcOffset"(%%edx)\n\t" \
+ :"=a"(resultName) /* Output Operands */ \
+ :"r"(param1), /* Input Operands */ \
+ "c"(thisPtr) \
+ : "%edx" /* Clobbered Registers */ \
+ ); \
+
+
+#define CALL_THISCALL_1_DOUBLE( resultName, thisPtr, funcOffset, param1 ) \
+ do { \
+ double param1f64 = param1; /* Cast explicitly to double */ \
+        double *param1f64Ptr = &param1f64;  /* Make pointer to address */     \
+ __asm__ __volatile__ ("pushl 4(%1)\n\t" \
+ "pushl (%1)\n\t" \
+ "movl (%2), %%edx\n\t" \
+ "call *"#funcOffset"(%%edx);\n\t" \
+ : "=a"(resultName) /* Output Operands */ \
+ : "r"(param1f64Ptr), /* Input Operands */ \
+ "c"(thisPtr), \
+ "m"(*param1f64Ptr) /* Using address */ \
+ : "%edx" /* Clobbered Registers */ \
+ ); \
+ } while (0); \
+
+
+#define CALL_THISCALL_2( resultName, thisPtr, funcOffset, param1, param2 ) \
+ __asm__ __volatile__ ("pushl %1\n\t" \
+ "pushl %2\n\t" \
+ "movl (%3), %%edx\n\t" \
+ "call *"#funcOffset"(%%edx)\n\t" \
+ :"=a"(resultName) /* Output Operands */ \
+ :"r"(param2), /* Input Operands */ \
+ "r"(param1), \
+ "c"(thisPtr) \
+ : "%edx" /* Clobbered Registers */ \
+ ); \
+
+
+#define CALL_THISCALL_4( resultName, thisPtr, funcOffset, param1, param2, param3, param4 )\
+ __asm__ __volatile__ ("pushl %1\n\t" \
+ "pushl %2\n\t" \
+ "pushl %3\n\t" \
+ "pushl %4\n\t" \
+ "movl (%5), %%edx\n\t" \
+ "call *"#funcOffset"(%%edx)\n\t" \
+ :"=a"(resultName) /* Output Operands */ \
+ :"r"(param4), /* Input Operands */ \
+ "r"(param3), \
+ "r"(param2), \
+ "r"(param1), \
+ "c"(thisPtr) \
+ : "%edx" /* Clobbered Registers */ \
+ ); \
+
+#endif
+
+
+
+// Our static singleton instance.
+IASIOThiscallResolver IASIOThiscallResolver::instance;
+
+// Constructor called to initialize static Singleton instance above. Note that
+// it is important not to clear that_ incase it has already been set by the call
+// to placement new in ASIOInit().
+IASIOThiscallResolver::IASIOThiscallResolver()
+{
+}
+
+// Constructor called from ASIOInit() below
+IASIOThiscallResolver::IASIOThiscallResolver(IASIO* that)
+: that_( that )
+{
+}
+
+// Implement IUnknown methods as assert(false). IASIOThiscallResolver is not
+// really a COM object, just a wrapper which will work with the ASIO SDK.
+// If you wanted to use ASIO without the SDK you might want to implement COM
+// aggregation in these methods.
+HRESULT STDMETHODCALLTYPE IASIOThiscallResolver::QueryInterface(REFIID riid, void **ppv)
+{
+ (void)riid; // suppress unused variable warning
+
+ assert( false ); // this function should never be called by the ASIO SDK.
+
+ *ppv = NULL;
+ return E_NOINTERFACE;
+}
+
+ULONG STDMETHODCALLTYPE IASIOThiscallResolver::AddRef()
+{
+ assert( false ); // this function should never be called by the ASIO SDK.
+
+ return 1;
+}
+
+ULONG STDMETHODCALLTYPE IASIOThiscallResolver::Release()
+{
+ assert( false ); // this function should never be called by the ASIO SDK.
+
+ return 1;
+}
+
+
+// Implement the IASIO interface methods by performing the vptr manipulation
+// described above then delegating to the real implementation.
+ASIOBool IASIOThiscallResolver::init(void *sysHandle)
+{
+ ASIOBool result;
+ CALL_THISCALL_1( result, that_, 12, sysHandle );
+ return result;
+}
+
+void IASIOThiscallResolver::getDriverName(char *name)
+{
+ CALL_VOID_THISCALL_1( that_, 16, name );
+}
+
+long IASIOThiscallResolver::getDriverVersion()
+{
+ ASIOBool result;
+ CALL_THISCALL_0( result, that_, 20 );
+ return result;
+}
+
+void IASIOThiscallResolver::getErrorMessage(char *string)
+{
+ CALL_VOID_THISCALL_1( that_, 24, string );
+}
+
+ASIOError IASIOThiscallResolver::start()
+{
+ ASIOBool result;
+ CALL_THISCALL_0( result, that_, 28 );
+ return result;
+}
+
+ASIOError IASIOThiscallResolver::stop()
+{
+ ASIOBool result;
+ CALL_THISCALL_0( result, that_, 32 );
+ return result;
+}
+
+ASIOError IASIOThiscallResolver::getChannels(long *numInputChannels, long *numOutputChannels)
+{
+ ASIOBool result;
+ CALL_THISCALL_2( result, that_, 36, numInputChannels, numOutputChannels );
+ return result;
+}
+
+ASIOError IASIOThiscallResolver::getLatencies(long *inputLatency, long *outputLatency)
+{
+ ASIOBool result;
+ CALL_THISCALL_2( result, that_, 40, inputLatency, outputLatency );
+ return result;
+}
+
+ASIOError IASIOThiscallResolver::getBufferSize(long *minSize, long *maxSize,
+ long *preferredSize, long *granularity)
+{
+ ASIOBool result;
+ CALL_THISCALL_4( result, that_, 44, minSize, maxSize, preferredSize, granularity );
+ return result;
+}
+
+ASIOError IASIOThiscallResolver::canSampleRate(ASIOSampleRate sampleRate)
+{
+ ASIOBool result;
+ CALL_THISCALL_1_DOUBLE( result, that_, 48, sampleRate );
+ return result;
+}
+
+ASIOError IASIOThiscallResolver::getSampleRate(ASIOSampleRate *sampleRate)
+{
+ ASIOBool result;
+ CALL_THISCALL_1( result, that_, 52, sampleRate );
+ return result;
+}
+
+ASIOError IASIOThiscallResolver::setSampleRate(ASIOSampleRate sampleRate)
+{
+ ASIOBool result;
+ CALL_THISCALL_1_DOUBLE( result, that_, 56, sampleRate );
+ return result;
+}
+
+ASIOError IASIOThiscallResolver::getClockSources(ASIOClockSource *clocks, long *numSources)
+{
+ ASIOBool result;
+ CALL_THISCALL_2( result, that_, 60, clocks, numSources );
+ return result;
+}
+
+ASIOError IASIOThiscallResolver::setClockSource(long reference)
+{
+ ASIOBool result;
+ CALL_THISCALL_1( result, that_, 64, reference );
+ return result;
+}
+
+ASIOError IASIOThiscallResolver::getSamplePosition(ASIOSamples *sPos, ASIOTimeStamp *tStamp)
+{
+ ASIOBool result;
+ CALL_THISCALL_2( result, that_, 68, sPos, tStamp );
+ return result;
+}
+
+ASIOError IASIOThiscallResolver::getChannelInfo(ASIOChannelInfo *info)
+{
+ ASIOBool result;
+ CALL_THISCALL_1( result, that_, 72, info );
+ return result;
+}
+
+ASIOError IASIOThiscallResolver::createBuffers(ASIOBufferInfo *bufferInfos,
+ long numChannels, long bufferSize, ASIOCallbacks *callbacks)
+{
+ ASIOBool result;
+ CALL_THISCALL_4( result, that_, 76, bufferInfos, numChannels, bufferSize, callbacks );
+ return result;
+}
+
+ASIOError IASIOThiscallResolver::disposeBuffers()
+{
+ ASIOBool result;
+ CALL_THISCALL_0( result, that_, 80 );
+ return result;
+}
+
+ASIOError IASIOThiscallResolver::controlPanel()
+{
+ ASIOBool result;
+ CALL_THISCALL_0( result, that_, 84 );
+ return result;
+}
+
+ASIOError IASIOThiscallResolver::future(long selector,void *opt)
+{
+ ASIOBool result;
+ CALL_THISCALL_2( result, that_, 88, selector, opt );
+ return result;
+}
+
+ASIOError IASIOThiscallResolver::outputReady()
+{
+ ASIOBool result;
+ CALL_THISCALL_0( result, that_, 92 );
+ return result;
+}
+
+
+// Implement our substitute ASIOInit() method
+ASIOError IASIOThiscallResolver::ASIOInit(ASIODriverInfo *info)
+{
+ // To ensure that our instance's vptr is correctly constructed, even if
+ // ASIOInit is called prior to main(), we explicitly call its constructor
+ // (potentially over the top of an existing instance). Note that this is
+ // pretty ugly, and is only safe because IASIOThiscallResolver has no
+ // destructor and contains no objects with destructors.
+ new((void*)&instance) IASIOThiscallResolver( theAsioDriver );
+
+ // Interpose between ASIO client code and the real driver.
+ theAsioDriver = &instance;
+
+ // Note that we never need to switch theAsioDriver back to point to the
+ // real driver because theAsioDriver is reset to zero in ASIOExit().
+
+ // Delegate to the real ASIOInit
+ return ::ASIOInit(info);
+}
+
+
+#endif /* !defined(_MSC_VER) */
+
+#endif /* Win32 */
+
--- /dev/null
+// ****************************************************************************\r
+//\r
+// Changed: I have modified this file slightly (includes) to work with\r
+// RtAudio. RtAudio.cpp must include this file after asio.h. \r
+//\r
+// File: IASIOThiscallResolver.h\r
+// Description: The IASIOThiscallResolver class implements the IASIO\r
+// interface and acts as a proxy to the real IASIO interface by\r
+// calling through its vptr table using the thiscall calling\r
+// convention. To put it another way, we interpose\r
+// IASIOThiscallResolver between ASIO SDK code and the driver.\r
+// This is necessary because most non-Microsoft compilers don't\r
+// implement the thiscall calling convention used by IASIO.\r
+//\r
+// iasiothiscallresolver.cpp contains the background of this\r
+// problem plus a technical description of the vptr\r
+// manipulations.\r
+//\r
+// In order to use this mechanism one simply has to add\r
+// iasiothiscallresolver.cpp to the list of files to compile\r
+// and #include <iasiothiscallresolver.h>\r
+//\r
+// Note that this #include must come after the other ASIO SDK\r
+// #includes, for example:\r
+//\r
+// #include <windows.h>\r
+// #include <asiosys.h>\r
+// #include <asio.h>\r
+// #include <asiodrivers.h>\r
+// #include <iasiothiscallresolver.h>\r
+//\r
+// Actually the important thing is to #include\r
+// <iasiothiscallresolver.h> after <asio.h>. We have\r
+// incorporated a test to enforce this ordering.\r
+//\r
+// The code transparently takes care of the interposition by\r
+// using macro substitution to intercept calls to ASIOInit()\r
+// and ASIOExit(). We save the original ASIO global\r
+// "theAsioDriver" in our "that" variable, and then set\r
+// "theAsioDriver" to equal our IASIOThiscallResolver instance.\r
+//\r
+// Whilst this method of resolving the thiscall problem requires\r
+// the addition of #include <iasiothiscallresolver.h> to client\r
+// code it has the advantage that it does not break the terms\r
+// of the ASIO licence by publishing it. We are NOT modifying\r
+// any Steinberg code here, we are merely implementing the IASIO\r
+// interface in the same way that we would need to do if we\r
+// wished to provide an open source ASIO driver.\r
+//\r
+// For compilation with MinGW -lole32 needs to be added to the\r
+// linker options. For BORLAND, linking with Import32.lib is\r
+// sufficient.\r
+//\r
+// The dependencies are with: CoInitialize, CoUninitialize,\r
+// CoCreateInstance, CLSIDFromString - used by asiolist.cpp\r
+// and are required on Windows whether ThiscallResolver is used\r
+// or not.\r
+//\r
+// Searching for the above strings in the root library path\r
+// of your compiler should enable the correct libraries to be\r
+// identified if they aren't immediately obvious.\r
+//\r
+// Note that the current implementation of IASIOThiscallResolver\r
+// is not COM compliant - it does not correctly implement the\r
+// IUnknown interface. Implementing it is not necessary because\r
+// it is not called by parts of the ASIO SDK which call through\r
+// theAsioDriver ptr. The IUnknown methods are implemented as\r
+// assert(false) to ensure that the code fails if they are\r
+// ever called.\r
+// Restrictions: None. Public Domain & Open Source distribute freely\r
+// You may use IASIOThiscallResolver commercially as well as\r
+// privately.\r
+// You the user assume the responsibility for the use of the\r
+// files, binary or text, and there is no guarantee or warranty,\r
+// expressed or implied, including but not limited to the\r
+// implied warranties of merchantability and fitness for a\r
+// particular purpose. You assume all responsibility and agree\r
+// to hold no entity, copyright holder or distributors liable\r
+// for any loss of data or inaccurate representations of data\r
+// as a result of using IASIOThiscallResolver.\r
+// Version: 1.4 Added separate macro CALL_THISCALL_1_DOUBLE from\r
+// Andrew Baldwin, and volatile for whole gcc asm blocks,\r
+// both for compatibility with newer gcc versions. Cleaned up\r
+// Borland asm to use one less register.\r
+// 1.3 Switched to including assert.h for better compatibility.\r
+// Wrapped entire .h and .cpp contents with a check for\r
+// _MSC_VER to provide better compatibility with MS compilers.\r
+// Changed Singleton implementation to use static instance\r
+// instead of freestore allocated instance. Removed ASIOExit\r
+// macro as it is no longer needed.\r
+// 1.2 Removed semicolons from ASIOInit and ASIOExit macros to\r
+// allow them to be embedded in expressions (if statements).\r
+// Cleaned up some comments. Removed combase.c dependency (it\r
+// doesn't compile with BCB anyway) by stubbing IUnknown.\r
+// 1.1 Incorporated comments from Ross Bencina including things\r
+// such as changing name from ThiscallResolver to\r
+// IASIOThiscallResolver, tidying up the constructor, fixing\r
+// a bug in IASIOThiscallResolver::ASIOExit() and improving\r
+// portability through the use of conditional compilation\r
+// 1.0 Initial working version.\r
+// Created: 6/09/2003\r
+// Authors: Fraser Adams\r
+// Ross Bencina\r
+// Rene G. Ceballos\r
+// Martin Fay\r
+// Antti Silvast\r
+// Andrew Baldwin\r
+//\r
+// ****************************************************************************\r
+\r
+\r
+#ifndef included_iasiothiscallresolver_h\r
+#define included_iasiothiscallresolver_h\r
+\r
+// We only need IASIOThiscallResolver at all if we are on Win32. For other\r
+// platforms we simply bypass the IASIOThiscallResolver definition to allow us\r
+// to be safely #include'd whatever the platform to keep client code portable\r
+//#if defined(WIN32) || defined(_WIN32) || defined(__WIN32__)\r
+#if (defined(WIN32) || defined(_WIN32) || defined(__WIN32__)) && !defined(_WIN64)\r
+\r
+\r
+// If microsoft compiler we can call IASIO directly so IASIOThiscallResolver\r
+// is not used.\r
+#if !defined(_MSC_VER)\r
+\r
+\r
+// The following is in order to ensure that this header is only included after\r
+// the other ASIO headers (except for the case of iasiothiscallresolver.cpp).\r
+// We need to do this because IASIOThiscallResolver works by eclipsing the\r
+// original definition of ASIOInit() with a macro (see below).\r
+#if !defined(iasiothiscallresolver_sourcefile)\r
+ #if !defined(__ASIO_H)\r
+ #error iasiothiscallresolver.h must be included AFTER asio.h\r
+ #endif\r
+#endif\r
+\r
+#include <windows.h>\r
+#include "iasiodrv.h" /* From ASIO SDK */\r
+\r
+\r
+class IASIOThiscallResolver : public IASIO {\r
+private:\r
+ IASIO* that_; // Points to the real IASIO\r
+\r
+ static IASIOThiscallResolver instance; // Singleton instance\r
+\r
+ // Constructors - declared private so construction is limited to\r
+ // our Singleton instance\r
+ IASIOThiscallResolver();\r
+ IASIOThiscallResolver(IASIO* that);\r
+public:\r
+\r
+ // Methods from the IUnknown interface. We don't fully implement IUnknown\r
+ // because the ASIO SDK never calls these methods through theAsioDriver ptr.\r
+ // These methods are implemented as assert(false).\r
+ virtual HRESULT STDMETHODCALLTYPE QueryInterface(REFIID riid, void **ppv);\r
+ virtual ULONG STDMETHODCALLTYPE AddRef();\r
+ virtual ULONG STDMETHODCALLTYPE Release();\r
+\r
+ // Methods from the IASIO interface, implemented as forwarning calls to that.\r
+ virtual ASIOBool init(void *sysHandle);\r
+ virtual void getDriverName(char *name);\r
+ virtual long getDriverVersion();\r
+ virtual void getErrorMessage(char *string);\r
+ virtual ASIOError start();\r
+ virtual ASIOError stop();\r
+ virtual ASIOError getChannels(long *numInputChannels, long *numOutputChannels);\r
+ virtual ASIOError getLatencies(long *inputLatency, long *outputLatency);\r
+ virtual ASIOError getBufferSize(long *minSize, long *maxSize, long *preferredSize, long *granularity);\r
+ virtual ASIOError canSampleRate(ASIOSampleRate sampleRate);\r
+ virtual ASIOError getSampleRate(ASIOSampleRate *sampleRate);\r
+ virtual ASIOError setSampleRate(ASIOSampleRate sampleRate);\r
+ virtual ASIOError getClockSources(ASIOClockSource *clocks, long *numSources);\r
+ virtual ASIOError setClockSource(long reference);\r
+ virtual ASIOError getSamplePosition(ASIOSamples *sPos, ASIOTimeStamp *tStamp);\r
+ virtual ASIOError getChannelInfo(ASIOChannelInfo *info);\r
+ virtual ASIOError createBuffers(ASIOBufferInfo *bufferInfos, long numChannels, long bufferSize, ASIOCallbacks *callbacks);\r
+ virtual ASIOError disposeBuffers();\r
+ virtual ASIOError controlPanel();\r
+ virtual ASIOError future(long selector,void *opt);\r
+ virtual ASIOError outputReady();\r
+\r
+ // Class method, see ASIOInit() macro below.\r
+ static ASIOError ASIOInit(ASIODriverInfo *info); // Delegates to ::ASIOInit\r
+};\r
+\r
+\r
+// Replace calls to ASIOInit with our interposing version.\r
+// This macro enables us to perform thiscall resolution simply by #including\r
+// <iasiothiscallresolver.h> after the asio #includes (this file _must_ be\r
+// included _after_ the asio #includes)\r
+\r
+#define ASIOInit(name) IASIOThiscallResolver::ASIOInit((name))\r
+\r
+\r
+#endif /* !defined(_MSC_VER) */\r
+\r
+#endif /* Win32 */\r
+\r
+#endif /* included_iasiothiscallresolver_h */\r
+\r
+\r
--- /dev/null
+/*
+ * soundcard.h
+ */
+
+/*-
+ * Copyright by Hannu Savolainen 1993 / 4Front Technologies 1993-2006
+ * Modified for the new FreeBSD sound driver by Luigi Rizzo, 1997
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+ * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+ * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR
+ * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
+ * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *
+ * $FreeBSD: src/sys/sys/soundcard.h,v 1.48 2006/11/26 11:55:48 netchild Exp $
+ */
+
+/*
+ * Unless coordinating changes with 4Front Technologies, do NOT make any
+ * modifications to ioctl commands, types, etc. that would break
+ * compatibility with the OSS API.
+ */
+
+#ifndef _SYS_SOUNDCARD_H_
+#define _SYS_SOUNDCARD_H_
+ /*
+ * If you make modifications to this file, please contact me before
+ * distributing the modified version. There is already enough
+ * diversity in the world.
+ *
+ * Regards,
+ * Hannu Savolainen
+ * hannu@voxware.pp.fi
+ *
+ **********************************************************************
+ * PS. The Hacker's Guide to VoxWare available from
+ * nic.funet.fi:pub/Linux/ALPHA/sound. The file is
+ * snd-sdk-doc-0.1.ps.gz (gzipped postscript). It contains
+ * some useful information about programming with VoxWare.
+ * (NOTE! The pub/Linux/ALPHA/ directories are hidden. You have
+ * to cd inside them before the files are accessible.)
+ **********************************************************************
+ */
+
+/*
+ * SOUND_VERSION is only used by the voxware driver. Hopefully apps
+ * should not depend on it, but rather look at the capabilities
+ * of the driver in the kernel!
+ */
+#define SOUND_VERSION 301
+#define VOXWARE /* does this have any use ? */
+
+/*
+ * Supported card ID numbers (Should be somewhere else? We keep
+ * them here just for compatibility with the old driver, but these
+ * constants are of little or no use).
+ */
+
+#define SNDCARD_ADLIB 1
+#define SNDCARD_SB 2
+#define SNDCARD_PAS 3
+#define SNDCARD_GUS 4
+#define SNDCARD_MPU401 5
+#define SNDCARD_SB16 6
+#define SNDCARD_SB16MIDI 7
+#define SNDCARD_UART6850 8
+#define SNDCARD_GUS16 9
+#define SNDCARD_MSS 10
+#define SNDCARD_PSS 11
+#define SNDCARD_SSCAPE 12
+#define SNDCARD_PSS_MPU 13
+#define SNDCARD_PSS_MSS 14
+#define SNDCARD_SSCAPE_MSS 15
+#define SNDCARD_TRXPRO 16
+#define SNDCARD_TRXPRO_SB 17
+#define SNDCARD_TRXPRO_MPU 18
+#define SNDCARD_MAD16 19
+#define SNDCARD_MAD16_MPU 20
+#define SNDCARD_CS4232 21
+#define SNDCARD_CS4232_MPU 22
+#define SNDCARD_MAUI 23
+#define SNDCARD_PSEUDO_MSS 24
+#define SNDCARD_AWE32 25
+#define SNDCARD_NSS 26
+#define SNDCARD_UART16550 27
+#define SNDCARD_OPL 28
+
+#include <sys/types.h>
+#include <machine/endian.h>
+#ifndef _IOWR
+#include <sys/ioccom.h>
+#endif /* !_IOWR */
+
+/*
+ * The first part of this file contains the new FreeBSD sound ioctl
+ * interface. Tries to minimize the number of different ioctls, and
+ * to be reasonably general.
+ *
+ * 970821: some of the new calls have not been implemented yet.
+ */
+
+/*
+ * the following three calls extend the generic file descriptor
+ * interface. AIONWRITE is the dual of FIONREAD, i.e. returns the max
+ * number of bytes for a write operation to be non-blocking.
+ *
+ * AIOGSIZE/AIOSSIZE are used to change the behaviour of the device,
+ * from a character device (default) to a block device. In block mode,
+ * (not to be confused with blocking mode) the main difference for the
+ * application is that select() will return only when a complete
+ * block can be read/written to the device, whereas in character mode
+ * select will return true when one byte can be exchanged. For audio
+ * devices, character mode makes select almost useless since one byte
+ * will always be ready by the next sample time (which is often only a
+ * handful of microseconds away).
+ * Use a size of 0 or 1 to return to character mode.
+ */
+#define AIONWRITE _IOR('A', 10, int) /* get # bytes to write */
+struct snd_size {
+ int play_size;
+ int rec_size;
+};
+#define AIOGSIZE _IOR('A', 11, struct snd_size)/* read current blocksize */
+#define AIOSSIZE _IOWR('A', 11, struct snd_size) /* sets blocksize */
+
+/*
+ * The following constants define supported audio formats. The
+ * encoding follows voxware conventions, i.e. 1 bit for each supported
+ * format. We extend it by using bit 31 (RO) to indicate full-duplex
+ * capability, and bit 29 (RO) to indicate that the card supports/
+ * needs different formats on capture & playback channels.
+ * Bit 29 (RW) is used to indicate/ask stereo.
+ *
+ * The number of bits required to store the sample is:
+ * o 4 bits for the IDA ADPCM format,
+ * o 8 bits for 8-bit formats, mu-law and A-law,
+ * o 16 bits for the 16-bit formats, and
+ * o 32 bits for the 24/32-bit formats.
+ * o undefined for the MPEG audio format.
+ */
+
+#define AFMT_QUERY 0x00000000 /* Return current format */
+#define AFMT_MU_LAW 0x00000001 /* Logarithmic mu-law */
+#define AFMT_A_LAW 0x00000002 /* Logarithmic A-law */
+#define AFMT_IMA_ADPCM 0x00000004 /* A 4:1 compressed format where 16-bit
+                                         * sequence represented using
+ * the average 4 bits per sample */
+#define AFMT_U8 0x00000008 /* Unsigned 8-bit */
+#define AFMT_S16_LE 0x00000010 /* Little endian signed 16-bit */
+#define AFMT_S16_BE 0x00000020 /* Big endian signed 16-bit */
+#define AFMT_S8 0x00000040 /* Signed 8-bit */
+#define AFMT_U16_LE 0x00000080 /* Little endian unsigned 16-bit */
+#define AFMT_U16_BE 0x00000100 /* Big endian unsigned 16-bit */
+#define AFMT_MPEG 0x00000200 /* MPEG MP2/MP3 audio */
+#define AFMT_AC3 0x00000400 /* Dolby Digital AC3 */
+
+#if _BYTE_ORDER == _LITTLE_ENDIAN
+#define AFMT_S16_NE AFMT_S16_LE /* native endian signed 16 */
+#else
+#define AFMT_S16_NE AFMT_S16_BE
+#endif
+
+/*
+ * 32-bit formats below used for 24-bit audio data where the data is stored
+ * in the 24 most significant bits and the least significant bits are not used
+ * (should be set to 0).
+ */
+#define AFMT_S32_LE 0x00001000 /* Little endian signed 32-bit */
+#define AFMT_S32_BE 0x00002000 /* Big endian signed 32-bit */
+#define AFMT_U32_LE 0x00004000 /* Little endian unsigned 32-bit */
+#define AFMT_U32_BE 0x00008000 /* Big endian unsigned 32-bit */
+#define AFMT_S24_LE 0x00010000 /* Little endian signed 24-bit */
+#define AFMT_S24_BE 0x00020000 /* Big endian signed 24-bit */
+#define AFMT_U24_LE 0x00040000 /* Little endian unsigned 24-bit */
+#define AFMT_U24_BE 0x00080000 /* Big endian unsigned 24-bit */
+
+#define AFMT_STEREO 0x10000000 /* can do/want stereo */
+
+/*
+ * the following are really capabilities
+ */
+#define AFMT_WEIRD 0x20000000 /* weird hardware... */
+ /*
+ * AFMT_WEIRD reports that the hardware might need to operate
+ * with different formats in the playback and capture
+ * channels when operating in full duplex.
+ * As an example, SoundBlaster16 cards only support U8 in one
+ * direction and S16 in the other one, and applications should
+ * be aware of this limitation.
+ */
+#define AFMT_FULLDUPLEX 0x80000000 /* can do full duplex */
+
+/*
+ * The following structure is used to get/set format and sampling rate.
+ * While it would be better to have things such as stereo, bits per
+ * sample, endianness, etc. split in different variables, it turns out
+ * that formats are not that many, and not all combinations are possible.
+ * So we followed the Voxware approach of associating one bit to each
+ * format.
+ */
+
+typedef struct _snd_chan_param {
+ u_long play_rate; /* sampling rate */
+ u_long rec_rate; /* sampling rate */
+ u_long play_format; /* everything describing the format */
+ u_long rec_format; /* everything describing the format */
+} snd_chan_param;
+#define AIOGFMT _IOR('f', 12, snd_chan_param) /* get format */
+#define AIOSFMT _IOWR('f', 12, snd_chan_param) /* sets format */
+
+/*
+ * The following structure is used to get/set the mixer setting.
+ * Up to 32 mixers are supported, each one with up to 32 channels.
+ */
+typedef struct _snd_mix_param {
+ u_char subdev; /* which output */
+ u_char line; /* which input */
+ u_char left,right; /* volumes, 0..255, 0 = mute */
+} snd_mix_param ;
+
+/* XXX AIOGMIX, AIOSMIX not implemented yet */
+#define AIOGMIX _IOWR('A', 13, snd_mix_param) /* return mixer status */
+#define AIOSMIX _IOWR('A', 14, snd_mix_param) /* sets mixer status */
+
+/*
+ * channel specifiers used in AIOSTOP and AIOSYNC
+ */
+#define AIOSYNC_PLAY 0x1 /* play chan */
+#define AIOSYNC_CAPTURE 0x2 /* capture chan */
+/* AIOSTOP stop & flush a channel, returns the residual count */
+#define AIOSTOP _IOWR ('A', 15, int)
+
+/* alternate method used to notify the sync condition */
+#define AIOSYNC_SIGNAL 0x100
+#define AIOSYNC_SELECT 0x200
+
+/* what the 'pos' field refers to */
+#define AIOSYNC_READY 0x400
+#define AIOSYNC_FREE 0x800
+
+typedef struct _snd_sync_parm {
+ long chan ; /* play or capture channel, plus modifier */
+ long pos;
+} snd_sync_parm;
+#define AIOSYNC _IOWR ('A', 15, snd_sync_parm) /* misc. synchronization */
+
+/*
+ * The following is used to return device capabilities. If the structure
+ * passed to the ioctl is zeroed, default values are returned for rate
+ * and formats, a bitmap of available mixers is returned, and values
+ * (inputs, different levels) for the first one are returned.
+ *
+ * If formats, mixers, inputs are instantiated, then detailed info
+ * are returned depending on the call.
+ */
+typedef struct _snd_capabilities {
+ u_long rate_min, rate_max; /* min-max sampling rate */
+ u_long formats;
+ u_long bufsize; /* DMA buffer size */
+ u_long mixers; /* bitmap of available mixers */
+ u_long inputs; /* bitmap of available inputs (per mixer) */
+ u_short left, right; /* how many levels are supported */
+} snd_capabilities;
+#define AIOGCAP _IOWR('A', 15, snd_capabilities) /* get capabilities */
+
+/*
+ * here is the old (Voxware) ioctl interface
+ */
+
+/*
+ * IOCTL Commands for /dev/sequencer
+ */
+
+#define SNDCTL_SEQ_RESET _IO ('Q', 0)
+#define SNDCTL_SEQ_SYNC _IO ('Q', 1)
+#define SNDCTL_SYNTH_INFO _IOWR('Q', 2, struct synth_info)
+#define SNDCTL_SEQ_CTRLRATE _IOWR('Q', 3, int) /* Set/get timer res.(hz) */
+#define SNDCTL_SEQ_GETOUTCOUNT _IOR ('Q', 4, int)
+#define SNDCTL_SEQ_GETINCOUNT _IOR ('Q', 5, int)
+#define SNDCTL_SEQ_PERCMODE _IOW ('Q', 6, int)
+#define SNDCTL_FM_LOAD_INSTR _IOW ('Q', 7, struct sbi_instrument) /* Valid for FM only */
+#define SNDCTL_SEQ_TESTMIDI _IOW ('Q', 8, int)
+#define SNDCTL_SEQ_RESETSAMPLES _IOW ('Q', 9, int)
+#define SNDCTL_SEQ_NRSYNTHS _IOR ('Q',10, int)
+#define SNDCTL_SEQ_NRMIDIS _IOR ('Q',11, int)
+#define SNDCTL_MIDI_INFO _IOWR('Q',12, struct midi_info)
+#define SNDCTL_SEQ_THRESHOLD _IOW ('Q',13, int)
+#define SNDCTL_SEQ_TRESHOLD SNDCTL_SEQ_THRESHOLD /* there was once a typo */
+#define SNDCTL_SYNTH_MEMAVL _IOWR('Q',14, int) /* in=dev#, out=memsize */
+#define SNDCTL_FM_4OP_ENABLE _IOW ('Q',15, int) /* in=dev# */
+#define SNDCTL_PMGR_ACCESS _IOWR('Q',16, struct patmgr_info)
+#define SNDCTL_SEQ_PANIC _IO ('Q',17)
+#define SNDCTL_SEQ_OUTOFBAND _IOW ('Q',18, struct seq_event_rec)
+#define SNDCTL_SEQ_GETTIME _IOR ('Q',19, int)
+
+struct seq_event_rec {
+ u_char arr[8];
+};
+
+#define SNDCTL_TMR_TIMEBASE _IOWR('T', 1, int)
+#define SNDCTL_TMR_START _IO ('T', 2)
+#define SNDCTL_TMR_STOP _IO ('T', 3)
+#define SNDCTL_TMR_CONTINUE _IO ('T', 4)
+#define SNDCTL_TMR_TEMPO _IOWR('T', 5, int)
+#define SNDCTL_TMR_SOURCE _IOWR('T', 6, int)
+# define TMR_INTERNAL 0x00000001
+# define TMR_EXTERNAL 0x00000002
+# define TMR_MODE_MIDI 0x00000010
+# define TMR_MODE_FSK 0x00000020
+# define TMR_MODE_CLS 0x00000040
+# define TMR_MODE_SMPTE 0x00000080
+#define SNDCTL_TMR_METRONOME _IOW ('T', 7, int)
+#define SNDCTL_TMR_SELECT _IOW ('T', 8, int)
+
+/*
+ * Endian aware patch key generation algorithm.
+ */
+
+#if defined(_AIX) || defined(AIX)
+# define _PATCHKEY(id) (0xfd00|id)
+#else
+# define _PATCHKEY(id) ((id<<8)|0xfd)
+#endif
+
+/*
+ * Sample loading mechanism for internal synthesizers (/dev/sequencer)
+ * The following patch_info structure has been designed to support
+ * Gravis UltraSound. It tries to be universal format for uploading
+ * sample based patches but is probably too limited.
+ */
+
+struct patch_info {
+/* u_short key; Use GUS_PATCH here */
+ short key; /* Use GUS_PATCH here */
+#define GUS_PATCH _PATCHKEY(0x04)
+#define OBSOLETE_GUS_PATCH _PATCHKEY(0x02)
+
+ short device_no; /* Synthesizer number */
+ short instr_no; /* Midi pgm# */
+
+ u_long mode;
+/*
+ * The least significant byte has the same format as the GUS .PAT
+ * files
+ */
+#define WAVE_16_BITS 0x01 /* bit 0 = 8 or 16 bit wave data. */
+#define WAVE_UNSIGNED 0x02 /* bit 1 = Signed - Unsigned data. */
+#define WAVE_LOOPING 0x04 /* bit 2 = looping enabled-1. */
+#define WAVE_BIDIR_LOOP 0x08 /* bit 3 = Set is bidirectional looping. */
+#define WAVE_LOOP_BACK 0x10 /* bit 4 = Set is looping backward. */
+#define WAVE_SUSTAIN_ON 0x20 /* bit 5 = Turn sustaining on. (Env. pts. 3)*/
+#define WAVE_ENVELOPES 0x40 /* bit 6 = Enable envelopes - 1 */
+ /* (use the env_rate/env_offs fields). */
+/* Linux specific bits */
+#define WAVE_VIBRATO 0x00010000 /* The vibrato info is valid */
+#define WAVE_TREMOLO 0x00020000 /* The tremolo info is valid */
+#define WAVE_SCALE 0x00040000 /* The scaling info is valid */
+/* Other bits must be zeroed */
+
+ long len; /* Size of the wave data in bytes */
+ long loop_start, loop_end; /* Byte offsets from the beginning */
+
+/*
+ * The base_freq and base_note fields are used when computing the
+ * playback speed for a note. The base_note defines the tone frequency
+ * which is heard if the sample is played using the base_freq as the
+ * playback speed.
+ *
+ * The low_note and high_note fields define the minimum and maximum note
+ * frequencies for which this sample is valid. It is possible to define
+ * more than one samples for an instrument number at the same time. The
+ * low_note and high_note fields are used to select the most suitable one.
+ *
+ * The fields base_note, high_note and low_note should contain
+ * the note frequency multiplied by 1000. For example value for the
+ * middle A is 440*1000.
+ */
+
+ u_int base_freq;
+ u_long base_note;
+ u_long high_note;
+ u_long low_note;
+ int panning; /* -128=left, 127=right */
+ int detuning;
+
+/* New fields introduced in version 1.99.5 */
+
+ /* Envelope. Enabled by mode bit WAVE_ENVELOPES */
+ u_char env_rate[ 6 ]; /* GUS HW ramping rate */
+ u_char env_offset[ 6 ]; /* 255 == 100% */
+
+ /*
+ * The tremolo, vibrato and scale info are not supported yet.
+ * Enable by setting the mode bits WAVE_TREMOLO, WAVE_VIBRATO or
+ * WAVE_SCALE
+ */
+
+ u_char tremolo_sweep;
+ u_char tremolo_rate;
+ u_char tremolo_depth;
+
+ u_char vibrato_sweep;
+ u_char vibrato_rate;
+ u_char vibrato_depth;
+
+ int scale_frequency;
+ u_int scale_factor; /* from 0 to 2048 or 0 to 2 */
+
+ int volume;
+ int spare[4];
+ char data[1]; /* The waveform data starts here */
+};
+
+struct sysex_info {
+ short key; /* Use GUS_PATCH here */
+#define SYSEX_PATCH _PATCHKEY(0x05)
+#define MAUI_PATCH _PATCHKEY(0x06)
+ short device_no; /* Synthesizer number */
+ long len; /* Size of the sysex data in bytes */
+ u_char data[1]; /* Sysex data starts here */
+};
+
+/*
+ * Patch management interface (/dev/sequencer, /dev/patmgr#)
+ * Don't use these calls if you want to maintain compatibility with
+ * the future versions of the driver.
+ */
+
+#define PS_NO_PATCHES 0 /* No patch support on device */
+#define PS_MGR_NOT_OK 1 /* Plain patch support (no mgr) */
+#define PS_MGR_OK 2 /* Patch manager supported */
+#define PS_MANAGED 3 /* Patch manager running */
+
+#define SNDCTL_PMGR_IFACE _IOWR('P', 1, struct patmgr_info)
+
+/*
+ * The patmgr_info is a fixed size structure which is used for two
+ * different purposes. The intended use is for communication between
+ * the application using /dev/sequencer and the patch manager daemon
+ * associated with a synthesizer device (ioctl(SNDCTL_PMGR_ACCESS)).
+ *
+ * This structure is also used with ioctl(SNDCTL_PMGR_IFACE), which allows
+ * a patch manager daemon to read and write device parameters. This
+ * ioctl is available through /dev/sequencer also. Avoid using it since it's
+ * extremely hardware dependent. In addition, access through /dev/sequencer
+ * may confuse the patch manager daemon.
+ */
+
+struct patmgr_info { /* Note! size must be < 4k since kmalloc() is used */
+ u_long key; /* Don't worry. Reserved for communication
+ between the patch manager and the driver. */
+#define PM_K_EVENT 1 /* Event from the /dev/sequencer driver */
+#define PM_K_COMMAND 2 /* Request from an application */
+#define PM_K_RESPONSE 3 /* From patmgr to application */
+#define PM_ERROR 4 /* Error returned by the patmgr */
+ int device;
+ int command;
+
+/*
+ * Commands 0x000 to 0xfff reserved for patch manager programs
+ */
+#define PM_GET_DEVTYPE 1 /* Returns type of the patch mgr interface of dev */
+#define PMTYPE_FM2 1 /* 2 OP fm */
+#define PMTYPE_FM4 2 /* Mixed 4 or 2 op FM (OPL-3) */
+#define PMTYPE_WAVE 3 /* Wave table synthesizer (GUS) */
+#define PM_GET_NRPGM 2 /* Returns max # of midi programs in parm1 */
+#define PM_GET_PGMMAP 3 /* Returns map of loaded midi programs in data8 */
+#define PM_GET_PGM_PATCHES 4 /* Return list of patches of a program (parm1) */
+#define PM_GET_PATCH 5 /* Return patch header of patch parm1 */
+#define PM_SET_PATCH 6 /* Set patch header of patch parm1 */
+#define PM_READ_PATCH 7 /* Read patch (wave) data */
+#define PM_WRITE_PATCH 8 /* Write patch (wave) data */
+
+/*
+ * Commands 0x1000 to 0xffff are for communication between the patch manager
+ * and the client
+ */
+#define _PM_LOAD_PATCH 0x100
+
+/*
+ * Commands above 0xffff reserved for device specific use
+ */
+
+ long parm1;
+ long parm2;
+ long parm3;
+
+ union {
+ u_char data8[4000];
+ u_short data16[2000];
+ u_long data32[1000];
+ struct patch_info patch;
+ } data;
+};
+
+/*
+ * When a patch manager daemon is present, it will be informed by the
+ * driver when something important happens. For example when the
+ * /dev/sequencer is opened or closed. A record with key == PM_K_EVENT is
+ * returned. The command field contains the event type:
+ */
+#define PM_E_OPENED 1 /* /dev/sequencer opened */
+#define PM_E_CLOSED 2 /* /dev/sequencer closed */
+#define PM_E_PATCH_RESET 3 /* SNDCTL_RESETSAMPLES called */
+#define PM_E_PATCH_LOADED 4 /* A patch has been loaded by appl */
+
+/*
+ * /dev/sequencer input events.
+ *
+ * The data written to the /dev/sequencer is a stream of events. Events
+ * are records of 4 or 8 bytes. The first byte defines the size.
+ * Any number of events can be written with a write call. There
+ * is a set of macros for sending these events. Use these macros if you
+ * want to maximize portability of your program.
+ *
+ * Events SEQ_WAIT, SEQ_MIDIPUTC and SEQ_ECHO are also input events.
+ * (All input events are currently 4 bytes long. Be prepared to support
+ * 8 byte events also. If you receive any event having first byte >= 128,
+ * it's an 8 byte event.)
+ *
+ * The events are documented at the end of this file.
+ *
+ * Normal events (4 bytes)
+ * There is also a 8 byte version of most of the 4 byte events. The
+ * 8 byte one is recommended.
+ */
+#define SEQ_NOTEOFF 0
+#define SEQ_FMNOTEOFF SEQ_NOTEOFF /* Just old name */
+#define SEQ_NOTEON 1
+#define SEQ_FMNOTEON SEQ_NOTEON
+#define SEQ_WAIT TMR_WAIT_ABS
+#define SEQ_PGMCHANGE 3
+#define SEQ_FMPGMCHANGE SEQ_PGMCHANGE
+#define SEQ_SYNCTIMER TMR_START
+#define SEQ_MIDIPUTC 5
+#define SEQ_DRUMON 6 /*** OBSOLETE ***/
+#define SEQ_DRUMOFF 7 /*** OBSOLETE ***/
+#define SEQ_ECHO TMR_ECHO /* For synching programs with output */
+#define SEQ_AFTERTOUCH 9
+#define SEQ_CONTROLLER 10
+
+/*
+ * Midi controller numbers
+ *
+ * Controllers 0 to 31 (0x00 to 0x1f) and 32 to 63 (0x20 to 0x3f)
+ * are continuous controllers.
+ * In the MIDI 1.0 these controllers are sent using two messages.
+ * Controller numbers 0 to 31 are used to send the MSB and the
+ * controller numbers 32 to 63 are for the LSB. Note that just 7 bits
+ * are used in MIDI bytes.
+ */
+
+#define CTL_BANK_SELECT 0x00
+#define CTL_MODWHEEL 0x01
+#define CTL_BREATH 0x02
+/* undefined 0x03 */
+#define CTL_FOOT 0x04
+#define CTL_PORTAMENTO_TIME 0x05
+#define CTL_DATA_ENTRY 0x06
+#define CTL_MAIN_VOLUME 0x07
+#define CTL_BALANCE 0x08
+/* undefined 0x09 */
+#define CTL_PAN 0x0a
+#define CTL_EXPRESSION 0x0b
+/* undefined 0x0c - 0x0f */
+#define CTL_GENERAL_PURPOSE1 0x10
+#define CTL_GENERAL_PURPOSE2 0x11
+#define CTL_GENERAL_PURPOSE3 0x12
+#define CTL_GENERAL_PURPOSE4 0x13
+/* undefined 0x14 - 0x1f */
+
+/* undefined 0x20 */
+
+/*
+ * The controller numbers 0x21 to 0x3f are reserved for the
+ * least significant bytes of the controllers 0x00 to 0x1f.
+ * These controllers are not recognised by the driver.
+ *
+ * Controllers 64 to 69 (0x40 to 0x45) are on/off switches.
+ * 0=OFF and 127=ON (intermediate values are possible)
+ */
+#define CTL_DAMPER_PEDAL 0x40
+#define CTL_SUSTAIN CTL_DAMPER_PEDAL /* Alias */
+#define CTL_HOLD CTL_DAMPER_PEDAL /* Alias */
+#define CTL_PORTAMENTO 0x41
+#define CTL_SOSTENUTO 0x42
+#define CTL_SOFT_PEDAL 0x43
+/* undefined 0x44 */
+#define CTL_HOLD2 0x45
+/* undefined 0x46 - 0x4f */
+
+#define CTL_GENERAL_PURPOSE5 0x50
+#define CTL_GENERAL_PURPOSE6 0x51
+#define CTL_GENERAL_PURPOSE7 0x52
+#define CTL_GENERAL_PURPOSE8 0x53
+/* undefined 0x54 - 0x5a */
+#define CTL_EXT_EFF_DEPTH 0x5b
+#define CTL_TREMOLO_DEPTH 0x5c
+#define CTL_CHORUS_DEPTH 0x5d
+#define CTL_DETUNE_DEPTH 0x5e
+#define CTL_CELESTE_DEPTH CTL_DETUNE_DEPTH /* Alias for the above one */
+#define CTL_PHASER_DEPTH 0x5f
+#define CTL_DATA_INCREMENT 0x60
+#define CTL_DATA_DECREMENT 0x61
+#define CTL_NONREG_PARM_NUM_LSB 0x62
+#define CTL_NONREG_PARM_NUM_MSB 0x63
+#define CTL_REGIST_PARM_NUM_LSB 0x64
+#define CTL_REGIST_PARM_NUM_MSB 0x65
+/* undefined 0x66 - 0x78 */
+/* reserved 0x79 - 0x7f */
+
+/* Pseudo controllers (not midi compatible) */
+#define CTRL_PITCH_BENDER 255
+#define CTRL_PITCH_BENDER_RANGE 254
+#define CTRL_EXPRESSION 253 /* Obsolete */
+#define CTRL_MAIN_VOLUME 252 /* Obsolete */
+
+#define SEQ_BALANCE 11
+#define SEQ_VOLMODE 12
+
+/*
+ * Volume mode decides how volumes are used
+ */
+
+#define VOL_METHOD_ADAGIO 1
+#define VOL_METHOD_LINEAR 2
+
+/*
+ * Note! SEQ_WAIT, SEQ_MIDIPUTC and SEQ_ECHO are used also as
+ * input events.
+ */
+
+/*
+ * Event codes 0xf0 to 0xfc are reserved for future extensions.
+ */
+
+#define SEQ_FULLSIZE 0xfd /* Long events */
+/*
+ * SEQ_FULLSIZE events are used for loading patches/samples to the
+ * synthesizer devices. These events are passed directly to the driver
+ * of the associated synthesizer device. There is no limit to the size
+ * of the extended events. These events are not queued but executed
+ * immediately when the write() is called (execution can take several
+ * seconds of time).
+ *
+ * When a SEQ_FULLSIZE message is written to the device, it must
+ * be written using exactly one write() call. Other events cannot
+ * be mixed to the same write.
+ *
+ * For FM synths (YM3812/OPL3) use struct sbi_instrument and write
+ * it to the /dev/sequencer. Don't write other data together with
+ * the instrument structure Set the key field of the structure to
+ * FM_PATCH. The device field is used to route the patch to the
+ * corresponding device.
+ *
+ * For Gravis UltraSound use struct patch_info. Initialize the key field
+ * to GUS_PATCH.
+ */
+#define SEQ_PRIVATE 0xfe /* Low level HW dependent events (8 bytes) */
+#define SEQ_EXTENDED 0xff /* Extended events (8 bytes) OBSOLETE */
+
+/*
+ * Record for FM patches
+ */
+
+typedef u_char sbi_instr_data[32];
+
+struct sbi_instrument {
+ u_short key; /* FM_PATCH or OPL3_PATCH */
+#define FM_PATCH _PATCHKEY(0x01)
+#define OPL3_PATCH _PATCHKEY(0x03)
+ short device; /* Synth# (0-4) */
+ int channel; /* Program# to be initialized */
+ sbi_instr_data operators; /* Reg. settings for operator cells
+ * (.SBI format) */
+};
+
+struct synth_info { /* Read only */
+ char name[30];
+ int device; /* 0-N. INITIALIZE BEFORE CALLING */
+ int synth_type;
+#define SYNTH_TYPE_FM 0
+#define SYNTH_TYPE_SAMPLE 1
+#define SYNTH_TYPE_MIDI 2 /* Midi interface */
+
+ int synth_subtype;
+#define FM_TYPE_ADLIB 0x00
+#define FM_TYPE_OPL3 0x01
+#define MIDI_TYPE_MPU401 0x401
+
+#define SAMPLE_TYPE_BASIC 0x10
+#define SAMPLE_TYPE_GUS SAMPLE_TYPE_BASIC
+#define SAMPLE_TYPE_AWE32 0x20
+
+ int perc_mode; /* No longer supported */
+ int nr_voices;
+ int nr_drums; /* Obsolete field */
+ int instr_bank_size;
+ u_long capabilities;
+#define SYNTH_CAP_PERCMODE 0x00000001 /* No longer used */
+#define SYNTH_CAP_OPL3 0x00000002 /* Set if OPL3 supported */
+#define SYNTH_CAP_INPUT 0x00000004 /* Input (MIDI) device */
+ int dummies[19]; /* Reserve space */
+};
+
+struct sound_timer_info {
+ char name[32];
+ int caps;
+};
+
+struct midi_info {
+ char name[30];
+ int device; /* 0-N. INITIALIZE BEFORE CALLING */
+ u_long capabilities; /* To be defined later */
+ int dev_type;
+ int dummies[18]; /* Reserve space */
+};
+
+/*
+ * ioctl commands for the /dev/midi##
+ */
+typedef struct {
+ u_char cmd;
+ char nr_args, nr_returns;
+ u_char data[30];
+} mpu_command_rec;
+
+#define SNDCTL_MIDI_PRETIME _IOWR('m', 0, int)
+#define SNDCTL_MIDI_MPUMODE _IOWR('m', 1, int)
+#define SNDCTL_MIDI_MPUCMD _IOWR('m', 2, mpu_command_rec)
+#define MIOSPASSTHRU _IOWR('m', 3, int)
+#define MIOGPASSTHRU _IOWR('m', 4, int)
+
+/*
+ * IOCTL commands for /dev/dsp and /dev/audio
+ */
+
+#define SNDCTL_DSP_RESET _IO ('P', 0)
+#define SNDCTL_DSP_SYNC _IO ('P', 1)
+#define SNDCTL_DSP_SPEED _IOWR('P', 2, int)
+#define SNDCTL_DSP_STEREO _IOWR('P', 3, int)
+#define SNDCTL_DSP_GETBLKSIZE _IOR('P', 4, int)
+#define SNDCTL_DSP_SETBLKSIZE _IOW('P', 4, int)
+#define SNDCTL_DSP_SETFMT _IOWR('P',5, int) /* Selects ONE fmt*/
+
+/*
+ * SOUND_PCM_WRITE_CHANNELS is not that different
+ * from SNDCTL_DSP_STEREO
+ */
+#define SOUND_PCM_WRITE_CHANNELS _IOWR('P', 6, int)
+#define SNDCTL_DSP_CHANNELS SOUND_PCM_WRITE_CHANNELS
+#define SOUND_PCM_WRITE_FILTER _IOWR('P', 7, int)
+#define SNDCTL_DSP_POST _IO ('P', 8)
+
+/*
+ * SNDCTL_DSP_SETBLKSIZE and the following two calls mostly do
+ * the same thing, i.e. set the block size used in DMA transfers.
+ */
+#define SNDCTL_DSP_SUBDIVIDE _IOWR('P', 9, int)
+#define SNDCTL_DSP_SETFRAGMENT _IOWR('P',10, int)
+
+
+#define SNDCTL_DSP_GETFMTS _IOR ('P',11, int) /* Returns a mask */
+/*
+ * Buffer status queries.
+ */
+typedef struct audio_buf_info {
+ int fragments; /* # of avail. frags (partly used ones not counted) */
+ int fragstotal; /* Total # of fragments allocated */
+ int fragsize; /* Size of a fragment in bytes */
+
+ int bytes; /* Avail. space in bytes (includes partly used fragments) */
+ /* Note! 'bytes' could be more than fragments*fragsize */
+} audio_buf_info;
+
+#define SNDCTL_DSP_GETOSPACE _IOR ('P',12, audio_buf_info)
+#define SNDCTL_DSP_GETISPACE _IOR ('P',13, audio_buf_info)
+
+/*
+ * SNDCTL_DSP_NONBLOCK is the same (but less powerful, since the
+ * action cannot be undone) of FIONBIO. The same can be achieved
+ * by opening the device with O_NDELAY
+ */
+#define SNDCTL_DSP_NONBLOCK _IO ('P',14)
+
+#define SNDCTL_DSP_GETCAPS _IOR ('P',15, int)
+#define DSP_CAP_REVISION 0x000000ff /* revision level (0 to 255) */
+#define DSP_CAP_DUPLEX 0x00000100 /* Full duplex record/playback */
+#define DSP_CAP_REALTIME 0x00000200 /* Real time capability */
+#define DSP_CAP_BATCH 0x00000400
+ /*
+ * Device has some kind of internal buffers which may
+ * cause some delays and decrease precision of timing
+ */
+#define DSP_CAP_COPROC 0x00000800
+ /* Has a coprocessor, sometimes it's a DSP but usually not */
+#define DSP_CAP_TRIGGER 0x00001000 /* Supports SETTRIGGER */
+#define DSP_CAP_MMAP 0x00002000 /* Supports mmap() */
+
+/*
+ * What do these functions do?
+ */
+#define SNDCTL_DSP_GETTRIGGER _IOR ('P',16, int)
+#define SNDCTL_DSP_SETTRIGGER _IOW ('P',16, int)
+#define PCM_ENABLE_INPUT 0x00000001
+#define PCM_ENABLE_OUTPUT 0x00000002
+
+typedef struct count_info {
+ int bytes; /* Total # of bytes processed */
+ int blocks; /* # of fragment transitions since last time */
+ int ptr; /* Current DMA pointer value */
+} count_info;
+
+/*
+ * GETIPTR and GETISPACE are not that different... same for out.
+ */
+#define SNDCTL_DSP_GETIPTR _IOR ('P',17, count_info)
+#define SNDCTL_DSP_GETOPTR _IOR ('P',18, count_info)
+
+typedef struct buffmem_desc {
+ caddr_t buffer;
+ int size;
+} buffmem_desc;
+
+#define SNDCTL_DSP_MAPINBUF _IOR ('P', 19, buffmem_desc)
+#define SNDCTL_DSP_MAPOUTBUF _IOR ('P', 20, buffmem_desc)
+#define SNDCTL_DSP_SETSYNCRO _IO ('P', 21)
+#define SNDCTL_DSP_SETDUPLEX _IO ('P', 22)
+#define SNDCTL_DSP_GETODELAY _IOR ('P', 23, int)
+
+/*
+ * I guess these are the readonly version of the same
+ * functions that exist above as SNDCTL_DSP_...
+ */
+#define SOUND_PCM_READ_RATE _IOR ('P', 2, int)
+#define SOUND_PCM_READ_CHANNELS _IOR ('P', 6, int)
+#define SOUND_PCM_READ_BITS _IOR ('P', 5, int)
+#define SOUND_PCM_READ_FILTER _IOR ('P', 7, int)
+
+/*
+ * ioctl calls to be used in communication with coprocessors and
+ * DSP chips.
+ */
+
+typedef struct copr_buffer {
+ int command; /* Set to 0 if not used */
+ int flags;
+#define CPF_NONE 0x0000
+#define CPF_FIRST 0x0001 /* First block */
+#define CPF_LAST 0x0002 /* Last block */
+ int len;
+ int offs; /* If required by the device (0 if not used) */
+
+ u_char data[4000]; /* NOTE! 4000 is not 4k */
+} copr_buffer;
+
+typedef struct copr_debug_buf {
+ int command; /* Used internally. Set to 0 */
+ int parm1;
+ int parm2;
+ int flags;
+ int len; /* Length of data in bytes */
+} copr_debug_buf;
+
+typedef struct copr_msg {
+ int len;
+ u_char data[4000];
+} copr_msg;
+
+#define SNDCTL_COPR_RESET _IO ('C', 0)
+#define SNDCTL_COPR_LOAD _IOWR('C', 1, copr_buffer)
+#define SNDCTL_COPR_RDATA _IOWR('C', 2, copr_debug_buf)
+#define SNDCTL_COPR_RCODE _IOWR('C', 3, copr_debug_buf)
+#define SNDCTL_COPR_WDATA _IOW ('C', 4, copr_debug_buf)
+#define SNDCTL_COPR_WCODE _IOW ('C', 5, copr_debug_buf)
+#define SNDCTL_COPR_RUN _IOWR('C', 6, copr_debug_buf)
+#define SNDCTL_COPR_HALT _IOWR('C', 7, copr_debug_buf)
+#define SNDCTL_COPR_SENDMSG _IOW ('C', 8, copr_msg)
+#define SNDCTL_COPR_RCVMSG _IOR ('C', 9, copr_msg)
+
+/*
+ * IOCTL commands for /dev/mixer
+ */
+
+/*
+ * Mixer devices
+ *
+ * There can be up to 20 different analog mixer channels. The
+ * SOUND_MIXER_NRDEVICES gives the currently supported maximum.
+ * The SOUND_MIXER_READ_DEVMASK returns a bitmask which tells
+ * the devices supported by the particular mixer.
+ */
+
+#define SOUND_MIXER_NRDEVICES 25
+#define SOUND_MIXER_VOLUME 0 /* Master output level */
+#define SOUND_MIXER_BASS 1 /* Bass level of all output channels */
+#define SOUND_MIXER_TREBLE 2 /* Treble level of all output channels */
+#define SOUND_MIXER_SYNTH 3 /* Volume of synthesizer input */
+#define SOUND_MIXER_PCM 4 /* Output level for the audio device */
+#define SOUND_MIXER_SPEAKER 5 /* Output level for the PC speaker
+ * signals */
+#define SOUND_MIXER_LINE 6 /* Volume level for the line in jack */
+#define SOUND_MIXER_MIC 7 /* Volume for the signal coming from
+ * the microphone jack */
+#define SOUND_MIXER_CD 8 /* Volume level for the input signal
+ * connected to the CD audio input */
+#define SOUND_MIXER_IMIX 9 /* Recording monitor. It controls the
+ * output volume of the selected
+ * recording sources while recording */
+#define SOUND_MIXER_ALTPCM 10 /* Volume of the alternative codec
+ * device */
+#define SOUND_MIXER_RECLEV 11 /* Global recording level */
+#define SOUND_MIXER_IGAIN 12 /* Input gain */
+#define SOUND_MIXER_OGAIN 13 /* Output gain */
+/*
+ * The AD1848 codec and compatibles have three line level inputs
+ * (line, aux1 and aux2). Since each card manufacturer have assigned
+ * different meanings to these inputs, it's impractical to assign
+ * specific meanings (line, cd, synth etc.) to them.
+ */
+#define SOUND_MIXER_LINE1 14 /* Input source 1 (aux1) */
+#define SOUND_MIXER_LINE2 15 /* Input source 2 (aux2) */
+#define SOUND_MIXER_LINE3 16 /* Input source 3 (line) */
+#define SOUND_MIXER_DIGITAL1 17 /* Digital (input) 1 */
+#define SOUND_MIXER_DIGITAL2 18 /* Digital (input) 2 */
+#define SOUND_MIXER_DIGITAL3 19 /* Digital (input) 3 */
+#define SOUND_MIXER_PHONEIN 20 /* Phone input */
+#define SOUND_MIXER_PHONEOUT 21 /* Phone output */
+#define SOUND_MIXER_VIDEO 22 /* Video/TV (audio) in */
+#define SOUND_MIXER_RADIO 23 /* Radio in */
+#define SOUND_MIXER_MONITOR 24 /* Monitor (usually mic) volume */
+
+
+/*
+ * Some on/off settings (SOUND_SPECIAL_MIN - SOUND_SPECIAL_MAX)
+ * Not counted to SOUND_MIXER_NRDEVICES, but use the same number space
+ */
+#define SOUND_ONOFF_MIN 28
+#define SOUND_ONOFF_MAX 30
+#define SOUND_MIXER_MUTE 28 /* 0 or 1 */
+#define SOUND_MIXER_ENHANCE 29 /* Enhanced stereo (0, 40, 60 or 80) */
+#define SOUND_MIXER_LOUD 30 /* 0 or 1 */
+
+/* Note! Number 31 cannot be used since the sign bit is reserved */
+#define SOUND_MIXER_NONE 31
+
+#define SOUND_DEVICE_LABELS { \
+ "Vol ", "Bass ", "Trebl", "Synth", "Pcm ", "Spkr ", "Line ", \
+ "Mic ", "CD ", "Mix ", "Pcm2 ", "Rec ", "IGain", "OGain", \
+ "Line1", "Line2", "Line3", "Digital1", "Digital2", "Digital3", \
+ "PhoneIn", "PhoneOut", "Video", "Radio", "Monitor"}
+
+#define SOUND_DEVICE_NAMES { \
+ "vol", "bass", "treble", "synth", "pcm", "speaker", "line", \
+ "mic", "cd", "mix", "pcm2", "rec", "igain", "ogain", \
+ "line1", "line2", "line3", "dig1", "dig2", "dig3", \
+ "phin", "phout", "video", "radio", "monitor"}
+
+/* Device bitmask identifiers */
+
+#define SOUND_MIXER_RECSRC 0xff /* 1 bit per recording source */
+#define SOUND_MIXER_DEVMASK 0xfe /* 1 bit per supported device */
+#define SOUND_MIXER_RECMASK 0xfd /* 1 bit per supp. recording source */
+#define SOUND_MIXER_CAPS 0xfc
+#define SOUND_CAP_EXCL_INPUT 0x00000001 /* Only 1 rec. src at a time */
+#define SOUND_MIXER_STEREODEVS 0xfb /* Mixer channels supporting stereo */
+
+/* Device mask bits */
+
+#define SOUND_MASK_VOLUME (1 << SOUND_MIXER_VOLUME)
+#define SOUND_MASK_BASS (1 << SOUND_MIXER_BASS)
+#define SOUND_MASK_TREBLE (1 << SOUND_MIXER_TREBLE)
+#define SOUND_MASK_SYNTH (1 << SOUND_MIXER_SYNTH)
+#define SOUND_MASK_PCM (1 << SOUND_MIXER_PCM)
+#define SOUND_MASK_SPEAKER (1 << SOUND_MIXER_SPEAKER)
+#define SOUND_MASK_LINE (1 << SOUND_MIXER_LINE)
+#define SOUND_MASK_MIC (1 << SOUND_MIXER_MIC)
+#define SOUND_MASK_CD (1 << SOUND_MIXER_CD)
+#define SOUND_MASK_IMIX (1 << SOUND_MIXER_IMIX)
+#define SOUND_MASK_ALTPCM (1 << SOUND_MIXER_ALTPCM)
+#define SOUND_MASK_RECLEV (1 << SOUND_MIXER_RECLEV)
+#define SOUND_MASK_IGAIN (1 << SOUND_MIXER_IGAIN)
+#define SOUND_MASK_OGAIN (1 << SOUND_MIXER_OGAIN)
+#define SOUND_MASK_LINE1 (1 << SOUND_MIXER_LINE1)
+#define SOUND_MASK_LINE2 (1 << SOUND_MIXER_LINE2)
+#define SOUND_MASK_LINE3 (1 << SOUND_MIXER_LINE3)
+#define SOUND_MASK_DIGITAL1 (1 << SOUND_MIXER_DIGITAL1)
+#define SOUND_MASK_DIGITAL2 (1 << SOUND_MIXER_DIGITAL2)
+#define SOUND_MASK_DIGITAL3 (1 << SOUND_MIXER_DIGITAL3)
+#define SOUND_MASK_PHONEIN (1 << SOUND_MIXER_PHONEIN)
+#define SOUND_MASK_PHONEOUT (1 << SOUND_MIXER_PHONEOUT)
+#define SOUND_MASK_RADIO (1 << SOUND_MIXER_RADIO)
+#define SOUND_MASK_VIDEO (1 << SOUND_MIXER_VIDEO)
+#define SOUND_MASK_MONITOR (1 << SOUND_MIXER_MONITOR)
+
+/* Obsolete macros */
+#define SOUND_MASK_MUTE (1 << SOUND_MIXER_MUTE)
+#define SOUND_MASK_ENHANCE (1 << SOUND_MIXER_ENHANCE)
+#define SOUND_MASK_LOUD (1 << SOUND_MIXER_LOUD)
+
+#define MIXER_READ(dev) _IOR('M', dev, int)
+#define SOUND_MIXER_READ_VOLUME MIXER_READ(SOUND_MIXER_VOLUME)
+#define SOUND_MIXER_READ_BASS MIXER_READ(SOUND_MIXER_BASS)
+#define SOUND_MIXER_READ_TREBLE MIXER_READ(SOUND_MIXER_TREBLE)
+#define SOUND_MIXER_READ_SYNTH MIXER_READ(SOUND_MIXER_SYNTH)
+#define SOUND_MIXER_READ_PCM MIXER_READ(SOUND_MIXER_PCM)
+#define SOUND_MIXER_READ_SPEAKER MIXER_READ(SOUND_MIXER_SPEAKER)
+#define SOUND_MIXER_READ_LINE MIXER_READ(SOUND_MIXER_LINE)
+#define SOUND_MIXER_READ_MIC MIXER_READ(SOUND_MIXER_MIC)
+#define SOUND_MIXER_READ_CD MIXER_READ(SOUND_MIXER_CD)
+#define SOUND_MIXER_READ_IMIX MIXER_READ(SOUND_MIXER_IMIX)
+#define SOUND_MIXER_READ_ALTPCM MIXER_READ(SOUND_MIXER_ALTPCM)
+#define SOUND_MIXER_READ_RECLEV MIXER_READ(SOUND_MIXER_RECLEV)
+#define SOUND_MIXER_READ_IGAIN MIXER_READ(SOUND_MIXER_IGAIN)
+#define SOUND_MIXER_READ_OGAIN MIXER_READ(SOUND_MIXER_OGAIN)
+#define SOUND_MIXER_READ_LINE1 MIXER_READ(SOUND_MIXER_LINE1)
+#define SOUND_MIXER_READ_LINE2 MIXER_READ(SOUND_MIXER_LINE2)
+#define SOUND_MIXER_READ_LINE3 MIXER_READ(SOUND_MIXER_LINE3)
+#define SOUND_MIXER_READ_DIGITAL1 MIXER_READ(SOUND_MIXER_DIGITAL1)
+#define SOUND_MIXER_READ_DIGITAL2 MIXER_READ(SOUND_MIXER_DIGITAL2)
+#define SOUND_MIXER_READ_DIGITAL3 MIXER_READ(SOUND_MIXER_DIGITAL3)
+#define SOUND_MIXER_READ_PHONEIN MIXER_READ(SOUND_MIXER_PHONEIN)
+#define SOUND_MIXER_READ_PHONEOUT MIXER_READ(SOUND_MIXER_PHONEOUT)
+#define SOUND_MIXER_READ_RADIO MIXER_READ(SOUND_MIXER_RADIO)
+#define SOUND_MIXER_READ_VIDEO MIXER_READ(SOUND_MIXER_VIDEO)
+#define SOUND_MIXER_READ_MONITOR MIXER_READ(SOUND_MIXER_MONITOR)
+
+/* Obsolete macros */
+#define SOUND_MIXER_READ_MUTE MIXER_READ(SOUND_MIXER_MUTE)
+#define SOUND_MIXER_READ_ENHANCE MIXER_READ(SOUND_MIXER_ENHANCE)
+#define SOUND_MIXER_READ_LOUD MIXER_READ(SOUND_MIXER_LOUD)
+
+#define SOUND_MIXER_READ_RECSRC MIXER_READ(SOUND_MIXER_RECSRC)
+#define SOUND_MIXER_READ_DEVMASK MIXER_READ(SOUND_MIXER_DEVMASK)
+#define SOUND_MIXER_READ_RECMASK MIXER_READ(SOUND_MIXER_RECMASK)
+#define SOUND_MIXER_READ_STEREODEVS MIXER_READ(SOUND_MIXER_STEREODEVS)
+#define SOUND_MIXER_READ_CAPS MIXER_READ(SOUND_MIXER_CAPS)
+
+#define MIXER_WRITE(dev) _IOWR('M', dev, int)
+#define SOUND_MIXER_WRITE_VOLUME MIXER_WRITE(SOUND_MIXER_VOLUME)
+#define SOUND_MIXER_WRITE_BASS MIXER_WRITE(SOUND_MIXER_BASS)
+#define SOUND_MIXER_WRITE_TREBLE MIXER_WRITE(SOUND_MIXER_TREBLE)
+#define SOUND_MIXER_WRITE_SYNTH MIXER_WRITE(SOUND_MIXER_SYNTH)
+#define SOUND_MIXER_WRITE_PCM MIXER_WRITE(SOUND_MIXER_PCM)
+#define SOUND_MIXER_WRITE_SPEAKER MIXER_WRITE(SOUND_MIXER_SPEAKER)
+#define SOUND_MIXER_WRITE_LINE MIXER_WRITE(SOUND_MIXER_LINE)
+#define SOUND_MIXER_WRITE_MIC MIXER_WRITE(SOUND_MIXER_MIC)
+#define SOUND_MIXER_WRITE_CD MIXER_WRITE(SOUND_MIXER_CD)
+#define SOUND_MIXER_WRITE_IMIX MIXER_WRITE(SOUND_MIXER_IMIX)
+#define SOUND_MIXER_WRITE_ALTPCM MIXER_WRITE(SOUND_MIXER_ALTPCM)
+#define SOUND_MIXER_WRITE_RECLEV MIXER_WRITE(SOUND_MIXER_RECLEV)
+#define SOUND_MIXER_WRITE_IGAIN MIXER_WRITE(SOUND_MIXER_IGAIN)
+#define SOUND_MIXER_WRITE_OGAIN MIXER_WRITE(SOUND_MIXER_OGAIN)
+#define SOUND_MIXER_WRITE_LINE1 MIXER_WRITE(SOUND_MIXER_LINE1)
+#define SOUND_MIXER_WRITE_LINE2 MIXER_WRITE(SOUND_MIXER_LINE2)
+#define SOUND_MIXER_WRITE_LINE3 MIXER_WRITE(SOUND_MIXER_LINE3)
+#define SOUND_MIXER_WRITE_DIGITAL1 MIXER_WRITE(SOUND_MIXER_DIGITAL1)
+#define SOUND_MIXER_WRITE_DIGITAL2 MIXER_WRITE(SOUND_MIXER_DIGITAL2)
+#define SOUND_MIXER_WRITE_DIGITAL3 MIXER_WRITE(SOUND_MIXER_DIGITAL3)
+#define SOUND_MIXER_WRITE_PHONEIN MIXER_WRITE(SOUND_MIXER_PHONEIN)
+#define SOUND_MIXER_WRITE_PHONEOUT MIXER_WRITE(SOUND_MIXER_PHONEOUT)
+#define SOUND_MIXER_WRITE_RADIO MIXER_WRITE(SOUND_MIXER_RADIO)
+#define SOUND_MIXER_WRITE_VIDEO MIXER_WRITE(SOUND_MIXER_VIDEO)
+#define SOUND_MIXER_WRITE_MONITOR MIXER_WRITE(SOUND_MIXER_MONITOR)
+
+#define SOUND_MIXER_WRITE_MUTE MIXER_WRITE(SOUND_MIXER_MUTE)
+#define SOUND_MIXER_WRITE_ENHANCE MIXER_WRITE(SOUND_MIXER_ENHANCE)
+#define SOUND_MIXER_WRITE_LOUD MIXER_WRITE(SOUND_MIXER_LOUD)
+
+#define SOUND_MIXER_WRITE_RECSRC MIXER_WRITE(SOUND_MIXER_RECSRC)
+
+typedef struct mixer_info {
+ char id[16];
+ char name[32];
+ int modify_counter;
+ int fillers[10];
+} mixer_info;
+
+#define SOUND_MIXER_INFO _IOR('M', 101, mixer_info)
+
+#define LEFT_CHN 0
+#define RIGHT_CHN 1
+
+/*
+ * Level 2 event types for /dev/sequencer
+ */
+
+/*
+ * The 4 most significant bits of byte 0 specify the class of
+ * the event:
+ *
+ * 0x8X = system level events,
+ * 0x9X = device/port specific events, event[1] = device/port,
+ * The last 4 bits give the subtype:
+ * 0x02 = Channel event (event[3] = chn).
+ * 0x01 = note event (event[4] = note).
+ * (0x01 is not used alone but always with bit 0x02).
+ * event[2] = MIDI message code (0x80=note off etc.)
+ *
+ */
+
+#define EV_SEQ_LOCAL 0x80
+#define EV_TIMING 0x81
+#define EV_CHN_COMMON 0x92
+#define EV_CHN_VOICE 0x93
+#define EV_SYSEX 0x94
+/*
+ * Event types 200 to 220 are reserved for application use.
+ * These numbers will not be used by the driver.
+ */
+
+/*
+ * Events for event type EV_CHN_VOICE
+ */
+
+#define MIDI_NOTEOFF 0x80
+#define MIDI_NOTEON 0x90
+#define MIDI_KEY_PRESSURE 0xA0
+
+/*
+ * Events for event type EV_CHN_COMMON
+ */
+
+#define MIDI_CTL_CHANGE 0xB0
+#define MIDI_PGM_CHANGE 0xC0
+#define MIDI_CHN_PRESSURE 0xD0
+#define MIDI_PITCH_BEND 0xE0
+
+#define MIDI_SYSTEM_PREFIX 0xF0
+
+/*
+ * Timer event types
+ */
+#define TMR_WAIT_REL 1 /* Time relative to the prev time */
+#define TMR_WAIT_ABS 2 /* Absolute time since TMR_START */
+#define TMR_STOP 3
+#define TMR_START 4
+#define TMR_CONTINUE 5
+#define TMR_TEMPO 6
+#define TMR_ECHO 8
+#define TMR_CLOCK 9 /* MIDI clock */
+#define TMR_SPP 10 /* Song position pointer */
+#define TMR_TIMESIG 11 /* Time signature */
+
+/*
+ * Local event types
+ */
+#define LOCL_STARTAUDIO 1
+
+#if (!defined(_KERNEL) && !defined(INKERNEL)) || defined(USE_SEQ_MACROS)
+/*
+ * Some convenience macros to simplify programming of the
+ * /dev/sequencer interface
+ *
+ * These macros define the API which should be used when possible.
+ */
+
+#ifndef USE_SIMPLE_MACROS
+void seqbuf_dump(void); /* This function must be provided by programs */
+
+/* Sample seqbuf_dump() implementation:
+ *
+ * SEQ_DEFINEBUF (2048); -- Defines a buffer for 2048 bytes
+ *
+ * int seqfd; -- The file descriptor for /dev/sequencer.
+ *
+ * void
+ * seqbuf_dump ()
+ * {
+ * if (_seqbufptr)
+ * if (write (seqfd, _seqbuf, _seqbufptr) == -1)
+ * {
+ * perror ("write /dev/sequencer");
+ * exit (-1);
+ * }
+ * _seqbufptr = 0;
+ * }
+ */
+
+#define SEQ_DEFINEBUF(len) \
+ u_char _seqbuf[len]; int _seqbuflen = len;int _seqbufptr = 0
+#define SEQ_USE_EXTBUF() \
+ extern u_char _seqbuf[]; \
+ extern int _seqbuflen;extern int _seqbufptr
+#define SEQ_DECLAREBUF() SEQ_USE_EXTBUF()
+#define SEQ_PM_DEFINES struct patmgr_info _pm_info
+#define _SEQ_NEEDBUF(len) \
+ if ((_seqbufptr+(len)) > _seqbuflen) \
+ seqbuf_dump()
+#define _SEQ_ADVBUF(len) _seqbufptr += len
+#define SEQ_DUMPBUF seqbuf_dump
+#else
+/*
+ * This variation of the sequencer macros is used just to format one event
+ * using fixed buffer.
+ *
+ * The program using the macro library must define the following macros before
+ * using this library.
+ *
+ * #define _seqbuf name of the buffer (u_char[])
+ * #define _SEQ_ADVBUF(len) If the applic needs to know the exact
+ * size of the event, this macro can be used.
+ * Otherwise this must be defined as empty.
+ * #define _seqbufptr Define the name of index variable or 0 if
+ * not required.
+ */
+#define _SEQ_NEEDBUF(len) /* empty */
+#endif
+
+#define PM_LOAD_PATCH(dev, bank, pgm) \
+ (SEQ_DUMPBUF(), _pm_info.command = _PM_LOAD_PATCH, \
+ _pm_info.device=dev, _pm_info.data.data8[0]=pgm, \
+ _pm_info.parm1 = bank, _pm_info.parm2 = 1, \
+ ioctl(seqfd, SNDCTL_PMGR_ACCESS, &_pm_info))
+#define PM_LOAD_PATCHES(dev, bank, pgm) \
+ (SEQ_DUMPBUF(), _pm_info.command = _PM_LOAD_PATCH, \
+ _pm_info.device=dev, bcopy( pgm, _pm_info.data.data8, 128), \
+ _pm_info.parm1 = bank, _pm_info.parm2 = 128, \
+ ioctl(seqfd, SNDCTL_PMGR_ACCESS, &_pm_info))
+
+#define SEQ_VOLUME_MODE(dev, mode) { \
+ _SEQ_NEEDBUF(8);\
+ _seqbuf[_seqbufptr] = SEQ_EXTENDED;\
+ _seqbuf[_seqbufptr+1] = SEQ_VOLMODE;\
+ _seqbuf[_seqbufptr+2] = (dev);\
+ _seqbuf[_seqbufptr+3] = (mode);\
+ _seqbuf[_seqbufptr+4] = 0;\
+ _seqbuf[_seqbufptr+5] = 0;\
+ _seqbuf[_seqbufptr+6] = 0;\
+ _seqbuf[_seqbufptr+7] = 0;\
+ _SEQ_ADVBUF(8);}
+
+/*
+ * Midi voice messages
+ */
+
+#define _CHN_VOICE(dev, event, chn, note, parm) { \
+ _SEQ_NEEDBUF(8);\
+ _seqbuf[_seqbufptr] = EV_CHN_VOICE;\
+ _seqbuf[_seqbufptr+1] = (dev);\
+ _seqbuf[_seqbufptr+2] = (event);\
+ _seqbuf[_seqbufptr+3] = (chn);\
+ _seqbuf[_seqbufptr+4] = (note);\
+ _seqbuf[_seqbufptr+5] = (parm);\
+ _seqbuf[_seqbufptr+6] = (0);\
+ _seqbuf[_seqbufptr+7] = 0;\
+ _SEQ_ADVBUF(8);}
+
+#define SEQ_START_NOTE(dev, chn, note, vol) \
+ _CHN_VOICE(dev, MIDI_NOTEON, chn, note, vol)
+
+#define SEQ_STOP_NOTE(dev, chn, note, vol) \
+ _CHN_VOICE(dev, MIDI_NOTEOFF, chn, note, vol)
+
+#define SEQ_KEY_PRESSURE(dev, chn, note, pressure) \
+ _CHN_VOICE(dev, MIDI_KEY_PRESSURE, chn, note, pressure)
+
+/*
+ * Midi channel messages
+ */
+
+#define _CHN_COMMON(dev, event, chn, p1, p2, w14) { \
+ _SEQ_NEEDBUF(8);\
+ _seqbuf[_seqbufptr] = EV_CHN_COMMON;\
+ _seqbuf[_seqbufptr+1] = (dev);\
+ _seqbuf[_seqbufptr+2] = (event);\
+ _seqbuf[_seqbufptr+3] = (chn);\
+ _seqbuf[_seqbufptr+4] = (p1);\
+ _seqbuf[_seqbufptr+5] = (p2);\
+ *(short *)&_seqbuf[_seqbufptr+6] = (w14);\
+ _SEQ_ADVBUF(8);}
+/*
+ * SEQ_SYSEX permits sending of sysex messages. (It may look that it permits
+ * sending any MIDI bytes but it's absolutely not possible. Trying to do
+ * so _will_ cause problems with MPU401 intelligent mode).
+ *
+ * Sysex messages are sent in blocks of 1 to 6 bytes. Longer messages must be
+ * sent by calling SEQ_SYSEX() several times (there must be no other events
+ * between them). First sysex fragment must have 0xf0 in the first byte
+ * and the last byte (buf[len-1]) of the last fragment must be 0xf7. No byte
+ * between these sysex start and end markers can be larger than 0x7f. Also
+ * the length of each fragment (except the last one) must be 6.
+ *
+ * Breaking the above rules may work with some MIDI ports but is likely to
+ * cause fatal problems with some other devices (such as MPU401).
+ */
+#define SEQ_SYSEX(dev, buf, len) { \
+ int i, l=(len); if (l>6)l=6;\
+ _SEQ_NEEDBUF(8);\
+ _seqbuf[_seqbufptr] = EV_SYSEX;\
+ for(i=0;i<l;i++)_seqbuf[_seqbufptr+i+1] = (buf)[i];\
+ for(i=l;i<6;i++)_seqbuf[_seqbufptr+i+1] = 0xff;\
+ _SEQ_ADVBUF(8);}
+
+#define SEQ_CHN_PRESSURE(dev, chn, pressure) \
+ _CHN_COMMON(dev, MIDI_CHN_PRESSURE, chn, pressure, 0, 0)
+
+#define SEQ_SET_PATCH(dev, chn, patch) \
+ _CHN_COMMON(dev, MIDI_PGM_CHANGE, chn, patch, 0, 0)
+
+#define SEQ_CONTROL(dev, chn, controller, value) \
+ _CHN_COMMON(dev, MIDI_CTL_CHANGE, chn, controller, 0, value)
+
+#define SEQ_BENDER(dev, chn, value) \
+ _CHN_COMMON(dev, MIDI_PITCH_BEND, chn, 0, 0, value)
+
+
+#define SEQ_V2_X_CONTROL(dev, voice, controller, value) { \
+ _SEQ_NEEDBUF(8);\
+ _seqbuf[_seqbufptr] = SEQ_EXTENDED;\
+ _seqbuf[_seqbufptr+1] = SEQ_CONTROLLER;\
+ _seqbuf[_seqbufptr+2] = (dev);\
+ _seqbuf[_seqbufptr+3] = (voice);\
+ _seqbuf[_seqbufptr+4] = (controller);\
+ *(short *)&_seqbuf[_seqbufptr+5] = (value);\
+ _seqbuf[_seqbufptr+7] = 0;\
+ _SEQ_ADVBUF(8);}
+
+/*
+ * The following 5 macros are incorrectly implemented and obsolete.
+ * Use SEQ_BENDER and SEQ_CONTROL (with proper controller) instead.
+ */
+
+#define SEQ_PITCHBEND(dev, voice, value) \
+ SEQ_V2_X_CONTROL(dev, voice, CTRL_PITCH_BENDER, value)
+#define SEQ_BENDER_RANGE(dev, voice, value) \
+ SEQ_V2_X_CONTROL(dev, voice, CTRL_PITCH_BENDER_RANGE, value)
+#define SEQ_EXPRESSION(dev, voice, value) \
+ SEQ_CONTROL(dev, voice, CTL_EXPRESSION, value*128)
+#define SEQ_MAIN_VOLUME(dev, voice, value) \
+ SEQ_CONTROL(dev, voice, CTL_MAIN_VOLUME, (value*16383)/100)
+#define SEQ_PANNING(dev, voice, pos) \
+ SEQ_CONTROL(dev, voice, CTL_PAN, (pos+128) / 2)
+
+/*
+ * Timing and synchronization macros
+ */
+
+#define _TIMER_EVENT(ev, parm) { \
+ _SEQ_NEEDBUF(8);\
+ _seqbuf[_seqbufptr+0] = EV_TIMING; \
+ _seqbuf[_seqbufptr+1] = (ev); \
+ _seqbuf[_seqbufptr+2] = 0;\
+ _seqbuf[_seqbufptr+3] = 0;\
+ *(u_int *)&_seqbuf[_seqbufptr+4] = (parm); \
+ _SEQ_ADVBUF(8); \
+ }
+
+#define SEQ_START_TIMER() _TIMER_EVENT(TMR_START, 0)
+#define SEQ_STOP_TIMER() _TIMER_EVENT(TMR_STOP, 0)
+#define SEQ_CONTINUE_TIMER() _TIMER_EVENT(TMR_CONTINUE, 0)
+#define SEQ_WAIT_TIME(ticks) _TIMER_EVENT(TMR_WAIT_ABS, ticks)
+#define SEQ_DELTA_TIME(ticks) _TIMER_EVENT(TMR_WAIT_REL, ticks)
+#define SEQ_ECHO_BACK(key) _TIMER_EVENT(TMR_ECHO, key)
+#define SEQ_SET_TEMPO(value) _TIMER_EVENT(TMR_TEMPO, value)
+#define SEQ_SONGPOS(pos) _TIMER_EVENT(TMR_SPP, pos)
+#define SEQ_TIME_SIGNATURE(sig) _TIMER_EVENT(TMR_TIMESIG, sig)
+
+/*
+ * Local control events
+ */
+
+#define _LOCAL_EVENT(ev, parm) { \
+ _SEQ_NEEDBUF(8);\
+ _seqbuf[_seqbufptr+0] = EV_SEQ_LOCAL; \
+ _seqbuf[_seqbufptr+1] = (ev); \
+ _seqbuf[_seqbufptr+2] = 0;\
+ _seqbuf[_seqbufptr+3] = 0;\
+ *(u_int *)&_seqbuf[_seqbufptr+4] = (parm); \
+ _SEQ_ADVBUF(8); \
+ }
+
+#define SEQ_PLAYAUDIO(devmask) _LOCAL_EVENT(LOCL_STARTAUDIO, devmask)
+/*
+ * Events for the level 1 interface only
+ */
+
+#define SEQ_MIDIOUT(device, byte) { \
+ _SEQ_NEEDBUF(4);\
+ _seqbuf[_seqbufptr] = SEQ_MIDIPUTC;\
+ _seqbuf[_seqbufptr+1] = (byte);\
+ _seqbuf[_seqbufptr+2] = (device);\
+ _seqbuf[_seqbufptr+3] = 0;\
+ _SEQ_ADVBUF(4);}
+
+/*
+ * Patch loading.
+ */
+#define SEQ_WRPATCH(patchx, len) { \
+ if (_seqbufptr) seqbuf_dump(); \
+ if (write(seqfd, (char*)(patchx), len)==-1) \
+ perror("Write patch: /dev/sequencer"); \
+ }
+
+#define SEQ_WRPATCH2(patchx, len) \
+ ( seqbuf_dump(), write(seqfd, (char*)(patchx), len) )
+
+#endif
+
+/*
+ * Here I have moved all the aliases for ioctl names.
+ */
+
+#define SNDCTL_DSP_SAMPLESIZE SNDCTL_DSP_SETFMT
+#define SOUND_PCM_WRITE_BITS SNDCTL_DSP_SETFMT
+#define SOUND_PCM_SETFMT SNDCTL_DSP_SETFMT
+
+#define SOUND_PCM_WRITE_RATE SNDCTL_DSP_SPEED
+#define SOUND_PCM_POST SNDCTL_DSP_POST
+#define SOUND_PCM_RESET SNDCTL_DSP_RESET
+#define SOUND_PCM_SYNC SNDCTL_DSP_SYNC
+#define SOUND_PCM_SUBDIVIDE SNDCTL_DSP_SUBDIVIDE
+#define SOUND_PCM_SETFRAGMENT SNDCTL_DSP_SETFRAGMENT
+#define SOUND_PCM_GETFMTS SNDCTL_DSP_GETFMTS
+#define SOUND_PCM_GETOSPACE SNDCTL_DSP_GETOSPACE
+#define SOUND_PCM_GETISPACE SNDCTL_DSP_GETISPACE
+#define SOUND_PCM_NONBLOCK SNDCTL_DSP_NONBLOCK
+#define SOUND_PCM_GETCAPS SNDCTL_DSP_GETCAPS
+#define SOUND_PCM_GETTRIGGER SNDCTL_DSP_GETTRIGGER
+#define SOUND_PCM_SETTRIGGER SNDCTL_DSP_SETTRIGGER
+#define SOUND_PCM_SETSYNCRO SNDCTL_DSP_SETSYNCRO
+#define SOUND_PCM_GETIPTR SNDCTL_DSP_GETIPTR
+#define SOUND_PCM_GETOPTR SNDCTL_DSP_GETOPTR
+#define SOUND_PCM_MAPINBUF SNDCTL_DSP_MAPINBUF
+#define SOUND_PCM_MAPOUTBUF SNDCTL_DSP_MAPOUTBUF
+
+/***********************************************************************/
+
+/**
+ * XXX OSSv4 defines -- some bits taken straight out of the new
+ * sys/soundcard.h bundled with recent OSS releases.
+ *
+ * NB: These macros and structures will be reorganized and inserted
+ * in appropriate places throughout this file once the code begins
+ * to take shape.
+ *
+ * @todo reorganize layout more like the 4Front version
+ * @todo ask about maintaining __SIOWR vs. _IOWR ioctl cmd defines
+ */
+
+/**
+ * @note The @c OSSV4_EXPERIMENT macro is meant to wrap new development code
+ * in the sound system relevant to adopting 4Front's OSSv4 specification.
+ * Users should not enable this! Really!
+ */
+#if 0
+# define OSSV4_EXPERIMENT 1
+#else
+# undef OSSV4_EXPERIMENT
+#endif
+
+#ifdef SOUND_VERSION
+# undef SOUND_VERSION
+# define SOUND_VERSION 0x040000
+#endif /* SOUND_VERSION */
+
+#define OSS_LONGNAME_SIZE 64
+#define OSS_LABEL_SIZE 16
+#define OSS_DEVNODE_SIZE 32
+typedef char oss_longname_t[OSS_LONGNAME_SIZE];
+typedef char oss_label_t[OSS_LABEL_SIZE];
+typedef char oss_devnode_t[OSS_DEVNODE_SIZE];
+
+typedef struct audio_errinfo
+{
+ int play_underruns;
+ int rec_overruns;
+ unsigned int play_ptradjust;
+ unsigned int rec_ptradjust;
+ int play_errorcount;
+ int rec_errorcount;
+ int play_lasterror;
+ int rec_lasterror;
+ long play_errorparm;
+ long rec_errorparm;
+ int filler[16];
+} audio_errinfo;
+
+#define SNDCTL_DSP_GETPLAYVOL _IOR ('P', 24, int)
+#define SNDCTL_DSP_SETPLAYVOL _IOWR('P', 24, int)
+#define SNDCTL_DSP_GETERROR _IOR ('P', 25, audio_errinfo)
+
+
+/*
+ ****************************************************************************
+ * Sync groups for audio devices
+ */
+typedef struct oss_syncgroup
+{
+ int id;
+ int mode;
+ int filler[16];
+} oss_syncgroup;
+
+#define SNDCTL_DSP_SYNCGROUP _IOWR('P', 28, oss_syncgroup)
+#define SNDCTL_DSP_SYNCSTART _IOW ('P', 29, int)
+
+/*
+ **************************************************************************
+ * "cooked" mode enables software based conversions for sample rate, sample
+ * format (bits) and number of channels (mono/stereo). These conversions are
+ * required with some devices that support only one sample rate or just stereo
+ * to let applications use other formats. The cooked mode is enabled by
+ * default. However it's necessary to disable this mode when mmap() is used or
+ * when very deterministic timing is required. SNDCTL_DSP_COOKEDMODE is an
+ * optional call introduced in OSS 3.9.6f. Its _error return must be ignored_
+ * since normally this call will return errno=EINVAL.
+ *
+ * SNDCTL_DSP_COOKEDMODE must be called immediately after open before doing
+ * anything else. Otherwise the call will not have any effect.
+ */
+#define SNDCTL_DSP_COOKEDMODE _IOW ('P', 30, int)
+
+/*
+ **************************************************************************
+ * SNDCTL_DSP_SILENCE and SNDCTL_DSP_SKIP are new calls in OSS 3.99.0
+ * that can be used to implement pause/continue during playback (no effect
+ * on recording).
+ */
+#define SNDCTL_DSP_SILENCE _IO ('P', 31)
+#define SNDCTL_DSP_SKIP _IO ('P', 32)
+
+/*
+ ****************************************************************************
+ * Abort transfer (reset) functions for input and output
+ */
+#define SNDCTL_DSP_HALT_INPUT _IO ('P', 33)
+#define SNDCTL_DSP_RESET_INPUT SNDCTL_DSP_HALT_INPUT /* Old name */
+#define SNDCTL_DSP_HALT_OUTPUT _IO ('P', 34)
+#define SNDCTL_DSP_RESET_OUTPUT SNDCTL_DSP_HALT_OUTPUT /* Old name */
+
+/*
+ ****************************************************************************
+ * Low water level control
+ */
+#define SNDCTL_DSP_LOW_WATER _IOW ('P', 34, int)
+
+/** @todo Get rid of OSS_NO_LONG_LONG references? */
+
+/*
+ ****************************************************************************
+ * 64 bit pointer support. Only available in environments that support
+ * the 64 bit (long long) integer type.
+ */
+#ifndef OSS_NO_LONG_LONG
+typedef struct
+{
+ long long samples;
+ int fifo_samples;
+ int filler[32]; /* For future use */
+} oss_count_t;
+
+#define SNDCTL_DSP_CURRENT_IPTR _IOR ('P', 35, oss_count_t)
+#define SNDCTL_DSP_CURRENT_OPTR _IOR ('P', 36, oss_count_t)
+#endif
+
+/*
+ ****************************************************************************
+ * Interface for selecting recording sources and playback output routings.
+ */
+#define SNDCTL_DSP_GET_RECSRC_NAMES _IOR ('P', 37, oss_mixer_enuminfo)
+#define SNDCTL_DSP_GET_RECSRC _IOR ('P', 38, int)
+#define SNDCTL_DSP_SET_RECSRC _IOWR('P', 38, int)
+
+#define SNDCTL_DSP_GET_PLAYTGT_NAMES _IOR ('P', 39, oss_mixer_enuminfo)
+#define SNDCTL_DSP_GET_PLAYTGT _IOR ('P', 40, int)
+#define SNDCTL_DSP_SET_PLAYTGT _IOWR('P', 40, int)
+#define SNDCTL_DSP_GETRECVOL _IOR ('P', 41, int)
+#define SNDCTL_DSP_SETRECVOL _IOWR('P', 41, int)
+
+/*
+ ***************************************************************************
+ * Some calls for setting the channel assignment with multi channel devices
+ * (see the manual for details). */
+#define SNDCTL_DSP_GET_CHNORDER _IOR ('P', 42, unsigned long long)
+#define SNDCTL_DSP_SET_CHNORDER _IOWR('P', 42, unsigned long long)
+# define CHID_UNDEF 0
+# define CHID_L 1
+# define CHID_R 2
+# define CHID_C 3
+# define CHID_LFE 4
+# define CHID_LS 5
+# define CHID_RS 6
+# define CHID_LR 7
+# define CHID_RR 8
+#define CHNORDER_UNDEF 0x0000000000000000ULL
+#define CHNORDER_NORMAL 0x0000000087654321ULL
+
+#define MAX_PEAK_CHANNELS 128
+typedef unsigned short oss_peaks_t[MAX_PEAK_CHANNELS];
+#define SNDCTL_DSP_GETIPEAKS _IOR('P', 43, oss_peaks_t)
+#define SNDCTL_DSP_GETOPEAKS _IOR('P', 44, oss_peaks_t)
+#define SNDCTL_DSP_POLICY _IOW('P', 45, int) /* See the manual */
+
+/*
+ * OSS_SYSINFO is obsolete. Use SNDCTL_SYSINFO instead.
+ */
+#define OSS_GETVERSION _IOR ('M', 118, int)
+
+/**
+ * @brief Argument for SNDCTL_SYSINFO ioctl.
+ *
+ * For use w/ the SNDCTL_SYSINFO ioctl available on audio (/dev/dsp*),
+ * mixer, and MIDI devices.
+ */
+typedef struct oss_sysinfo
+{
+ char product[32]; /* For example OSS/Free, OSS/Linux or
+ OSS/Solaris */
+ char version[32]; /* For example 4.0a */
+ int versionnum; /* See OSS_GETVERSION */
+ char options[128]; /* Reserved */
+
+ int numaudios; /* # of audio/dsp devices */
+ int openedaudio[8]; /* Bit mask telling which audio devices
+ are busy */
+
+ int numsynths; /* # of available synth devices */
+ int nummidis; /* # of available MIDI ports */
+ int numtimers; /* # of available timer devices */
+ int nummixers; /* # of mixer devices */
+
+ int openedmidi[8]; /* Bit mask telling which midi devices
+ are busy */
+ int numcards; /* Number of sound cards in the system */
+ int filler[241]; /* For future expansion (set to -1) */
+} oss_sysinfo;
+
+typedef struct oss_mixext
+{
+ int dev; /* Mixer device number */
+ int ctrl; /* Controller number */
+ int type; /* Entry type */
+# define MIXT_DEVROOT 0 /* Device root entry */
+# define MIXT_GROUP 1 /* Controller group */
+# define MIXT_ONOFF 2 /* OFF (0) or ON (1) */
+# define MIXT_ENUM 3 /* Enumerated (0 to maxvalue) */
+# define MIXT_MONOSLIDER 4 /* Mono slider (0 to 100) */
+# define MIXT_STEREOSLIDER 5 /* Stereo slider (dual 0 to 100) */
+# define MIXT_MESSAGE 6 /* (Readable) textual message */
+# define MIXT_MONOVU 7 /* VU meter value (mono) */
+# define MIXT_STEREOVU 8 /* VU meter value (stereo) */
+# define MIXT_MONOPEAK 9 /* VU meter peak value (mono) */
+# define MIXT_STEREOPEAK 10 /* VU meter peak value (stereo) */
+# define MIXT_RADIOGROUP 11 /* Radio button group */
+# define MIXT_MARKER 12 /* Separator between normal and extension entries */
+# define MIXT_VALUE 13 /* Decimal value entry */
+# define MIXT_HEXVALUE 14 /* Hexadecimal value entry */
+# define MIXT_MONODB 15 /* Mono atten. slider (0 to -144) */
+# define MIXT_STEREODB 16 /* Stereo atten. slider (dual 0 to -144) */
+# define MIXT_SLIDER 17 /* Slider (mono) with full integer range */
+# define MIXT_3D 18
+
+ /* Possible value range (minvalue to maxvalue) */
+ /* Note that maxvalue may also be smaller than minvalue */
+ int maxvalue;
+ int minvalue;
+
+ int flags;
+# define MIXF_READABLE 0x00000001 /* Has readable value */
+# define MIXF_WRITEABLE 0x00000002 /* Has writeable value */
+# define MIXF_POLL 0x00000004 /* May change itself */
+# define MIXF_HZ 0x00000008 /* Hertz scale */
+# define MIXF_STRING 0x00000010 /* Use dynamic extensions for value */
+# define MIXF_DYNAMIC 0x00000010 /* Supports dynamic extensions */
+# define MIXF_OKFAIL 0x00000020 /* Interpret value as 1=OK, 0=FAIL */
+# define MIXF_FLAT 0x00000040 /* Flat vertical space requirements */
+# define MIXF_LEGACY 0x00000080 /* Legacy mixer control group */
+ char id[16]; /* Mnemonic ID (mainly for internal use) */
+ int parent; /* Entry# of parent (group) node (-1 if root) */
+
+ int dummy; /* Internal use */
+
+ int timestamp;
+
+ char data[64]; /* Misc data (entry type dependent) */
+ unsigned char enum_present[32]; /* Mask of allowed enum values */
+ int control_no; /* SOUND_MIXER_VOLUME..SOUND_MIXER_MIDI */
+ /* (-1 means not indicated) */
+
+/*
+ * The desc field is reserved for internal purposes of OSS. It should not be
+ * used by applications.
+ */
+ unsigned int desc;
+#define MIXEXT_SCOPE_MASK 0x0000003f
+#define MIXEXT_SCOPE_OTHER 0x00000000
+#define MIXEXT_SCOPE_INPUT 0x00000001
+#define MIXEXT_SCOPE_OUTPUT 0x00000002
+#define MIXEXT_SCOPE_MONITOR 0x00000003
+#define MIXEXT_SCOPE_RECSWITCH 0x00000004
+
+ char extname[32];
+ int update_counter;
+ int filler[7];
+} oss_mixext;
+
+typedef struct oss_mixext_root
+{
+ char id[16];
+ char name[48];
+} oss_mixext_root;
+
+typedef struct oss_mixer_value
+{
+ int dev;
+ int ctrl;
+ int value;
+ int flags; /* Reserved for future use. Initialize to 0 */
+ int timestamp; /* Must be set to oss_mixext.timestamp */
+ int filler[8]; /* Reserved for future use. Initialize to 0 */
+} oss_mixer_value;
+
+#define OSS_ENUM_MAXVALUE 255
+typedef struct oss_mixer_enuminfo
+{
+ int dev;
+ int ctrl;
+ int nvalues;
+ int version; /* Read the manual */
+ short strindex[OSS_ENUM_MAXVALUE];
+ char strings[3000];
+} oss_mixer_enuminfo;
+
+#define OPEN_READ PCM_ENABLE_INPUT
+#define OPEN_WRITE PCM_ENABLE_OUTPUT
+#define OPEN_READWRITE (OPEN_READ|OPEN_WRITE)
+
+/**
+ * @brief Argument for SNDCTL_AUDIOINFO ioctl.
+ *
+ * For use w/ the SNDCTL_AUDIOINFO ioctl available on audio (/dev/dsp*)
+ * devices.
+ */
+typedef struct oss_audioinfo
+{
+ int dev; /* Audio device number */
+ char name[64];
+ int busy; /* 0, OPEN_READ, OPEN_WRITE or OPEN_READWRITE */
+ int pid;
+ int caps; /* DSP_CAP_INPUT, DSP_CAP_OUTPUT */
+ int iformats;
+ int oformats;
+ int magic; /* Reserved for internal use */
+ char cmd[64]; /* Command using the device (if known) */
+ int card_number;
+ int port_number;
+ int mixer_dev;
+ int real_device; /* Obsolete field. Replaced by devnode */
+ int enabled; /* 1=enabled, 0=device not ready at this
+ moment */
+ int flags; /* For internal use only - no practical
+ meaning */
+ int min_rate; /* Sample rate limits */
+ int max_rate;
+ int min_channels; /* Number of channels supported */
+ int max_channels;
+ int binding; /* DSP_BIND_FRONT, etc. 0 means undefined */
+ int rate_source;
+ char handle[32];
+ #define OSS_MAX_SAMPLE_RATES 20 /* Cannot be changed */
+ unsigned int nrates;
+ unsigned int rates[OSS_MAX_SAMPLE_RATES]; /* Please read the manual before using these */
+ oss_longname_t song_name; /* Song name (if given) */
+ oss_label_t label; /* Device label (if given) */
+ int latency; /* In usecs, -1=unknown */
+ oss_devnode_t devnode; /* Device special file name (inside
+ /dev) */
+ int filler[186];
+} oss_audioinfo;
+
+typedef struct oss_mixerinfo
+{
+ int dev;
+ char id[16];
+ char name[32];
+ int modify_counter;
+ int card_number;
+ int port_number;
+ char handle[32];
+ int magic; /* Reserved */
+ int enabled; /* Reserved */
+ int caps;
+#define MIXER_CAP_VIRTUAL 0x00000001
+ int flags; /* Reserved */
+ int nrext;
+ /*
+ * The priority field can be used to select the default (motherboard)
+ * mixer device. The mixer with the highest priority is the
+ * most preferred one. -2 or less means that this device cannot be used
+ * as the default mixer.
+ */
+ int priority;
+ int filler[254]; /* Reserved */
+} oss_mixerinfo;
+
+typedef struct oss_midi_info
+{
+ int dev; /* Midi device number */
+ char name[64];
+ int busy; /* 0, OPEN_READ, OPEN_WRITE or OPEN_READWRITE */
+ int pid;
+ char cmd[64]; /* Command using the device (if known) */
+ int caps;
+#define MIDI_CAP_MPU401 0x00000001 /**** OBSOLETE ****/
+#define MIDI_CAP_INPUT 0x00000002
+#define MIDI_CAP_OUTPUT 0x00000004
+#define MIDI_CAP_INOUT (MIDI_CAP_INPUT|MIDI_CAP_OUTPUT)
+#define MIDI_CAP_VIRTUAL 0x00000008 /* Pseudo device */
+#define MIDI_CAP_MTCINPUT 0x00000010 /* Supports SNDCTL_MIDI_MTCINPUT */
+#define MIDI_CAP_CLIENT 0x00000020 /* Virtual client side device */
+#define MIDI_CAP_SERVER 0x00000040 /* Virtual server side device */
+#define MIDI_CAP_INTERNAL 0x00000080 /* Internal (synth) device */
+#define MIDI_CAP_EXTERNAL 0x00000100 /* external (MIDI port) device */
+#define MIDI_CAP_PTOP 0x00000200 /* Point to point link to one device */
+#define MIDI_CAP_MTC 0x00000400 /* MTC/SMPTE (control) device */
+ int magic; /* Reserved for internal use */
+ int card_number;
+ int port_number;
+ int enabled; /* 1=enabled, 0=device not ready at this moment */
+ int flags; /* For internal use only - no practical meaning */
+ char handle[32];
+ oss_longname_t song_name; /* Song name (if known) */
+ oss_label_t label; /* Device label (if given) */
+ int latency; /* In usecs, -1=unknown */
+ int filler[244];
+} oss_midi_info;
+
+typedef struct oss_card_info
+{
+ int card;
+ char shortname[16];
+ char longname[128];
+ int flags;
+ int filler[256];
+} oss_card_info;
+
+#define SNDCTL_SYSINFO _IOR ('X', 1, oss_sysinfo)
+#define OSS_SYSINFO SNDCTL_SYSINFO /* Old name */
+
+#define SNDCTL_MIX_NRMIX _IOR ('X', 2, int)
+#define SNDCTL_MIX_NREXT _IOWR('X', 3, int)
+#define SNDCTL_MIX_EXTINFO _IOWR('X', 4, oss_mixext)
+#define SNDCTL_MIX_READ _IOWR('X', 5, oss_mixer_value)
+#define SNDCTL_MIX_WRITE _IOWR('X', 6, oss_mixer_value)
+
+#define SNDCTL_AUDIOINFO _IOWR('X', 7, oss_audioinfo)
+#define SNDCTL_MIX_ENUMINFO _IOWR('X', 8, oss_mixer_enuminfo)
+#define SNDCTL_MIDIINFO _IOWR('X', 9, oss_midi_info)
+#define SNDCTL_MIXERINFO _IOWR('X',10, oss_mixerinfo)
+#define SNDCTL_CARDINFO _IOWR('X',11, oss_card_info)
+
+/*
+ * Few more "globally" available ioctl calls.
+ */
+#define SNDCTL_SETSONG _IOW ('Y', 2, oss_longname_t)
+#define SNDCTL_GETSONG _IOR ('Y', 2, oss_longname_t)
+#define SNDCTL_SETNAME _IOW ('Y', 3, oss_longname_t)
+#define SNDCTL_SETLABEL _IOW ('Y', 4, oss_label_t)
+#define SNDCTL_GETLABEL _IOR ('Y', 4, oss_label_t)
+
+#endif /* !_SYS_SOUNDCARD_H_ */
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
/* Save the patch and take the last browser's dir in order to re-use it the
next time. */
- m::conf::samplePath = u::fs::dirname(fname);
+ m::conf::conf.samplePath = u::fs::dirname(fname);
int res = m::mh::loadChannel(channelId, fname);
if (res != G_RES_OK)
/* -------------------------------------------------------------------------- */
-void addChannel(ID columnId, ChannelType type, int size)
+void addChannel(ID columnId, ChannelType type)
{
m::mh::addChannel(type, columnId);
}
m::model::onSwap(m::model::channels, channelId, [&](m::Channel& ch)
{
+ if (!ch.hasActions)
+ return;
if (ch.readActions || (!ch.readActions && ch.recStatus == ChannelStatus::WAIT))
- ch.stopReadingActions(m::clock::isRunning(), m::conf::treatRecsAsLoops,
- m::conf::recsStopOnChanHalt);
+ ch.stopReadingActions(m::clock::isRunning(), m::conf::conf.treatRecsAsLoops,
+ m::conf::conf.recsStopOnChanHalt);
else
- ch.startReadingActions(m::conf::treatRecsAsLoops, m::conf::recsStopOnChanHalt);
+ ch.startReadingActions(m::conf::conf.treatRecsAsLoops, m::conf::conf.recsStopOnChanHalt);
});
}
{
m::model::onSwap(m::model::channels, channelId, [&](m::Channel& ch)
{
- ch.startReadingActions(m::conf::treatRecsAsLoops, m::conf::recsStopOnChanHalt);
+ ch.startReadingActions(m::conf::conf.treatRecsAsLoops, m::conf::conf.recsStopOnChanHalt);
});
}
{
m::model::onSwap(m::model::channels, channelId, [&](m::Channel& ch)
{
- ch.stopReadingActions(m::clock::isRunning(), m::conf::treatRecsAsLoops,
- m::conf::recsStopOnChanHalt);
+ ch.stopReadingActions(m::clock::isRunning(), m::conf::conf.treatRecsAsLoops,
+ m::conf::conf.recsStopOnChanHalt);
});
}
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
/* addChannel
Adds an empty new channel to the stack. */
-void addChannel(ID columnId, ChannelType type, int size);
+void addChannel(ID columnId, ChannelType type);
/* loadChannel
Fills an existing channel with a wave. */
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
namespace c {
namespace io
{
+namespace
+{
+void refreshMidiWindows_()
+{
+ Fl::lock();
+ u::gui::refreshSubWindow(WID_MIDI_INPUT);
+ u::gui::refreshSubWindow(WID_MIDI_OUTPUT);
+ Fl::unlock();
+}
+} // {anonymous}
+
+/* -------------------------------------------------------------------------- */
+/* -------------------------------------------------------------------------- */
+/* -------------------------------------------------------------------------- */
+
+
void keyPress(ID channelId, bool ctrl, bool shift, int velocity)
{
if (ctrl)
/* -------------------------------------------------------------------------- */
-void midiLearn(m::MidiEvent e, std::atomic<uint32_t>& param, ID channelId)
+void startChannelMidiLearn(int param, ID channelId)
{
- /* No MIDI learning if we are learning a Channel (channelId != 0) and
- the selected MIDI channel is filtered OR if we are learning a global
- parameter (channel == 0) and the selected MIDI channel is filtered. */
-
- if (channelId == 0) {
- if (!m::conf::isMidiInAllowed(e.getChannel()))
- return;
- }
- else {
- m::model::ChannelsLock l(m::model::channels);
- if (!m::model::get(m::model::channels, channelId).isMidiInAllowed(e.getChannel()))
- return;
- }
-
- param.store(e.getRawNoVelocity());
- m::midiDispatcher::stopMidiLearn();
+ m::midiDispatcher::startChannelLearn(param, channelId, refreshMidiWindows_);
+}
- Fl::lock();
- u::gui::refreshSubWindow(WID_MIDI_INPUT);
- u::gui::refreshSubWindow(WID_MIDI_OUTPUT);
- Fl::unlock();
+
+void startMasterMidiLearn(int param)
+{
+ m::midiDispatcher::startMasterLearn(param, refreshMidiWindows_);
}
+
+
+#ifdef WITH_VST
+
+void startPluginMidiLearn(int paramIndex, ID pluginId)
+{
+ m::midiDispatcher::startPluginLearn(paramIndex, pluginId, refreshMidiWindows_);
+}
+
+#endif
+
+
+/* -------------------------------------------------------------------------- */
+
+
+void stopMidiLearn()
+{
+ m::midiDispatcher::stopLearn();
+ refreshMidiWindows_();
+}
+
+
+/* -------------------------------------------------------------------------- */
+
+
+void clearChannelMidiLearn(int param, ID channelId)
+{
+ m::midiDispatcher::clearChannelLearn(param, channelId, refreshMidiWindows_);
+}
+
+
+void clearMasterMidiLearn (int param)
+{
+ m::midiDispatcher::clearMasterLearn(param, refreshMidiWindows_);
+}
+
+
+#ifdef WITH_VST
+
+void clearPluginMidiLearn (int param, ID pluginId)
+{
+ m::midiDispatcher::clearPluginLearn(param, pluginId, refreshMidiWindows_);
+}
+
+#endif
}}} // giada::c::io::
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
#include <atomic>
#include "core/types.h"
#include "core/midiEvent.h"
+#include "core/model/model.h"
namespace giada {
void setSampleChannelKey(ID channelId, int k);
-void midiLearn(m::MidiEvent e, std::atomic<uint32_t>& param, ID channelId);
+void startChannelMidiLearn(int param, ID channelId);
+void startMasterMidiLearn (int param);
+void stopMidiLearn();
+void clearChannelMidiLearn(int param, ID channelId);
+void clearMasterMidiLearn (int param);
+#ifdef WITH_VST
+void startPluginMidiLearn (int paramIndex, ID pluginId);
+void clearPluginMidiLearn (int param, ID pluginId);
+#endif
}}} // giada::c::io::
#endif
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
void toggleInputRec()
{
- if (!m::recManager::toggleInputRec(static_cast<RecTriggerMode>(m::conf::recTriggerMode)))
+ if (!m::recManager::toggleInputRec(static_cast<RecTriggerMode>(m::conf::conf.recTriggerMode)))
v::gdAlert("No channels armed/available for audio recording.");
}
void startActionRec()
{
- m::recManager::startActionRec(static_cast<RecTriggerMode>(m::conf::recTriggerMode));
+ m::recManager::startActionRec(static_cast<RecTriggerMode>(m::conf::conf.recTriggerMode));
}
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
return;
}
- if (!m::conf::pluginPath.empty() && m::conf::pluginPath.back() != ';')
- m::conf::pluginPath += ";";
- m::conf::pluginPath += browser->getCurrentPath();
+ if (!m::conf::conf.pluginPath.empty() && m::conf::conf.pluginPath.back() != ';')
+ m::conf::conf.pluginPath += ";";
+ m::conf::conf.pluginPath += browser->getCurrentPath();
browser->do_callback();
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
{
m::model::onGet(m::model::channels, channelId, [&](m::Channel& c)
{
- static_cast<m::SampleChannel&>(c).trackerPreview.store(f);
+ static_cast<m::SampleChannel&>(c).trackerPreview = f;
});
getSampleEditorWindow()->refresh();
}
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
#include <cassert>
#include "core/model/model.h"
+#include "core/model/storage.h"
#include "core/channels/channel.h"
#include "core/channels/sampleChannel.h"
#include "core/channels/midiChannel.h"
#include "utils/log.h"
#include "utils/string.h"
#include "utils/fs.h"
+#include "gui/model.h"
#include "gui/elems/basics/progress.h"
#include "gui/elems/mainWindow/keyboard/column.h"
#include "gui/elems/mainWindow/keyboard/keyboard.h"
/* -------------------------------------------------------------------------- */
-bool savePatch_(const std::string& path, const std::string& name, bool isProject)
+bool savePatch_(const std::string& path, const std::string& name)
{
- if (!m::patch::write(name, path, isProject))
+ m::patch::init();
+ m::patch::patch.name = name;
+ m::model::store(m::patch::patch);
+ v::model::store(m::patch::patch);
+
+ if (!m::patch::write(name, path))
return false;
+
u::gui::updateMainWinLabel(name);
- m::conf::patchPath = isProject ? u::fs::getUpDir(u::fs::getUpDir(path)) : u::fs::dirname(path);
- m::patch::name = name;
+ m::conf::conf.patchPath = u::fs::getUpDir(u::fs::getUpDir(path));
u::log::print("[savePatch] patch saved as %s\n", path.c_str());
+
return true;
}
/* -------------------------------------------------------------------------- */
-void savePatch(void* data)
-{
- v::gdBrowserSave* browser = (v::gdBrowserSave*) data;
- std::string name = u::fs::stripExt(browser->getName());
- std::string fullPath = browser->getCurrentPath() + G_SLASH + name + ".gptc";
-
- if (name == "") {
- v::gdAlert("Please choose a file name.");
- return;
- }
-
- if (u::fs::fileExists(fullPath))
- if (!v::gdConfirmWin("Warning", "File exists: overwrite?"))
- return;
-
- if (savePatch_(fullPath, name, /*isProject=*/false))
- browser->do_callback();
- else
- v::gdAlert("Unable to save the patch!");
-}
-
-
-/* -------------------------------------------------------------------------- */
-
-
-void loadPatch(void* data)
+void loadProject(void* data)
{
v::gdBrowserLoad* browser = (v::gdBrowserLoad*) data;
std::string fullPath = browser->getSelectedItem();
browser->showStatusBar();
- u::log::print("[glue] loading %s...\n", fullPath.c_str());
+ u::log::print("[loadProject] load from %s\n", fullPath.c_str());
std::string fileToLoad = fullPath; // patch file to read from
std::string basePath = ""; // base path, in case of reading from a project
basePath = fullPath + G_SLASH;
}
- /* Verify that the patch file is valid first. */
+ /* Read the patch from file. */
- int ver = m::patch::verify(fileToLoad);
- if (ver != G_PATCH_OK) {
- if (ver == G_PATCH_UNREADABLE)
+ m::patch::init();
+ int res = m::patch::read(fileToLoad, basePath);
+ if (res != G_PATCH_OK) {
+ if (res == G_PATCH_UNREADABLE)
v::gdAlert("This patch is unreadable.");
else
- if (ver == G_PATCH_INVALID)
+ if (res == G_PATCH_INVALID)
v::gdAlert("This patch is not valid.");
else
- if (ver == G_PATCH_UNSUPPORTED)
+ if (res == G_PATCH_UNSUPPORTED)
v::gdAlert("This patch format is no longer supported.");
browser->hideStatusBar();
return;
- }
+ }
- /* Then reset the system and read the patch. */
+ if (!isProject)
+ v::gdAlert("Support for raw patches is deprecated\nand will be removed soon!");
- m::init::reset();
+ /* Then reset the system (it disables mixer) and fill the model. */
- if (m::patch::read(fileToLoad, basePath) != G_PATCH_OK) {
- v::gdAlert("This patch is unreadable.");
- m::mixer::enable();
- return;
- }
+ m::init::reset();
+ m::model::load(m::patch::patch);
+ v::model::load(m::patch::patch);
- /* Prepare Mixer and Recorder. The latter has to recompute the actions
- positions if the current samplerate != patch samplerate. */
+ /* Prepare the engine. Recorder has to recompute the actions positions if
+ the current samplerate != patch samplerate. Clock needs to update frames
+ in sequencer. */
+ m::mixer::allocVirtualInput(m::clock::getFramesInLoop());
m::mh::updateSoloCount();
- m::recorderHandler::updateSamplerate(m::conf::samplerate, m::patch::samplerate);
-
- /* Save patchPath by taking the last dir of the broswer, in order to reuse
- it the next time. */
-
- m::conf::patchPath = u::fs::dirname(fullPath);
+ m::recorderHandler::updateSamplerate(m::conf::conf.samplerate, m::patch::patch.samplerate);
+ m::clock::recomputeFrames();
/* Mixer is ready to go back online. */
m::mixer::enable();
- /* Update Main Window's title. */
-
- u::gui::updateMainWinLabel(m::patch::name);
+ /* Utilities and cosmetics. Save patchPath by taking the last dir of the
+	   browser, in order to reuse it the next time. Also update UI. */
- u::log::print("[glue] patch loaded successfully\n");
+ m::conf::conf.patchPath = u::fs::dirname(fullPath);
+ u::gui::updateMainWinLabel(m::patch::patch.name);
#ifdef WITH_VST
saveWavesToProject_(fullPath);
- if (savePatch_(gptcPath, name, /*isProject=*/true))
+ if (savePatch_(gptcPath, name))
browser->do_callback();
else
v::gdAlert("Unable to save the project!");
int res = c::channel::loadChannel(browser->getChannelId(), fullPath);
if (res == G_RES_OK) {
- m::conf::samplePath = u::fs::dirname(fullPath);
+ m::conf::conf.samplePath = u::fs::dirname(fullPath);
browser->do_callback();
G_MainWin->delSubWindow(WID_SAMPLE_EDITOR); // if editor is open
}
/* Update last used path in conf, so that it can be reused next time. */
- m::conf::samplePath = u::fs::dirname(filePath);
+ m::conf::conf.samplePath = u::fs::dirname(filePath);
/* Update logical and edited states in Wave. */
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
namespace c {
namespace storage
{
-void loadPatch (void* data);
-void savePatch (void* data);
+void loadProject(void* data);
void saveProject(void* data);
void saveSample (void* data);
void loadSample (void* data);
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
#include <FL/Fl_Pixmap.H>
#include <FL/fl_draw.H>
-#include <jansson.h>
#include "core/conf.h"
#include "core/const.h"
#include "core/graphics.h"
#endif
#include "utils/gui.h"
#include "utils/string.h"
-#include "utils/ver.h"
#include "gui/elems/basics/button.h"
#include "gui/elems/basics/box.h"
#include "about.h"
{
gdAbout::gdAbout()
#ifdef WITH_VST
-: gdWindow(340, 435, "About Giada")
+: gdWindow(340, 415, "About Giada")
#else
-: gdWindow(340, 350, "About Giada")
+: gdWindow(340, 330, "About Giada")
#endif
{
- if (m::conf::aboutX)
- resize(m::conf::aboutX, m::conf::aboutY, w(), h());
-
set_modal();
logo = new geBox(8, 20, 324, 86);
- text = new geBox(8, 120, 324, 145);
+ text = new geBox(8, 120, 324, 140);
close = new geButton(252, h()-28, 80, 20, "Close");
#ifdef WITH_VST
vstLogo = new geBox(8, 265, 324, 50);
logo->image(new Fl_Pixmap(giada_logo_xpm));
text->align(FL_ALIGN_CENTER | FL_ALIGN_INSIDE | FL_ALIGN_TOP);
-
- std::string message = u::string::format(
- "Version %s (" BUILD_DATE ")\n\n"
- "Developed by Monocasual Laboratories\n"
- "Based on FLTK (%d.%d.%d), RtAudio (%s),\n"
- "RtMidi (%s), Libsamplerate, Jansson (%s),\n"
- "Libsndfile (%s)"
-#ifdef WITH_VST
- ", JUCE (%d.%d.%d)\n\n"
-#else
- "\n\n"
-#endif
+ text->copy_label(std::string(
+ "Version " + std::string(G_VERSION_STR) + " (" BUILD_DATE ")\n\n"
+ "Developed by Monocasual Laboratories\n\n"
"Released under the terms of the GNU General\n"
"Public License (GPL v3)\n\n"
"News, infos, contacts and documentation:\n"
- "www.giadamusic.com",
- G_VERSION_STR, FL_MAJOR_VERSION, FL_MINOR_VERSION, FL_PATCH_VERSION,
- u::ver::getRtAudioVersion().c_str(),
- u::ver::getRtMidiVersion().c_str(),
- JANSSON_VERSION, u::ver::getLibsndfileVersion().c_str()
-#ifdef WITH_VST
- , JUCE_MAJOR_VERSION, JUCE_MINOR_VERSION, JUCE_BUILDNUMBER
-#endif
- );
-
- int tw = 0;
- int th = 0;
- fl_measure(message.c_str(), tw, th);
- text->copy_label(message.c_str());
- text->size(text->w(), th);
+ "www.giadamusic.com").c_str());
#ifdef WITH_VST
+
vstLogo->image(new Fl_Pixmap(vstLogo_xpm));
- vstLogo->position(vstLogo->x(), text->y()+text->h()+8);
+ vstLogo->position(vstLogo->x(), text->y() + text->h() + 8);
vstText->label(
"VST Plug-In Technology by Steinberg\n"
"VST is a trademark of Steinberg\nMedia Technologies GmbH"
/* -------------------------------------------------------------------------- */
-gdAbout::~gdAbout()
-{
- m::conf::aboutX = x();
- m::conf::aboutY = y();
-}
-
-
-/* -------------------------------------------------------------------------- */
-
-
void gdAbout::cb_close(Fl_Widget* w, void* p) { ((gdAbout*)p)->cb_close(); }
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
public:
gdAbout();
- ~gdAbout();
static void cb_close(Fl_Widget* w, void* p);
inline void cb_close();
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
{
using namespace giada::m;
- if (conf::actionEditorW) {
- resize(conf::actionEditorX, conf::actionEditorY, conf::actionEditorW, conf::actionEditorH);
- ratio = conf::actionEditorZoom;
+ if (conf::conf.actionEditorW) {
+ resize(conf::conf.actionEditorX, conf::conf.actionEditorY,
+ conf::conf.actionEditorW, conf::conf.actionEditorH);
+ ratio = conf::conf.actionEditorZoom;
}
}
{
using namespace giada::m;
- conf::actionEditorX = x();
- conf::actionEditorY = y();
- conf::actionEditorW = w();
- conf::actionEditorH = h();
- conf::actionEditorZoom = ratio;
+ conf::conf.actionEditorX = x();
+ conf::conf.actionEditorY = y();
+ conf::conf.actionEditorW = w();
+ conf::conf.actionEditorH = h();
+ conf::conf.actionEditorZoom = ratio;
}
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
viewport = new geScroll(G_GUI_OUTER_MARGIN, upperArea->y()+upperArea->h()+G_GUI_OUTER_MARGIN, w()-16, h()-44);
m_ne = new geNoteEditor(viewport->x(), viewport->y(), this);
- m_ner = new geResizerBar(m_ne->x(), m_ne->y()+m_ne->h(), viewport->w(), RESIZER_BAR_H, MIN_WIDGET_H);
+ m_ner = new geResizerBar(m_ne->x(), m_ne->y()+m_ne->h(), viewport->w(), RESIZER_BAR_H, MIN_WIDGET_H, geResizerBar::VERTICAL);
viewport->add(m_ne);
viewport->add(m_ner);
m_ve = new geVelocityEditor(viewport->x(), m_ne->y()+m_ne->h()+RESIZER_BAR_H);
- m_ver = new geResizerBar(m_ve->x(), m_ve->y()+m_ve->h(), viewport->w(), RESIZER_BAR_H, MIN_WIDGET_H);
+ m_ver = new geResizerBar(m_ve->x(), m_ve->y()+m_ve->h(), viewport->w(), RESIZER_BAR_H, MIN_WIDGET_H, geResizerBar::VERTICAL);
viewport->add(m_ve);
viewport->add(m_ver);
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
viewport = new geScroll(8, 36, w()-16, h()-44);
m_ae = new geSampleActionEditor(viewport->x(), viewport->y());
- m_aer = new geResizerBar(m_ae->x(), m_ae->y()+m_ae->h(), viewport->w(), RESIZER_BAR_H, MIN_WIDGET_H);
+ m_aer = new geResizerBar(m_ae->x(), m_ae->y()+m_ae->h(), viewport->w(), RESIZER_BAR_H, MIN_WIDGET_H, geResizerBar::VERTICAL);
viewport->add(m_ae);
viewport->add(m_aer);
m_ee = new geEnvelopeEditor(viewport->x(), m_ae->y()+m_ae->h()+RESIZER_BAR_H, "volume");
- m_eer = new geResizerBar(m_ee->x(), m_ee->y()+m_ee->h(), viewport->w(), RESIZER_BAR_H, MIN_WIDGET_H);
+ m_eer = new geResizerBar(m_ee->x(), m_ee->y()+m_ee->h(), viewport->w(), RESIZER_BAR_H, MIN_WIDGET_H, geResizerBar::VERTICAL);
viewport->add(m_ee);
viewport->add(m_eer);
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
namespace v
{
gdBeatsInput::gdBeatsInput()
-: gdWindow(180, 36, "Beats")
+: gdWindow(u::gui::centerWindowX(180), u::gui::centerWindowY(36), 180, 36, "Beats")
{
- if (m::conf::beatsX)
- resize(m::conf::beatsX, m::conf::beatsY, w(), h());
-
set_modal();
beats = new geInput(8, 8, 43, G_GUI_UNIT);
/* -------------------------------------------------------------------------- */
-gdBeatsInput::~gdBeatsInput()
-{
- m::conf::beatsX = x();
- m::conf::beatsY = y();
-}
-
-
-/* -------------------------------------------------------------------------- */
-
-
void gdBeatsInput::cb_update(Fl_Widget* w, void* p) { ((gdBeatsInput*)p)->cb_update(); }
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
public:
gdBeatsInput();
- ~gdBeatsInput();
private:
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
namespace v
{
gdBpmInput::gdBpmInput(const char* label)
-: gdWindow(144, 36, "Bpm")
+: gdWindow(u::gui::centerWindowX(144), u::gui::centerWindowY(36), 144, 36, "Bpm")
{
- if (m::conf::bpmX)
- resize(m::conf::bpmX, m::conf::bpmY, w(), h());
-
set_modal();
input_a = new geInput(8, 8, 30, G_GUI_UNIT);
/* -------------------------------------------------------------------------- */
-gdBpmInput::~gdBpmInput()
-{
- m::conf::bpmX = x();
- m::conf::bpmY = y();
-}
-
-
-/* -------------------------------------------------------------------------- */
-
-
void gdBpmInput::cb_update(Fl_Widget* w, void* p) { ((gdBpmInput*)p)->cb_update(); }
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
public:
gdBpmInput(const char* label); // pointer to mainWin->timing->bpm->label()
- ~gdBpmInput();
private:
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
{
gdBrowserBase::gdBrowserBase(const std::string& title, const std::string& path,
std::function<void(void*)> callback, ID channelId)
-: gdWindow (m::conf::browserX, m::conf::browserY, m::conf::browserW,
- m::conf::browserH, title.c_str()),
+: gdWindow (m::conf::conf.browserX, m::conf::conf.browserY, m::conf::conf.browserW,
+ m::conf::conf.browserH, title.c_str()),
m_callback (callback),
m_channelId(channelId)
{
browser = new geBrowser(8, groupTop->y()+groupTop->h()+8, w()-16, h()-101);
browser->loadDir(path);
- if (path == m::conf::browserLastPath)
- browser->preselect(m::conf::browserPosition, m::conf::browserLastValue);
+ if (path == m::conf::conf.browserLastPath)
+ browser->preselect(m::conf::conf.browserPosition, m::conf::conf.browserLastValue);
Fl_Group* groupButtons = new Fl_Group(8, browser->y()+browser->h()+8, w()-16, 20);
ok = new geButton(w()-88, groupButtons->y(), 80, 20);
gdBrowserBase::~gdBrowserBase()
{
- m::conf::browserX = x();
- m::conf::browserY = y();
- m::conf::browserW = w();
- m::conf::browserH = h();
- m::conf::browserPosition = browser->position();
- m::conf::browserLastPath = browser->getCurrentDir();
- m::conf::browserLastValue = browser->value();
+ m::conf::conf.browserX = x();
+ m::conf::conf.browserY = y();
+ m::conf::conf.browserW = w();
+ m::conf::conf.browserH = h();
+ m::conf::conf.browserPosition = browser->position();
+ m::conf::conf.browserLastPath = browser->getCurrentDir();
+ m::conf::conf.browserLastValue = browser->value();
}
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
namespace v
{
gdChannelNameInput::gdChannelNameInput(ID channelId)
-: gdWindow (400, 64, "New channel name"),
+: gdWindow (u::gui::centerWindowX(400), u::gui::centerWindowY(64), 400, 64, "New channel name"),
m_channelId(channelId)
{
- if (m::conf::nameX)
- resize(m::conf::nameX, m::conf::nameY, w(), h());
-
set_modal();
m_name = new geInput(G_GUI_OUTER_MARGIN, G_GUI_OUTER_MARGIN, w() - (G_GUI_OUTER_MARGIN * 2), G_GUI_UNIT);
/* -------------------------------------------------------------------------- */
-gdChannelNameInput::~gdChannelNameInput()
-{
- m::conf::nameX = x();
- m::conf::nameY = y();
-}
-
-
-/* -------------------------------------------------------------------------- */
-
-
void gdChannelNameInput::cb_update(Fl_Widget* w, void* p) { ((gdChannelNameInput*)p)->cb_update(); }
void gdChannelNameInput::cb_cancel(Fl_Widget* w, void* p) { ((gdChannelNameInput*)p)->cb_cancel(); }
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
public:
gdChannelNameInput(ID channelId);
- ~gdChannelNameInput();
private:
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
namespace giada {
namespace v
{
-gdConfig::gdConfig(int w, int h) : gdWindow(w, h, "Configuration")
+gdConfig::gdConfig(int w, int h)
+: gdWindow(u::gui::centerWindowX(w), u::gui::centerWindowY(h), w, h, "Configuration")
{
- if (m::conf::configX)
- resize(m::conf::configX, m::conf::configY, this->w(), this->h());
-
Fl_Tabs* tabs = new Fl_Tabs(8, 8, w-16, h-44);
tabs->box(G_CUSTOM_BORDER_BOX);
tabs->labelcolor(G_COLOR_LIGHT_2);
/* -------------------------------------------------------------------------- */
-gdConfig::~gdConfig()
-{
- m::conf::configX = x();
- m::conf::configY = y();
-}
-
-
-/* -------------------------------------------------------------------------- */
-
-
void gdConfig::cb_save_config(Fl_Widget* w, void* p) { ((gdConfig*)p)->cb_save_config(); }
void gdConfig::cb_cancel (Fl_Widget* w, void* p) { ((gdConfig*)p)->cb_cancel(); }
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
public:
gdConfig(int w, int h);
- ~gdConfig();
#ifdef WITH_VST
void refreshVstPath();
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
body = "Device name: " + m::kernelAudio::getDeviceName(dev) + "\n";
body += "Total output(s): " + std::to_string(m::kernelAudio::getMaxOutChans(dev)) + "\n";
- body += "Total intput(s): " + std::to_string(m::kernelAudio::getMaxInChans(dev)) + "\n";
+ body += "Total input(s): " + std::to_string(m::kernelAudio::getMaxInChans(dev)) + "\n";
body += "Duplex channel(s): " + std::to_string(m::kernelAudio::getDuplexChans(dev)) + "\n";
body += "Default output: " + std::string(m::kernelAudio::isDefaultOut(dev) ? "yes" : "no") + "\n";
body += "Default input: " + std::string(m::kernelAudio::isDefaultIn(dev) ? "yes" : "no") + "\n";
u::gui::setFavicon(this);
show();
}
-}} // giada::v::
\ No newline at end of file
+}} // giada::v::
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
{
Fl::visible_focus(0);
- Fl::background(25, 25, 25);
+ Fl::background(25, 25, 25); // TODO use G_COLOR_GREY_1
Fl::set_boxtype(G_CUSTOM_BORDER_BOX, g_customBorderBox, 1, 1, 2, 2);
Fl::set_boxtype(G_CUSTOM_UP_BOX, g_customUpBox, 1, 1, 2, 2);
size_range(G_MIN_GUI_WIDTH, G_MIN_GUI_HEIGHT);
mainMenu = new v::geMainMenu(8, 0);
+#if defined(WITH_VST)
mainIO = new v::geMainIO(408, 8);
+#else
+ mainIO = new v::geMainIO(476, 8);
+#endif
mainTransport = new v::geMainTransport(8, 39);
mainTimer = new v::geMainTimer(598, 44);
beatMeter = new v::geBeatMeter(100, 83, 609, 20);
gdMainWindow::~gdMainWindow()
{
- m::conf::mainWindowX = x();
- m::conf::mainWindowY = y();
- m::conf::mainWindowW = w();
- m::conf::mainWindowH = h();
+ m::conf::conf.mainWindowX = x();
+ m::conf::conf.mainWindowY = y();
+ m::conf::conf.mainWindowW = w();
+ m::conf::conf.mainWindowH = h();
}
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
#include "window.h"
-class Fl_Widget;
-
-
namespace giada {
namespace v
{
void refresh() override;
void rebuild() override;
+ /* clearKeyboard
+ Resets Keyboard to initial state, with no columns. */
+
void clearKeyboard();
geKeyboard* keyboard;
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
* -------------------------------------------------------------------------- */
-#include "core/channels/channel.h"
-#include "core/midiDispatcher.h"
+#include "glue/io.h"
#include "core/conf.h"
-#include "utils/log.h"
-#include "gui/elems/midiLearner.h"
#include "midiInputBase.h"
gdMidiInputBase::~gdMidiInputBase()
{
- m::midiDispatcher::stopMidiLearn();
+ c::io::stopMidiLearn();
- m::conf::midiInputX = x();
- m::conf::midiInputY = y();
- m::conf::midiInputW = w();
- m::conf::midiInputH = h();
+ m::conf::conf.midiInputX = x();
+ m::conf::conf.midiInputY = y();
+ m::conf::conf.midiInputW = w();
+ m::conf::conf.midiInputH = h();
}
void gdMidiInputBase::refresh()
{
- for (geMidiLearner* l : m_learners)
+ for (geMidiLearnerBase* l : m_learners)
l->refresh();
}
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
#include "gui/dialogs/window.h"
-#include "gui/elems/midiLearner.h"
+#include "gui/elems/midiIO/midiLearnerBase.h"
class geButton;
{
public:
- gdMidiInputBase(int x, int y, int w, int h, const char* title);
- ~gdMidiInputBase();
+ virtual ~gdMidiInputBase();
void refresh() override;
protected:
+ gdMidiInputBase(int x, int y, int w, int h, const char* title);
+
static const int LEARNER_WIDTH = 284;
static void cb_close(Fl_Widget* w, void* p);
void cb_close();
- std::vector<geMidiLearner*> m_learners;
+ std::vector<geMidiLearnerBase*> m_learners;
geButton* m_ok;
geCheck* m_enable;
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
#include "core/const.h"
#include "core/conf.h"
#ifdef WITH_VST
-#include "core/pluginHost.h"
#include "core/plugin.h"
#endif
#include "utils/string.h"
-#include "gui/elems/midiLearner.h"
+#include "gui/elems/midiIO/midiLearnerChannel.h"
+#include "gui/elems/midiIO/midiLearnerPlugin.h"
#include "gui/elems/basics/scroll.h"
#include "gui/elems/basics/box.h"
#include "gui/elems/basics/button.h"
namespace v
{
gdMidiInputChannel::gdMidiInputChannel(ID channelId)
-: gdMidiInputBase(m::conf::midiInputX, m::conf::midiInputY, m::conf::midiInputW,
- m::conf::midiInputH, "MIDI Input Setup"),
+: gdMidiInputBase(m::conf::conf.midiInputX, m::conf::conf.midiInputY, m::conf::conf.midiInputW,
+ m::conf::conf.midiInputH, "MIDI Input Setup"),
m_channelId (channelId)
{
m::model::ChannelsLock l(m::model::channels);
m_container->end();
+ for (auto* l : m_learners)
+ c.midiIn ? l->activate() : l->deactivate();
+
Fl_Group* groupButtons = new Fl_Group(8, m_container->y()+m_container->h()+8, m_container->w(), 20);
groupButtons->begin();
groupButtons->resizable(spacer);
groupButtons->end();
-
m_ok->callback(cb_close, (void*)this);
m_enable->value(c.midiIn);
end();
- size_range(G_DEFAULT_MIDI_INPUT_UI_W, G_DEFAULT_MIDI_INPUT_UI_H);
-
u::gui::setFavicon(this);
set_modal();
show();
void gdMidiInputChannel::addChannelLearners()
{
m::model::ChannelsLock l(m::model::channels);
- m::Channel& c = m::model::get(m::model::channels, m_channelId);
+ const m::Channel& c = m::model::get(m::model::channels, m_channelId);
Fl_Pack* pack = new Fl_Pack(m_container->x(), m_container->y(), LEARNER_WIDTH, 200);
pack->spacing(G_GUI_INNER_MARGIN);
pack->begin();
geBox* header = new geBox(0, 0, LEARNER_WIDTH, G_GUI_UNIT, "Channel");
header->box(FL_BORDER_BOX);
- m_learners.push_back(new geMidiLearner(0, 0, LEARNER_WIDTH, "key press", c.midiInKeyPress, m_channelId));
- m_learners.push_back(new geMidiLearner(0, 0, LEARNER_WIDTH, "key release", c.midiInKeyRel, m_channelId));
- m_learners.push_back(new geMidiLearner(0, 0, LEARNER_WIDTH, "key kill", c.midiInKill, m_channelId));
- m_learners.push_back(new geMidiLearner(0, 0, LEARNER_WIDTH, "arm", c.midiInArm, m_channelId));
- m_learners.push_back(new geMidiLearner(0, 0, LEARNER_WIDTH, "mute", c.midiInMute, m_channelId));
- m_learners.push_back(new geMidiLearner(0, 0, LEARNER_WIDTH, "solo", c.midiInSolo, m_channelId));
- m_learners.push_back(new geMidiLearner(0, 0, LEARNER_WIDTH, "volume", c.midiInVolume, m_channelId));
+ m_learners.push_back(new geMidiLearnerChannel(0, 0, LEARNER_WIDTH, "key press", G_MIDI_IN_KEYPRESS, c.midiInKeyPress, m_channelId));
+ m_learners.push_back(new geMidiLearnerChannel(0, 0, LEARNER_WIDTH, "key release", G_MIDI_IN_KEYREL, c.midiInKeyRel, m_channelId));
+ m_learners.push_back(new geMidiLearnerChannel(0, 0, LEARNER_WIDTH, "key kill", G_MIDI_IN_KILL, c.midiInKill, m_channelId));
+ m_learners.push_back(new geMidiLearnerChannel(0, 0, LEARNER_WIDTH, "arm", G_MIDI_IN_ARM, c.midiInArm, m_channelId));
+ m_learners.push_back(new geMidiLearnerChannel(0, 0, LEARNER_WIDTH, "mute", G_MIDI_IN_MUTE, c.midiInMute, m_channelId));
+ m_learners.push_back(new geMidiLearnerChannel(0, 0, LEARNER_WIDTH, "solo", G_MIDI_IN_SOLO, c.midiInSolo, m_channelId));
+ m_learners.push_back(new geMidiLearnerChannel(0, 0, LEARNER_WIDTH, "volume", G_MIDI_IN_VOLUME, c.midiInVolume, m_channelId));
if (c.type == ChannelType::SAMPLE) {
- m::SampleChannel& sc = static_cast<m::SampleChannel&>(c);
- m_learners.push_back(new geMidiLearner(0, 0, LEARNER_WIDTH, "pitch", sc.midiInPitch, m_channelId));
- m_learners.push_back(new geMidiLearner(0, 0, LEARNER_WIDTH, "read actions", sc.midiInReadActions, m_channelId));
+ const m::SampleChannel& sc = static_cast<const m::SampleChannel&>(c);
+ m_learners.push_back(new geMidiLearnerChannel(0, 0, LEARNER_WIDTH, "pitch", G_MIDI_IN_PITCH, sc.midiInPitch, m_channelId));
+ m_learners.push_back(new geMidiLearnerChannel(0, 0, LEARNER_WIDTH, "read actions", G_MIDI_IN_READ_ACTIONS, sc.midiInReadActions, m_channelId));
}
pack->end();
}
geBox* header = new geBox(0, 0, LEARNER_WIDTH, G_GUI_UNIT, p.getName().c_str());
header->box(FL_BORDER_BOX);
- for (int k=0; k<p.getNumParameters(); k++)
- m_learners.push_back(new geMidiLearner(0, 0, LEARNER_WIDTH,
- p.getParameterName(k).c_str(), p.midiInParams.at(k), m_channelId));
+ for (int k = 0; k < p.getNumParameters(); k++)
+ m_learners.push_back(new geMidiLearnerPlugin(0, 0, LEARNER_WIDTH, p.getParameterName(k), k, p.midiInParams.at(k), p.id));
pack->end();
}
{
c.midiIn = m_enable->value();
});
+
m_enable->value() ? m_channel->activate() : m_channel->deactivate();
+
+ for (auto* l : m_learners)
+ m_enable->value() ? l->activate() : l->deactivate();
}
m::model::onSwap(m::model::channels, m_channelId, [&](m::Channel& c)
{
c.midiInFilter = m_channel->value() == 0 ? -1 : m_channel->value() - 1;
- u::log::print("[gdMidiInputChannel] Set MIDI channel to %d\n", c.midiInFilter.load());
+ u::log::print("[gdMidiInputChannel] Set MIDI channel to %d\n", c.midiInFilter);
});
}
}} // giada::v::
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
#include "utils/gui.h"
#include "core/conf.h"
#include "core/const.h"
-#include "gui/elems/midiLearner.h"
+#include "core/model/model.h"
+#include "gui/elems/midiIO/midiLearnerMaster.h"
#include "gui/elems/basics/button.h"
#include "gui/elems/basics/check.h"
#include "gui/elems/basics/choice.h"
namespace v
{
gdMidiInputMaster::gdMidiInputMaster()
-: gdMidiInputBase(0, 0, 300, 284, "MIDI Input Setup (global)")
+: gdMidiInputBase(m::conf::conf.midiInputX, m::conf::conf.midiInputY, 300, 284, "MIDI Input Setup (global)")
{
set_modal();
+ m::model::midiIn.lock();
+ const m::model::MidiIn* midiIn = m::model::midiIn.get();
+
Fl_Group* groupHeader = new Fl_Group(G_GUI_OUTER_MARGIN, G_GUI_OUTER_MARGIN, w(), 20);
groupHeader->begin();
m_enable = new geCheck(G_GUI_OUTER_MARGIN, G_GUI_OUTER_MARGIN, 120, G_GUI_UNIT,
- "m_enable MIDI input");
+ "Enable MIDI input");
m_channel = new geChoice(m_enable->x()+m_enable->w()+44, G_GUI_OUTER_MARGIN, 120, G_GUI_UNIT);
groupHeader->resizable(nullptr);
LEARNER_WIDTH, 212);
pack->spacing(G_GUI_INNER_MARGIN);
pack->begin();
- m_learners.push_back(new geMidiLearner(0, 0, LEARNER_WIDTH, "rewind", m::conf::midiInRewind, 0));
- m_learners.push_back(new geMidiLearner(0, 0, LEARNER_WIDTH, "play/stop", m::conf::midiInStartStop, 0));
- m_learners.push_back(new geMidiLearner(0, 0, LEARNER_WIDTH, "action recording", m::conf::midiInActionRec, 0));
- m_learners.push_back(new geMidiLearner(0, 0, LEARNER_WIDTH, "input recording", m::conf::midiInInputRec, 0));
- m_learners.push_back(new geMidiLearner(0, 0, LEARNER_WIDTH, "metronome", m::conf::midiInMetronome, 0));
- m_learners.push_back(new geMidiLearner(0, 0, LEARNER_WIDTH, "input volume", m::conf::midiInVolumeIn, 0));
- m_learners.push_back(new geMidiLearner(0, 0, LEARNER_WIDTH, "output volume", m::conf::midiInVolumeOut, 0));
- m_learners.push_back(new geMidiLearner(0, 0, LEARNER_WIDTH, "sequencer ×2", m::conf::midiInBeatDouble, 0));
- m_learners.push_back(new geMidiLearner(0, 0, LEARNER_WIDTH, "sequencer ÷2", m::conf::midiInBeatHalf, 0));
+ m_learners.push_back(new geMidiLearnerMaster(0, 0, LEARNER_WIDTH, "rewind", G_MIDI_IN_REWIND, midiIn->rewind));
+ m_learners.push_back(new geMidiLearnerMaster(0, 0, LEARNER_WIDTH, "play/stop", G_MIDI_IN_START_STOP, midiIn->startStop));
+ m_learners.push_back(new geMidiLearnerMaster(0, 0, LEARNER_WIDTH, "action recording", G_MIDI_IN_ACTION_REC, midiIn->actionRec));
+ m_learners.push_back(new geMidiLearnerMaster(0, 0, LEARNER_WIDTH, "input recording", G_MIDI_IN_INPUT_REC, midiIn->inputRec));
+ m_learners.push_back(new geMidiLearnerMaster(0, 0, LEARNER_WIDTH, "metronome", G_MIDI_IN_METRONOME, midiIn->metronome));
+ m_learners.push_back(new geMidiLearnerMaster(0, 0, LEARNER_WIDTH, "input volume", G_MIDI_IN_VOLUME_IN, midiIn->volumeIn));
+ m_learners.push_back(new geMidiLearnerMaster(0, 0, LEARNER_WIDTH, "output volume", G_MIDI_IN_VOLUME_OUT, midiIn->volumeOut));
+ m_learners.push_back(new geMidiLearnerMaster(0, 0, LEARNER_WIDTH, "sequencer ×2", G_MIDI_IN_BEAT_DOUBLE, midiIn->beatDouble));
+ m_learners.push_back(new geMidiLearnerMaster(0, 0, LEARNER_WIDTH, "sequencer ÷2", G_MIDI_IN_BEAT_HALF, midiIn->beatHalf));
pack->end();
-
m_ok = new geButton(w()-88, pack->y()+pack->h()+G_GUI_OUTER_MARGIN, 80, G_GUI_UNIT, "Close");
end();
+ for (geMidiLearnerBase* l : m_learners)
+ midiIn->enabled ? l->activate() : l->deactivate();
+
m_ok->callback(cb_close, (void*)this);
- m_enable->value(m::conf::midiIn);
+ m_enable->value(midiIn->enabled);
m_enable->callback(cb_enable, (void*)this);
m_channel->add("Channel (any)");
m_channel->add("Channel 14");
m_channel->add("Channel 15");
m_channel->add("Channel 16");
- m_channel->value(m::conf::midiInFilter == -1 ? 0 : m::conf::midiInFilter + 1);
+
+ m_channel->value(midiIn->filter == -1 ? 0 : midiIn->filter + 1);
m_channel->callback(cb_setChannel, (void*)this);
+ midiIn->enabled ? m_channel->activate() : m_channel->deactivate();
+
+ m::model::midiIn.unlock();
u::gui::setFavicon(this);
show();
void gdMidiInputMaster::cb_enable()
{
- m::conf::midiIn = m_enable->value();
+ m::model::onSwap(m::model::midiIn, [&](m::model::MidiIn& m)
+ {
+ m.enabled = m_enable->value();
+ });
+
m_enable->value() ? m_channel->activate() : m_channel->deactivate();
+
+ for (geMidiLearnerBase* l : m_learners)
+ m_enable->value() ? l->activate() : l->deactivate();
}
void gdMidiInputMaster::cb_setChannel()
{
- m::conf::midiInFilter = m_channel->value() == 0 ? -1 : m_channel->value() - 1;
+ m::model::onSwap(m::model::midiIn, [&](m::model::MidiIn& m)
+ {
+ m.filter = m_channel->value() == 0 ? -1 : m_channel->value() - 1;
+ });
}
-
}} // giada::v::
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
#define GD_MIDI_INPUT_MASTER_H
+#include "core/model/model.h"
#include "midiInputBase.h"
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
* -------------------------------------------------------------------------- */
-#include "core/midiDispatcher.h"
-#include "utils/log.h"
-#include "utils/string.h"
-#include "gui/elems/midiLearner.h"
+#include "glue/io.h"
+#include "gui/elems/basics/check.h"
#include "midiOutputBase.h"
namespace giada {
namespace v
{
-gdMidiOutputBase::gdMidiOutputBase(int w, int h)
- : gdWindow(w, h, "Midi Output Setup")
+gdMidiOutputBase::gdMidiOutputBase(int w, int h, ID channelId)
+: gdWindow (w, h, "Midi Output Setup"),
+ m_channelId(channelId)
{
}
gdMidiOutputBase::~gdMidiOutputBase()
{
- m::midiDispatcher::stopMidiLearn();
+ c::io::stopMidiLearn();
}
void gdMidiOutputBase::refresh()
{
- for (geMidiLearner* l : m_learners)
+ for (geMidiLearnerBase* l : m_learners)
l->refresh();
}
/* -------------------------------------------------------------------------- */
-void gdMidiOutputBase::cb_close(Fl_Widget* w, void* p) { ((gdMidiOutputBase*)p)->cb_close(); }
+void gdMidiOutputBase::cb_close(Fl_Widget* w, void* p) { ((gdMidiOutputBase*)p)->cb_close(); }
+void gdMidiOutputBase::cb_enableLightning(Fl_Widget *w, void *p) { ((gdMidiOutputBase*)p)->cb_enableLightning(); }
/* -------------------------------------------------------------------------- */
/* -------------------------------------------------------------------------- */
-void gdMidiOutputBase::cb_enableLightning(Fl_Widget* w, void* p)
+void gdMidiOutputBase::cb_enableLightning()
{
- ((gdMidiOutputBase*)p)->cb_enableLightning();
-}
-
-
-/* -------------------------------------------------------------------------- */
-
+ m::model::onSwap(m::model::channels, m_channelId, [&](m::Channel& c)
+ {
+ c.midiOutL = m_enableLightning->value();
+ });
-void gdMidiOutputBase::cb_enableLightning() {}
+ for (geMidiLearnerBase* l : m_learners)
+ m_enableLightning->value() ? l->activate() : l->deactivate();
+}
/* -------------------------------------------------------------------------- */
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
#define GD_MIDI_OUTPUT_BASE_H
-#include <FL/Fl.H>
+#include "core/types.h"
#include "gui/dialogs/window.h"
-#include "gui/elems/midiLearner.h"
+#include "gui/elems/midiIO/midiLearnerBase.h"
class geButton;
{
public:
- gdMidiOutputBase(int w, int h);
+ gdMidiOutputBase(int w, int h, ID channelId);
~gdMidiOutputBase();
void refresh() override;
static void cb_close(Fl_Widget* w, void* p);
void cb_close();
- /* cb_enableLightning
- enable MIDI lightning output. */
-
static void cb_enableLightning(Fl_Widget* w, void* p);
void cb_enableLightning();
void setTitle(int chanNum);
- std::vector<geMidiLearner*> m_learners;
+ ID m_channelId;
+
+ std::vector<geMidiLearnerBase*> m_learners;
geButton* m_close;
geCheck* m_enableLightning;
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
#include "core/channels/midiChannel.h"
#include "core/model/model.h"
#include "utils/gui.h"
-#include "gui/elems/midiLearner.h"
+#include "gui/elems/midiIO/midiLearnerChannel.h"
#include "gui/elems/basics/button.h"
#include "gui/elems/basics/check.h"
#include "gui/elems/basics/choice.h"
-#include "gui/elems/mainWindow/keyboard/channel.h"
#include "midiOutputMidiCh.h"
namespace v
{
gdMidiOutputMidiCh::gdMidiOutputMidiCh(ID channelId)
-: gdMidiOutputBase(300, 168),
- m_channelId (channelId)
+: gdMidiOutputBase(300, 168, channelId)
{
m::model::ChannelsLock l(m::model::channels);
m::MidiChannel& c = static_cast<m::MidiChannel&>(m::model::get(m::model::channels, m_channelId));
m_chanListOut = new geChoice(w()-108, y()+8, 100, 20);
m_enableLightning = new geCheck(x()+8, m_chanListOut->y()+m_chanListOut->h()+8, 120, 20, "Enable MIDI lightning output");
- m_learners.push_back(new geMidiLearner(x()+8, m_enableLightning->y()+m_enableLightning->h()+8,
- w()-16, "playing", c.midiOutLplaying, m_channelId));
- m_learners.push_back(new geMidiLearner(x()+8, m_enableLightning->y()+m_enableLightning->h()+32,
- w()-16, "mute", c.midiOutLmute, m_channelId));
- m_learners.push_back(new geMidiLearner(x()+8, m_enableLightning->y()+m_enableLightning->h()+56,
- w()-16, "solo", c.midiOutLsolo, m_channelId));
+ m_learners.push_back(new geMidiLearnerChannel(x()+8, m_enableLightning->y()+m_enableLightning->h()+8,
+ w()-16, "playing", G_MIDI_OUT_L_PLAYING, c.midiOutLplaying, m_channelId));
+ m_learners.push_back(new geMidiLearnerChannel(x()+8, m_enableLightning->y()+m_enableLightning->h()+32,
+ w()-16, "mute", G_MIDI_OUT_L_MUTE, c.midiOutLmute, m_channelId));
+ m_learners.push_back(new geMidiLearnerChannel(x()+8, m_enableLightning->y()+m_enableLightning->h()+56,
+ w()-16, "solo", G_MIDI_OUT_L_SOLO, c.midiOutLsolo, m_channelId));
m_close = new geButton(w()-88, m_enableLightning->y()+m_enableLightning->h()+84, 80, 20, "Close");
m_chanListOut->add("Channel 15");
m_chanListOut->add("Channel 16");
m_chanListOut->value(0);
-
+
if (c.midiOut)
m_enableOut->value(1);
else
m_chanListOut->deactivate();
- if (c.midiOutL)
- m_enableLightning->value(1);
+ m_enableLightning->value(c.midiOutL);
+ for (geMidiLearnerBase* l : m_learners)
+ c.midiOutL ? l->activate() : l->deactivate();
m_chanListOut->value(c.midiOutChan);
+ m_chanListOut->callback(cb_setChannel, (void*)this);
- m_enableOut->callback(cb_enableChanList, (void*)this);
+ m_enableOut->callback(cb_enableOut, (void*)this);
+ m_enableLightning->callback(cb_enableLightning, (void*)this);
m_close->callback(cb_close, (void*)this);
set_modal();
/* -------------------------------------------------------------------------- */
-void gdMidiOutputMidiCh::cb_close (Fl_Widget *w, void *p) { ((gdMidiOutputMidiCh*)p)->cb_close(); }
-void gdMidiOutputMidiCh::cb_enableChanList(Fl_Widget *w, void *p) { ((gdMidiOutputMidiCh*)p)->cb_enableChanList(); }
+void gdMidiOutputMidiCh::cb_enableOut (Fl_Widget *w, void *p) { ((gdMidiOutputMidiCh*)p)->cb_enableOut(); }
+void gdMidiOutputMidiCh::cb_setChannel(Fl_Widget *w, void *p) { ((gdMidiOutputMidiCh*)p)->cb_setChannel(); }
/* -------------------------------------------------------------------------- */
-void gdMidiOutputMidiCh::cb_enableChanList()
+void gdMidiOutputMidiCh::cb_enableOut()
{
+ m::model::onSwap(m::model::channels, m_channelId, [&](m::Channel& c)
+ {
+ static_cast<m::MidiChannel&>(c).midiOut = m_enableOut->value();
+ static_cast<m::MidiChannel&>(c).midiOutChan = m_chanListOut->value();
+ });
+
m_enableOut->value() ? m_chanListOut->activate() : m_chanListOut->deactivate();
}
/* -------------------------------------------------------------------------- */
-void gdMidiOutputMidiCh::cb_close()
+void gdMidiOutputMidiCh::cb_setChannel()
{
- m::model::onGet(m::model::channels, m_channelId, [&](m::Channel& c)
+ m::model::onSwap(m::model::channels, m_channelId, [&](m::Channel& c)
{
- m::MidiChannel& mc = static_cast<m::MidiChannel&>(c);
- mc.midiOut = m_enableOut->value();
- mc.midiOutChan = m_chanListOut->value();
- mc.midiOutL = m_enableLightning->value();
+ static_cast<m::MidiChannel&>(c).midiOutChan = m_chanListOut->value();
});
- do_callback();
}
-
}} // giada::v::
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
private:
- static void cb_enableChanList(Fl_Widget* w, void* p);
- void cb_enableChanList();
-
- /* cb_close
- override parent method, we need to do more stuff on close. */
-
- static void cb_close(Fl_Widget* w, void* p);
- void cb_close();
+ static void cb_enableOut (Fl_Widget* w, void* p);
+ static void cb_setChannel(Fl_Widget* w, void* p);
+ void cb_enableOut();
+ void cb_setChannel();
geCheck* m_enableOut;
geChoice* m_chanListOut;
-
- ID m_channelId;
};
}} // giada::v::
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
#include "core/model/model.h"
#include "core/channels/sampleChannel.h"
#include "utils/gui.h"
-#include "gui/elems/midiLearner.h"
+#include "gui/elems/midiIO/midiLearnerChannel.h"
#include "gui/elems/basics/button.h"
#include "gui/elems/basics/check.h"
#include "midiOutputSampleCh.h"
namespace v
{
gdMidiOutputSampleCh::gdMidiOutputSampleCh(ID channelId)
-: gdMidiOutputBase(300, 140),
- m_channelId (channelId)
+: gdMidiOutputBase(300, 140, channelId)
{
m::model::ChannelsLock l(m::model::channels);
m::Channel& c = m::model::get(m::model::channels, m_channelId);
setTitle(c.id);
m_enableLightning = new geCheck(8, 8, 120, 20, "Enable MIDI lightning output");
- m_learners.push_back(new geMidiLearner(8, m_enableLightning->y()+m_enableLightning->h()+8, w()-16, "playing",
- c.midiOutLplaying, m_channelId));
- m_learners.push_back(new geMidiLearner(8, m_enableLightning->y()+m_enableLightning->h()+32, w()-16, "mute",
- c.midiOutLmute, m_channelId));
- m_learners.push_back(new geMidiLearner(8, m_enableLightning->y()+m_enableLightning->h()+56, w()-16, "solo",
- c.midiOutLsolo, m_channelId));
+ m_learners.push_back(new geMidiLearnerChannel(8, m_enableLightning->y()+m_enableLightning->h()+8, w()-16, "playing",
+ G_MIDI_OUT_L_PLAYING, c.midiOutLplaying, m_channelId));
+ m_learners.push_back(new geMidiLearnerChannel(8, m_enableLightning->y()+m_enableLightning->h()+32, w()-16, "mute",
+ G_MIDI_OUT_L_MUTE, c.midiOutLmute, m_channelId));
+ m_learners.push_back(new geMidiLearnerChannel(8, m_enableLightning->y()+m_enableLightning->h()+56, w()-16, "solo",
+ G_MIDI_OUT_L_SOLO, c.midiOutLsolo, m_channelId));
m_close = new geButton(w()-88, m_enableLightning->y()+m_enableLightning->h()+84, 80, 20, "Close");
m_close->callback(cb_close, (void*)this);
m_enableLightning->value(c.midiOutL);
m_enableLightning->callback(cb_enableLightning, (void*)this);
+
+ for (geMidiLearnerBase* l : m_learners)
+ c.midiOutL ? l->activate() : l->deactivate();
set_modal();
u::gui::setFavicon(this);
show();
}
-
-
-/* -------------------------------------------------------------------------- */
-
-
-void gdMidiOutputSampleCh::cb_close(Fl_Widget* w, void* p) { ((gdMidiOutputSampleCh*)p)->cb_close(); }
-
-
-/* -------------------------------------------------------------------------- */
-
-
-void gdMidiOutputSampleCh::cb_close()
-{
- m::model::onGet(m::model::channels, m_channelId, [&](m::Channel& c)
- {
- c.midiOutL = m_enableLightning->value();
- });
- do_callback();
-}
-
}} // giada::v::
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
public:
gdMidiOutputSampleCh(ID channelId);
-
-private:
-
- /* cb_close
- Override parent method, we need to do more stuff on close. */
-
- static void cb_close(Fl_Widget* w, void* p);
- void cb_close();
-
- ID m_channelId;
};
}} // giada::v::
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
sortMethod->add("Category");
sortMethod->add("Manufacturer");
sortMethod->callback(cb_sort, (void*) this);
- sortMethod->value(m::conf::pluginSortMethod);
+ sortMethod->value(m::conf::conf.pluginSortMethod);
addBtn->callback(cb_add, (void*) this);
addBtn->shortcut(FL_Enter);
gdPluginChooser::~gdPluginChooser()
{
- m::conf::pluginChooserX = x();
- m::conf::pluginChooserY = y();
- m::conf::pluginChooserW = w();
- m::conf::pluginChooserH = h();
- m::conf::pluginSortMethod = sortMethod->value();
+ m::conf::conf.pluginChooserX = x();
+ m::conf::conf.pluginChooserY = y();
+ m::conf::conf.pluginChooserW = w();
+ m::conf::conf.pluginChooserH = h();
+ m::conf::conf.pluginSortMethod = sortMethod->value();
}
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
namespace v
{
gdPluginList::gdPluginList(ID chanID)
-: gdWindow(m::conf::pluginListX, m::conf::pluginListY, 468, 204),
+: gdWindow(m::conf::conf.pluginListX, m::conf::conf.pluginListY, 468, 204),
m_channelId(chanID)
{
end();
gdPluginList::~gdPluginList()
{
- m::conf::pluginListX = x();
- m::conf::pluginListY = y();
+ m::conf::conf.pluginListX = x();
+ m::conf::conf.pluginListY = y();
}
void gdPluginList::cb_addPlugin()
{
- int wx = m::conf::pluginChooserX;
- int wy = m::conf::pluginChooserY;
- int ww = m::conf::pluginChooserW;
- int wh = m::conf::pluginChooserH;
+ int wx = m::conf::conf.pluginChooserX;
+ int wy = m::conf::conf.pluginChooserY;
+ int ww = m::conf::conf.pluginChooserW;
+ int wh = m::conf::conf.pluginChooserH;
u::gui::openSubWindow(G_MainWin, new v::gdPluginChooser(wx, wy, ww, wh,
m_channelId), WID_FX_CHOOSER);
}
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
namespace v
{
gdSampleEditor::gdSampleEditor(ID channelId, ID waveId)
-: gdWindow (m::conf::sampleEditorX, m::conf::sampleEditorY,
- m::conf::sampleEditorW, m::conf::sampleEditorH),
+: gdWindow (m::conf::conf.sampleEditorX, m::conf::conf.sampleEditorY,
+ m::conf::conf.sampleEditorW, m::conf::conf.sampleEditorH),
m_channelId(channelId),
m_waveId (waveId)
{
gdSampleEditor::~gdSampleEditor()
{
- m::conf::sampleEditorX = x();
- m::conf::sampleEditorY = y();
- m::conf::sampleEditorW = w();
- m::conf::sampleEditorH = h();
- m::conf::sampleEditorGridVal = atoi(grid->text());
- m::conf::sampleEditorGridOn = snap->value();
+ m::conf::conf.sampleEditorX = x();
+ m::conf::conf.sampleEditorY = y();
+ m::conf::conf.sampleEditorW = w();
+ m::conf::conf.sampleEditorH = h();
+ m::conf::conf.sampleEditorGridVal = atoi(grid->text());
+ m::conf::conf.sampleEditorGridOn = snap->value();
c::sampleEditor::setPreview(m_channelId, PreviewMode::NONE);
}
grid->add("16");
grid->add("32");
grid->add("64");
- if (m::conf::sampleEditorGridVal == 0)
+ if (m::conf::conf.sampleEditorGridVal == 0)
grid->value(0);
else
- grid->value(grid->find_item(u::string::iToString(m::conf::sampleEditorGridVal).c_str()));
+ grid->value(grid->find_item(u::string::iToString(m::conf::conf.sampleEditorGridVal).c_str()));
grid->callback(cb_changeGrid, (void*)this);
- snap->value(m::conf::sampleEditorGridOn);
+ snap->value(m::conf::conf.sampleEditorGridOn);
snap->callback(cb_enableSnap, (void*)this);
/* TODO - redraw grid if != (off) */
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
void dispatchChannels_(int event)
{
- G_MainWin->keyboard->forEachChannel([=](geChannel* c)
+ G_MainWin->keyboard->forEachChannel([=](geChannel& c)
{
- if (c->handleKey(event))
- perform_(c, event);
+ if (c.handleKey(event))
+ perform_(&c, event);
});
}
}
else if (Fl::event_key() == FL_Enter && !enter_) {
enter_ = true;
- m::recManager::toggleActionRec(static_cast<RecTriggerMode>(m::conf::recTriggerMode));
+ m::recManager::toggleActionRec(static_cast<RecTriggerMode>(m::conf::conf.recTriggerMode));
}
else if (Fl::event_key() == ' ' && !space_) {
space_ = true;
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
namespace v
{
geEnvelopeEditor::geEnvelopeEditor(Pixel x, Pixel y, const char* l)
-: geBaseActionEditor(x, y, 200, m::conf::envelopeEditorH)
+: geBaseActionEditor(x, y, 200, m::conf::conf.envelopeEditorH)
{
copy_label(l);
}
geEnvelopeEditor::~geEnvelopeEditor()
{
- m::conf::envelopeEditorH = h();
+ m::conf::conf.envelopeEditorH = h();
}
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* ------------------------------------------------------------------------------
*
-* Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+* Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
active = new geCheck(gridType->x() + gridType->w() + 4, y, 20, 20);
- gridType->value(m::conf::actionEditorGridVal);
- active->value(m::conf::actionEditorGridOn);
+ gridType->value(m::conf::conf.actionEditorGridVal);
+ active->value(m::conf::conf.actionEditorGridOn);
end();
}
geGridTool::~geGridTool()
{
- m::conf::actionEditorGridVal = gridType->value();
- m::conf::actionEditorGridOn = active->value();
+ m::conf::conf.actionEditorGridVal = gridType->value();
+ m::conf::conf.actionEditorGridOn = active->value();
}
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
{
pianoRoll = new gePianoRoll(x, y, m_base->fullWidth);
- size(m_base->fullWidth, m::conf::pianoRollH);
+ size(m_base->fullWidth, m::conf::conf.pianoRollH);
type(Fl_Scroll::VERTICAL_ALWAYS);
}
geNoteEditor::~geNoteEditor()
{
- m::conf::pianoRollH = h();
- m::conf::pianoRollY = pianoRoll->y();
+ m::conf::conf.pianoRollH = h();
+ m::conf::conf.pianoRollY = pianoRoll->y();
}
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
: geBaseActionEditor(X, Y, W, 40),
pick (0)
{
- position(x(), m::conf::pianoRollY == -1 ? y()-(h()/2) : m::conf::pianoRollY);
+ position(x(), m::conf::conf.pianoRollY == -1 ? y()-(h()/2) : m::conf::conf.pianoRollY);
}
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
namespace v
{
geSampleActionEditor::geSampleActionEditor(Pixel x, Pixel y)
-: geBaseActionEditor(x, y, 200, m::conf::sampleActionEditorH)
+: geBaseActionEditor(x, y, 200, m::conf::conf.sampleActionEditorH)
{
}
geSampleActionEditor::~geSampleActionEditor()
{
- m::conf::sampleActionEditorH = h();
+ m::conf::conf.sampleActionEditorH = h();
}
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
namespace v
{
geVelocityEditor::geVelocityEditor(Pixel x, Pixel y)
-: geBaseActionEditor(x, y, 200, m::conf::velocityEditorH)
+: geBaseActionEditor(x, y, 200, m::conf::conf.velocityEditorH)
{
}
geVelocityEditor::~geVelocityEditor()
{
- m::conf::velocityEditorH = h();
+ m::conf::conf.velocityEditorH = h();
}
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
geButton::geButton(int x, int y, int w, int h, const char* l,
- const char** imgOff, const char** imgOn)
+ const char** imgOff, const char** imgOn, const char** imgDisabled)
: geBaseButton(x, y, w, h, l),
imgOff (imgOff),
imgOn (imgOn),
+ imgDisabled (imgDisabled),
bgColor0 (G_COLOR_GREY_2),
bgColor1 (G_COLOR_GREY_4),
bdColor (G_COLOR_GREY_4),
void geButton::draw()
{
geBaseButton::draw();
-
- if (!active()) txtColor = bdColor;
- else txtColor = G_COLOR_LIGHT_2;
- fl_rect(x(), y(), w(), h(), bdColor); // borders
- if (value()) { // -- clicked
- if (imgOn != nullptr)
- fl_draw_pixmap(imgOn, x()+1, y()+1);
- else
- fl_rectf(x(), y(), w(), h(), bgColor1); // covers the border
+ if (active())
+ if (value()) draw(imgOn, bgColor1, txtColor);
+ else draw(imgOff, bgColor0, txtColor);
+ else
+ draw(imgDisabled, bgColor0, bdColor);
+}
+
+
+/* -------------------------------------------------------------------------- */
+
+
+void geButton::draw(const char** img, Fl_Color bgColor, Fl_Color textColor)
+{
+ fl_rect(x(), y(), w(), h(), bdColor); // draw border
+
+ if (img != nullptr) {
+ fl_draw_pixmap(img, x()+1, y()+1);
}
- else { // -- not clicked
- fl_rectf(x()+1, y()+1, w()-2, h()-2, bgColor0); // bg inside the border
- if (imgOff != nullptr)
- fl_draw_pixmap(imgOff, x()+1, y()+1);
+ else {
+ fl_rectf(x()+1, y()+1, w()-2, h()-2, bgColor); // draw background
+ fl_color(textColor);
+ fl_font(FL_HELVETICA, G_GUI_FONT_SIZE_BASE);
+ fl_draw(label(), x()+2, y(), w()-2, h(), FL_ALIGN_CENTER);
}
- if (!active())
- fl_color(FL_INACTIVE_COLOR);
-
- fl_color(txtColor);
- fl_font(FL_HELVETICA, G_GUI_FONT_SIZE_BASE);
- fl_draw(label(), x()+2, y(), w()-2, h(), FL_ALIGN_CENTER);
-}
+}
\ No newline at end of file
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
{
public:
- geButton(int x, int y, int w, int h, const char *l=nullptr,
- const char** imgOff=nullptr, const char** imgOn=nullptr);
+ geButton(int x, int y, int w, int h, const char* l=nullptr,
+ const char** imgOff=nullptr, const char** imgOn=nullptr,
+ const char** imgDisabled=nullptr);
void draw() override;
+protected:
+
+ void draw(const char** img, Fl_Color bgColor, Fl_Color textColor);
+
const char** imgOff;
const char** imgOn;
+ const char** imgDisabled;
+
Fl_Color bgColor0; // background not clicked
Fl_Color bgColor1; // background clicked
Fl_Color bdColor; // border
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
#include <FL/Fl.H>
#include <FL/Fl_Scroll.H>
#include <FL/fl_draw.H>
-#include "../../../core/const.h"
+#include "core/const.h"
#include "resizerBar.h"
-geResizerBar::geResizerBar(int X, int Y, int W, int H, int minSize, bool type)
- : Fl_Box (X, Y, W, H),
- m_type (type),
- m_minSize(minSize),
- m_lastPos(0),
- m_hover (false)
+geResizerBar::geResizerBar(int X, int Y, int W, int H, int minSize, bool type, Fl_Widget* target)
+: Fl_Box (X, Y, W, H),
+ m_type (type),
+ m_minSize (minSize),
+ m_lastPos (0),
+ m_initialPos(0),
+ m_hover (false),
+ m_target (target)
{
if (m_type == VERTICAL) {
m_origSize = H;
void geResizerBar::handleDrag(int diff)
{
- Fl_Scroll* grp = static_cast<Fl_Scroll*>(parent());
- int top;
- int bot;
- if (m_type == VERTICAL) {
- top = y();
- bot = y()+h();
- }
- else {
- top = x();
- bot = x()+w();
- }
+ Fl_Scroll* group = static_cast<Fl_Scroll*>(parent());
+
+ const int top = m_type == VERTICAL ? y() : x();
+ const int bot = m_type == VERTICAL ? y() + h() : x() + w();
// First pass: find widget directly above us with common edge
- // Possibly clamp 'diff' if widget would get too small..
+ // Possibly clamp 'diff' if widget would get too small..
- for (int t=0; t<grp->children(); t++) {
- Fl_Widget* wd = grp->child(t);
+ for (int t = 0; t < group->children(); t++) {
+ Fl_Widget* wd = group->child(t);
if (m_type == VERTICAL) {
- if ((wd->y()+wd->h()) == top) { // found widget directly above?
- if ((wd->h()+diff) < m_minSize)
+ if ((wd->y() + wd->h()) == top) { // found widget directly above?
+ if ((wd->h() + diff) < m_minSize)
diff = wd->h() - m_minSize; // clamp
- wd->resize(wd->x(), wd->y(), wd->w(), wd->h()+diff); // change height
- break; // done with first pass
+ wd->resize(wd->x(), wd->y(), wd->w(), wd->h() + diff); // change height
+ break; // done with first pass
}
}
else {
- if ((wd->x()+wd->w()) == top) { // found widget directly above?
- if ((wd->w()+diff) < m_minSize)
+ if ((wd->x() + wd->w()) == top) { // found widget directly above?
+ if ((wd->w() + diff) < m_minSize)
diff = wd->w() - m_minSize; // clamp
- wd->resize(wd->x(), wd->y(), wd->w()+diff, wd->h()); // change width
- break; // done with first pass
+ wd->resize(wd->x(), wd->y(), wd->w() + diff, wd->h()); // change width
+ break; // done with first pass
}
}
}
// Second pass: find widgets below us, move based on clamped diff
- for (int t=0; t<grp->children(); t++) {
- Fl_Widget* wd = grp->child(t);
+ for (int t = 0; t < group->children(); t++) {
+ Fl_Widget* wd = group->child(t);
if (m_type == VERTICAL) {
- if (wd->y() >= bot) // found widget below us?
- wd->resize(wd->x(), wd->y()+diff, wd->w(), wd->h()); // change position
+ if (wd->y() >= bot) // found widget below us?
+ wd->resize(wd->x(), wd->y() + diff, wd->w(), wd->h()); // change position
}
else {
if (wd->x() >= bot)
- wd->resize(wd->x()+diff, wd->y(), wd->w(), wd->h());
+ wd->resize(wd->x() + diff, wd->y(), wd->w(), wd->h());
}
}
// Change our position last
if (m_type == VERTICAL)
- resize(x(), y()+diff, w(), h());
+ resize(x(), y() + diff, w(), h());
else
- resize(x()+diff, y(), w(), h());
+ resize(x() + diff, y(), w(), h());
- grp->init_sizes();
- grp->redraw();
+ group->init_sizes();
+ group->redraw();
}
int geResizerBar::handle(int e)
{
int ret = 0;
- int this_y;
- if (m_type == VERTICAL)
- this_y = Fl::event_y_root();
- else
- this_y = Fl::event_x_root();
+ int currentPos = m_type == VERTICAL ? Fl::event_y_root() : Fl::event_x_root();
+
switch (e) {
case FL_FOCUS:
ret = 1;
break;
case FL_PUSH:
ret = 1;
- m_lastPos = this_y;
+ m_lastPos = currentPos;
+ m_initialPos = currentPos;
break;
case FL_DRAG:
- handleDrag(this_y-m_lastPos);
- m_lastPos = this_y;
+ handleDrag(currentPos - m_lastPos);
+ m_lastPos = currentPos;
ret = 1;
+ if (onDrag != nullptr)
+ onDrag(m_target);
+ break;
+ case FL_RELEASE:
+ if (m_initialPos != currentPos && onRelease != nullptr)
+ onRelease(m_target);
break;
default: break;
}
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
#define GE_RESIZER_BAR_H
+#include <functional>
#include <FL/Fl_Box.H>
class geResizerBar : public Fl_Box
{
-private:
-
- bool m_type;
- int m_origSize;
- int m_minSize;
- int m_lastPos;
- bool m_hover;
-
- void handleDrag(int diff);
-
public:
static const int HORIZONTAL = 0;
static const int VERTICAL = 1;
- geResizerBar(int x, int y, int w, int h, int minSize, bool type=VERTICAL);
+ geResizerBar(int x, int y, int w, int h, int minSize, bool type, Fl_Widget* target=nullptr);
int handle(int e) override;
void draw() override;
void resize(int x, int y, int w, int h) override;
int getMinSize() const;
+
+ std::function<void(const Fl_Widget*)> onDrag = nullptr;
+ std::function<void(const Fl_Widget*)> onRelease = nullptr;
+
+private:
+
+ void handleDrag(int diff);
+
+ bool m_type;
+ int m_origSize;
+ int m_minSize;
+ int m_lastPos;
+ int m_initialPos;
+ bool m_hover;
+
+ Fl_Widget* m_target;
};
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
geStatusButton::geStatusButton(int x, int y, int w, int h, const char** imgOff,
- const char** imgOn)
-: geButton(x, y, w, h, nullptr, imgOff, imgOn),
+ const char** imgOn, const char** imgDisabled)
+: geButton(x, y, w, h, "", imgOff, imgOn, imgDisabled),
m_status(false)
{
}
void geStatusButton::draw()
{
- geButton::draw();
- if (m_status)
- fl_draw_pixmap(imgOn, x()+1, y()+1, G_COLOR_GREY_4);
+ if (active())
+ if (m_status) geButton::draw(imgOn, bgColor1, txtColor);
+ else geButton::draw(imgOff, bgColor0, txtColor);
else
- fl_draw_pixmap(imgOff, x()+1, y()+1, G_COLOR_GREY_4);
+ geButton::draw(imgDisabled, bgColor0, bdColor);
}
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
public:
geStatusButton(int x, int y, int w, int h, const char** imgOff=nullptr,
- const char** imgOn=nullptr);
+ const char** imgOn=nullptr, const char** imgDisabled=nullptr);
void draw() override;
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
#include <string>
-#include "deps/rtaudio-mod/RtAudio.h"
+#include "deps/rtaudio/RtAudio.h"
#include "core/const.h"
#include "core/conf.h"
#include "core/kernelAudio.h"
if (m::kernelAudio::hasAPI(RtAudio::LINUX_PULSE))
soundsys->add("PulseAudio");
- switch (m::conf::soundSystem) {
+ switch (m::conf::conf.soundSystem) {
case G_SYS_API_NONE:
soundsys->showItem("(none)");
break;
if (m::kernelAudio::hasAPI(RtAudio::LINUX_PULSE))
soundsys->add("PulseAudio");
- switch (m::conf::soundSystem) {
+ switch (m::conf::conf.soundSystem) {
case G_SYS_API_NONE:
soundsys->showItem("(none)");
break;
if (m::kernelAudio::hasAPI(RtAudio::WINDOWS_WASAPI))
soundsys->add("WASAPI");
- switch (m::conf::soundSystem) {
+ switch (m::conf::conf.soundSystem) {
case G_SYS_API_NONE:
soundsys->showItem("(none)");
break;
if (m::kernelAudio::hasAPI(RtAudio::MACOSX_CORE))
soundsys->add("CoreAudio");
- switch (m::conf::soundSystem) {
+ switch (m::conf::conf.soundSystem) {
case G_SYS_API_NONE:
soundsys->showItem("(none)");
break;
devOutInfo->callback(cb_showOutputInfo, this);
devInInfo->callback(cb_showInputInfo, this);
- if (m::conf::soundSystem != G_SYS_API_NONE) {
+ if (m::conf::conf.soundSystem != G_SYS_API_NONE) {
fetchSoundDevs();
fetchOutChans(sounddevOut->value());
fetchInChans(sounddevIn->value());
for (int i=0; i<nfreq; i++) {
int freq = m::kernelAudio::getFreq(sounddevOut->value(), i);
samplerate->add(u::string::iToString(freq).c_str());
- if (freq == m::conf::samplerate)
+ if (freq == m::conf::conf.samplerate)
samplerate->value(i);
}
}
buffersize->add("1024");
buffersize->add("2048");
buffersize->add("4096");
- buffersize->showItem(u::string::iToString(m::conf::buffersize).c_str());
+ buffersize->showItem(u::string::iToString(m::conf::conf.buffersize).c_str());
rsmpQuality->add("Sinc best quality (very slow)");
rsmpQuality->add("Sinc medium quality (slow)");
rsmpQuality->add("Sinc basic quality (medium)");
rsmpQuality->add("Zero Order Hold (fast)");
rsmpQuality->add("Linear (very fast)");
- rsmpQuality->value(m::conf::rsmpQuality);
+ rsmpQuality->value(m::conf::conf.rsmpQuality);
- recTriggerLevel->value(u::string::fToString(m::conf::recTriggerLevel, 1).c_str());
+ recTriggerLevel->value(u::string::fToString(m::conf::conf.recTriggerLevel, 1).c_str());
- limitOutput->value(m::conf::limitOutput);
+ limitOutput->value(m::conf::conf.limitOutput);
}
std::string tmp = u::string::iToString(i+1) + "-" + u::string::iToString(i+2);
channelsIn->add(tmp.c_str());
}
- channelsIn->value(m::conf::channelsIn);
+ channelsIn->value(m::conf::conf.channelsIn);
}
std::string tmp = u::string::iToString(i+1) + "-" + u::string::iToString(i+2);
channelsOut->add(tmp.c_str());
}
- channelsOut->value(m::conf::channelsOut);
+ channelsOut->value(m::conf::conf.channelsOut);
}
devOutInfo->deactivate();
}
else {
- int outMenuValue = findMenuDevice(sounddevOut, m::conf::soundDeviceOut);
+ int outMenuValue = findMenuDevice(sounddevOut, m::conf::conf.soundDeviceOut);
sounddevOut->value(outMenuValue);
}
devInInfo->deactivate();
}
else {
- int inMenuValue = findMenuDevice(sounddevIn, m::conf::soundDeviceIn);
+ int inMenuValue = findMenuDevice(sounddevIn, m::conf::conf.soundDeviceIn);
sounddevIn->value(inMenuValue);
}
}
std::string text = soundsys->text(soundsys->value());
if (text == "(none)") {
- m::conf::soundSystem = G_SYS_API_NONE;
+ m::conf::conf.soundSystem = G_SYS_API_NONE;
return;
}
#if defined(__linux__)
else if (text == "ALSA")
- m::conf::soundSystem = G_SYS_API_ALSA;
+ m::conf::conf.soundSystem = G_SYS_API_ALSA;
else if (text == "Jack")
- m::conf::soundSystem = G_SYS_API_JACK;
+ m::conf::conf.soundSystem = G_SYS_API_JACK;
else if (text == "PulseAudio")
- m::conf::soundSystem = G_SYS_API_PULSE;
+ m::conf::conf.soundSystem = G_SYS_API_PULSE;
#elif defined(__FreeBSD__)
else if (text == "Jack")
- m::conf::soundSystem = G_SYS_API_JACK;
+ m::conf::conf.soundSystem = G_SYS_API_JACK;
else if (text == "PulseAudio")
- m::conf::soundSystem = G_SYS_API_PULSE;
+ m::conf::conf.soundSystem = G_SYS_API_PULSE;
#elif defined(_WIN32)
else if (text == "DirectSound")
- m::conf::soundSystem = G_SYS_API_DS;
+ m::conf::conf.soundSystem = G_SYS_API_DS;
else if (text == "ASIO")
- m::conf::soundSystem = G_SYS_API_ASIO;
+ m::conf::conf.soundSystem = G_SYS_API_ASIO;
else if (text == "WASAPI")
- m::conf::soundSystem = G_SYS_API_WASAPI;
+ m::conf::conf.soundSystem = G_SYS_API_WASAPI;
#elif defined (__APPLE__)
else if (text == "CoreAudio")
- m::conf::soundSystem = G_SYS_API_CORE;
+ m::conf::conf.soundSystem = G_SYS_API_CORE;
#endif
/* use the device name to search into the drop down menu's */
- m::conf::soundDeviceOut = m::kernelAudio::getDeviceByName(sounddevOut->text(sounddevOut->value()));
- m::conf::soundDeviceIn = m::kernelAudio::getDeviceByName(sounddevIn->text(sounddevIn->value()));
- m::conf::channelsOut = channelsOut->value();
- m::conf::channelsIn = channelsIn->value();
- m::conf::limitOutput = limitOutput->value();
- m::conf::rsmpQuality = rsmpQuality->value();
+ m::conf::conf.soundDeviceOut = m::kernelAudio::getDeviceByName(sounddevOut->text(sounddevOut->value()));
+ m::conf::conf.soundDeviceIn = m::kernelAudio::getDeviceByName(sounddevIn->text(sounddevIn->value()));
+ m::conf::conf.channelsOut = channelsOut->value();
+ m::conf::conf.channelsIn = channelsIn->value();
+ m::conf::conf.limitOutput = limitOutput->value();
+ m::conf::conf.rsmpQuality = rsmpQuality->value();
/* if sounddevOut is disabled (because of system change e.g. alsa ->
* jack) its value is equal to -1. Change it! */
- if (m::conf::soundDeviceOut == -1)
- m::conf::soundDeviceOut = 0;
+ if (m::conf::conf.soundDeviceOut == -1)
+ m::conf::conf.soundDeviceOut = 0;
- m::conf::buffersize = std::atoi(buffersize->text());
- m::conf::recTriggerLevel = std::atof(recTriggerLevel->value());
+ m::conf::conf.buffersize = std::atoi(buffersize->text());
+ m::conf::conf.recTriggerLevel = std::atof(recTriggerLevel->value());
const Fl_Menu_Item* i = nullptr;
i = samplerate->mvalue(); // mvalue() returns a pointer to the last menu item that was picked
if (i != nullptr)
- m::conf::samplerate = std::atoi(i->label());
+ m::conf::conf.samplerate = std::atoi(i->label());
}
}} // giada::v::
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
labelsize(G_GUI_FONT_SIZE_BASE);
selection_color(G_COLOR_GREY_4);
- m::conf::recsStopOnChanHalt == 1 ? recsStopOnChanHalt_1->value(1) : recsStopOnChanHalt_0->value(1);
- m::conf::chansStopOnSeqHalt == 1 ? chansStopOnSeqHalt_1->value(1) : chansStopOnSeqHalt_0->value(1);
- treatRecsAsLoops->value(m::conf::treatRecsAsLoops);
- inputMonitorDefaultOn->value(m::conf::inputMonitorDefaultOn);
+ m::conf::conf.recsStopOnChanHalt == 1 ? recsStopOnChanHalt_1->value(1) : recsStopOnChanHalt_0->value(1);
+ m::conf::conf.chansStopOnSeqHalt == 1 ? chansStopOnSeqHalt_1->value(1) : chansStopOnSeqHalt_0->value(1);
+ treatRecsAsLoops->value(m::conf::conf.treatRecsAsLoops);
+ inputMonitorDefaultOn->value(m::conf::conf.inputMonitorDefaultOn);
recsStopOnChanHalt_1->callback(cb_radio_mutex, (void*)this);
recsStopOnChanHalt_0->callback(cb_radio_mutex, (void*)this);
void geTabBehaviors::save()
{
- m::conf::recsStopOnChanHalt = recsStopOnChanHalt_1->value() == 1 ? 1 : 0;
- m::conf::chansStopOnSeqHalt = chansStopOnSeqHalt_1->value() == 1 ? 1 : 0;
- m::conf::treatRecsAsLoops = treatRecsAsLoops->value() == 1 ? 1 : 0;
- m::conf::inputMonitorDefaultOn = inputMonitorDefaultOn->value() == 1 ? 1 : 0;
+ m::conf::conf.recsStopOnChanHalt = recsStopOnChanHalt_1->value() == 1 ? 1 : 0;
+ m::conf::conf.chansStopOnSeqHalt = chansStopOnSeqHalt_1->value() == 1 ? 1 : 0;
+ m::conf::conf.treatRecsAsLoops = treatRecsAsLoops->value() == 1 ? 1 : 0;
+ m::conf::conf.inputMonitorDefaultOn = inputMonitorDefaultOn->value() == 1 ? 1 : 0;
}
}} // giada::v::
\ No newline at end of file
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
sync->add("(disabled)");
sync->add("MIDI Clock (master)");
sync->add("MTC (master)");
- if (m::conf::midiSync == MIDI_SYNC_NONE)
+ if (m::conf::conf.midiSync == MIDI_SYNC_NONE)
sync->value(0);
- else if (m::conf::midiSync == MIDI_SYNC_CLOCK_M)
+ else if (m::conf::conf.midiSync == MIDI_SYNC_CLOCK_M)
sync->value(1);
- else if (m::conf::midiSync == MIDI_SYNC_MTC_M)
+ else if (m::conf::conf.midiSync == MIDI_SYNC_MTC_M)
sync->value(2);
systemInitValue = system->value();
for (unsigned i=0; i<m::kernelMidi::countOutPorts(); i++)
portOut->add(u::gui::removeFltkChars(m::kernelMidi::getOutPortName(i)).c_str());
- portOut->value(m::conf::midiPortOut+1); // +1 because midiPortOut=-1 is '(disabled)'
+ portOut->value(m::conf::conf.midiPortOut+1); // +1 because midiPortOut=-1 is '(disabled)'
}
}
for (unsigned i=0; i<m::kernelMidi::countInPorts(); i++)
portIn->add(u::gui::removeFltkChars(m::kernelMidi::getInPortName(i)).c_str());
- portIn->value(m::conf::midiPortIn+1); // +1 because midiPortIn=-1 is '(disabled)'
+ portIn->value(m::conf::conf.midiPortIn+1); // +1 because midiPortIn=-1 is '(disabled)'
}
}
for (unsigned i=0; i<m::midimap::maps.size(); i++) {
const char *imap = m::midimap::maps.at(i).c_str();
midiMap->add(imap);
- if (m::conf::midiMapPath == imap)
+ if (m::conf::conf.midiMapPath == imap)
midiMap->value(i);
}
std::string text = system->text(system->value());
if (text == "ALSA")
- m::conf::midiSystem = RtMidi::LINUX_ALSA;
+ m::conf::conf.midiSystem = RtMidi::LINUX_ALSA;
else if (text == "Jack")
- m::conf::midiSystem = RtMidi::UNIX_JACK;
+ m::conf::conf.midiSystem = RtMidi::UNIX_JACK;
else if (text == "Multimedia MIDI")
- m::conf::midiSystem = RtMidi::WINDOWS_MM;
+ m::conf::conf.midiSystem = RtMidi::WINDOWS_MM;
else if (text == "OSX Core MIDI")
- m::conf::midiSystem = RtMidi::MACOSX_CORE;
+ m::conf::conf.midiSystem = RtMidi::MACOSX_CORE;
- m::conf::midiPortOut = portOut->value()-1; // -1 because midiPortOut=-1 is '(disabled)'
- m::conf::midiPortIn = portIn->value()-1; // -1 because midiPortIn=-1 is '(disabled)'
- m::conf::midiMapPath = m::midimap::maps.size() == 0 ? "" : midiMap->text(midiMap->value());
+ m::conf::conf.midiPortOut = portOut->value()-1; // -1 because midiPortOut=-1 is '(disabled)'
+ m::conf::conf.midiPortIn = portIn->value()-1; // -1 because midiPortIn=-1 is '(disabled)'
+ m::conf::conf.midiMapPath = m::midimap::maps.size() == 0 ? "" : midiMap->text(midiMap->value());
if (sync->value() == 0)
- m::conf::midiSync = MIDI_SYNC_NONE;
+ m::conf::conf.midiSync = MIDI_SYNC_NONE;
else if (sync->value() == 1)
- m::conf::midiSync = MIDI_SYNC_CLOCK_M;
+ m::conf::conf.midiSync = MIDI_SYNC_CLOCK_M;
else if (sync->value() == 2)
- m::conf::midiSync = MIDI_SYNC_MTC_M;
+ m::conf::conf.midiSync = MIDI_SYNC_MTC_M;
}
#endif
- switch (m::conf::midiSystem) {
+ switch (m::conf::conf.midiSystem) {
case RtMidi::LINUX_ALSA: system->showItem("ALSA"); break;
case RtMidi::UNIX_JACK: system->showItem("Jack"); break;
case RtMidi::WINDOWS_MM: system->showItem("Multimedia MIDI"); break;
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
labelsize(G_GUI_FONT_SIZE_BASE);
selection_color(G_COLOR_GREY_4);
- switch (m::conf::logMode) {
+ switch (m::conf::conf.logMode) {
case LOG_MODE_MUTE:
debugMsg->value(0);
break;
{
switch(debugMsg->value()) {
case 0:
- m::conf::logMode = LOG_MODE_MUTE;
+ m::conf::conf.logMode = LOG_MODE_MUTE;
break;
case 1:
- m::conf::logMode = LOG_MODE_STDOUT;
+ m::conf::conf.logMode = LOG_MODE_STDOUT;
break;
case 2:
- m::conf::logMode = LOG_MODE_FILE;
+ m::conf::conf.logMode = LOG_MODE_FILE;
break;
}
}
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
m_info->label("Scan in progress. Please wait...");
m_info->hide();
- m_folderPath->value(m::conf::pluginPath.c_str());
+ m_folderPath->value(m::conf::conf.pluginPath.c_str());
m_folderPath->label("Plugins folder");
m_browse->callback(cb_browse, (void*) this);
void geTabPlugins::cb_browse()
{
v::gdBrowserDir* browser = new v::gdBrowserDir("Add plug-ins directory",
- m::conf::patchPath, c::plugin::setPluginPathCb);
+ m::conf::conf.patchPath, c::plugin::setPluginPathCb);
static_cast<v::gdWindow*>(top_window())->addSubWindow(browser);
}
void geTabPlugins::save()
{
- m::conf::pluginPath = m_folderPath->value();
+ m::conf::conf.pluginPath = m_folderPath->value();
}
void geTabPlugins::refreshVstPath()
{
- m_folderPath->value(m::conf::pluginPath.c_str());
+ m_folderPath->value(m::conf::conf.pluginPath.c_str());
m_folderPath->redraw();
}
}} // giada::v::
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
#include <FL/Fl.H>
+#include <FL/fl_draw.H>
#include "core/channels/channel.h"
#include "core/model/model.h"
#include "core/const.h"
namespace v
{
geChannel::geChannel(int X, int Y, int W, int H, ID channelId)
-: Fl_Pack (X, Y, W, H),
+: Fl_Group (X, Y, W, H),
channelId(channelId)
{
- type(Fl_Pack::HORIZONTAL);
- spacing(G_GUI_INNER_MARGIN);
+}
+
+
+/* -------------------------------------------------------------------------- */
+
+
+void geChannel::draw()
+{
+ const int ny = y() + (h() / 2) - (G_GUI_UNIT / 2);
+
+ playButton->resize(playButton->x(), ny, G_GUI_UNIT, G_GUI_UNIT);
+ arm->resize(arm->x(), ny, G_GUI_UNIT, G_GUI_UNIT);
+ mute->resize(mute->x(), ny, G_GUI_UNIT, G_GUI_UNIT);
+ solo->resize(solo->x(), ny, G_GUI_UNIT, G_GUI_UNIT);
+ vol->resize(vol->x(), ny, G_GUI_UNIT, G_GUI_UNIT);
+#ifdef WITH_VST
+ fx->resize(fx->x(), ny, G_GUI_UNIT, G_GUI_UNIT);
+#endif
+
+ fl_rectf(x(), y(), w(), h(), G_COLOR_GREY_1_5);
+
+ Fl_Group::draw();
}
are visible. */
int visibles = 0;
- for (int i=0; i<children(); i++) {
+ for (int i = 0; i < children(); i++) {
child(i)->size(MIN_ELEM_W, child(i)->h()); // also normalize widths
if (child(i)->visible())
visibles++;
/* Reposition everything else */
- for (int i=1, p=0; i<children(); i++) {
+ for (int i = 1, p = 0; i < children(); i++) {
if (!child(i)->visible())
continue;
- for (int k=i-1; k>=0; k--) // Get the first visible item prior to i
+ for (int k = i - 1; k >= 0; k--) // Get the first visible item prior to i
if (child(k)->visible()) {
p = k;
break;
return false;
}
-
-
-/* -------------------------------------------------------------------------- */
-
-
-void geChannel::changeSize(int H)
-{
- size(w(), H);
-
- int Y = y() + (H / 2 - (G_GUI_UNIT / 2));
-
- playButton->resize(playButton->x(), Y, playButton->w(), G_GUI_UNIT);
- arm->resize(arm->x(), Y, arm->w(), G_GUI_UNIT);
- mainButton->resize(mainButton->x(), mainButton->y(), mainButton->w(), H);
- mute->resize(mute->x(), Y, mute->w(), G_GUI_UNIT);
- solo->resize(solo->x(), Y, solo->w(), G_GUI_UNIT);
- vol->resize(vol->x(), Y, vol->w(), G_GUI_UNIT);
-#ifdef WITH_VST
- fx->resize(fx->x(), Y, fx->w(), G_GUI_UNIT);
-#endif
-}
-
-
-/* -------------------------------------------------------------------------- */
-
-
-int geChannel::getSize()
-{
- return h();
-}
-
}} // giada::v::
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
#define GE_CHANNEL_H
-#include <FL/Fl_Pack.H>
+#include <FL/Fl_Group.H>
#include "core/types.h"
{
class geChannelButton;
-class geChannel : public Fl_Pack
+class geChannel : public Fl_Group
{
public:
geChannel(int x, int y, int w, int h, ID channelId);
+ void draw() override;
+
/* refresh
Updates graphics. */
virtual void refresh();
- /* changeSize
- Changes channel's size according to a template (x1, x2, ...). */
-
- virtual void changeSize(int h);
-
/* getColumnId
Returns the ID of the column this channel resides in. */
ID getColumnId();
- int getSize();
-
/* handleKey
Performs some UI-related operations when the bound key is pressed. Returns
whether the bound key has been pressed or not. */
ID channelId;
geStatusButton* playButton;
- geChannelStatus* status;
geButton* arm;
+ geChannelStatus* status;
geChannelButton* mainButton;
geStatusButton* mute;
geStatusButton* solo;
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
#include <FL/Fl_Menu_Button.H>
#include "core/channels/sampleChannel.h"
#include "core/channels/midiChannel.h"
+#include "core/model/model.h"
#include "glue/channel.h"
#include "utils/log.h"
#include "utils/fs.h"
#include "utils/string.h"
#include "gui/dialogs/warnings.h"
#include "gui/elems/basics/boxtypes.h"
+#include "gui/elems/basics/resizerBar.h"
#include "keyboard.h"
#include "sampleChannel.h"
#include "midiChannel.h"
namespace v
{
geColumn::geColumn(int X, int Y, int W, int H, ID id, geResizerBar* b)
-: Fl_Pack (X, Y, W, H),
+: Fl_Group (X, Y, W, H),
id (id),
resizerBar(b)
{
end();
-
- type(Fl_Pack::VERTICAL);
- spacing(G_GUI_INNER_MARGIN);
-
init();
}
void geColumn::refresh()
{
- for (int i=1; i<children(); i++) { // Skip "add channel" button
- geChannel* c = dynamic_cast<geChannel*>(child(i));
- if (c != nullptr)
- c->refresh();
- }
+ for (geChannel* c : m_channels)
+ c->refresh();
}
/* -------------------------------------------------------------------------- */
-geChannel* geColumn::addChannel(ID channelId, ChannelType t, int size)
+geChannel* geColumn::addChannel(ID channelId, ChannelType t, int height)
{
- geChannel* gch = nullptr;
+ geChannel* gch = nullptr;
+ Fl_Widget* last = m_channels.size() == 0 ? static_cast<Fl_Widget*>(m_addChannelBtn) : m_channels.back();
if (t == ChannelType::SAMPLE)
- gch = new geSampleChannel(0, 0, w(), size, channelId);
+ gch = new geSampleChannel(x(), last->y() + last->h() + G_GUI_INNER_MARGIN, w(), height, channelId);
else
- gch = new geMidiChannel(0, 0, w(), size, channelId);
+ gch = new geMidiChannel (x(), last->y() + last->h() + G_GUI_INNER_MARGIN, w(), height, channelId);
+
+ geResizerBar* bar = new geResizerBar(x(), gch->y() + gch->h(), w(),
+ G_GUI_INNER_MARGIN, G_GUI_UNIT, geResizerBar::VERTICAL, gch);
+
+ /* Update the column height while dragging the resizer bar. */
+ bar->onDrag = [=](const Fl_Widget* w)
+ {
+ resizable(nullptr);
+ size(this->w(), (child(children() - 1)->y() - y()) + G_GUI_INNER_MARGIN);
+ };
+
+ /* Store the channel height in model when the resizer bar is released. */
+
+ bar->onRelease = [=](const Fl_Widget* w)
+ {
+ storeChannelHeight(w, channelId);
+ };
+
+ m_channels.push_back(gch);
+
+ /* Temporarily disable the resizability, add new stuff, resize the group and
+ bring the resizability back. This is needed to prevent weird vertical
+ stretching on existing content. */
+
+ resizable(nullptr);
add(gch);
- gch->redraw(); // fix corruption
+ add(bar);
+ size(w(), computeHeight());
+ init_sizes();
+ resizable(this);
+
return gch;
}
if (m == nullptr) return;
if (strcmp(m->label(), "Add Sample channel") == 0)
- c::channel::addChannel(id, ChannelType::SAMPLE, G_GUI_CHANNEL_H_1);
+ c::channel::addChannel(id, ChannelType::SAMPLE);
else
if (strcmp(m->label(), "Add MIDI channel") == 0)
- c::channel::addChannel(id, ChannelType::MIDI, G_GUI_CHANNEL_H_1);
+ c::channel::addChannel(id, ChannelType::MIDI);
else
static_cast<geKeyboard*>(parent())->deleteColumn(id);
/* -------------------------------------------------------------------------- */
-geChannel* geColumn::getChannel(ID chanID) const
+geChannel* geColumn::getChannel(ID channelId) const
{
- for (int i=1; i<children(); i++) { // Skip "add channel" button
- geChannel* gch = static_cast<geChannel*>(child(i));
- if (gch->channelId == chanID)
- return gch;
- }
+ for (geChannel* c : m_channels)
+ if (c->channelId == channelId)
+ return c;
return nullptr;
}
void geColumn::init()
{
- clear();
+ Fl_Group::clear();
+ m_channels.clear();
- m_addChannelBtn = new geButton(0, 0, w(), G_GUI_UNIT, "Edit column");
+ m_addChannelBtn = new geButton(x(), y(), w(), G_GUI_UNIT, "Edit column");
m_addChannelBtn->callback(cb_addChannel, (void*)this);
add(m_addChannelBtn);
/* -------------------------------------------------------------------------- */
-void geColumn::forEachChannel(std::function<void(geChannel* c)> f) const
+void geColumn::forEachChannel(std::function<void(geChannel& c)> f) const
{
- for (int i=1; i<children(); i++) // Skip "add channel" button
- f(static_cast<geChannel*>(child(i)));
+ for (geChannel* c : m_channels)
+ f(*c);
}
int geColumn::countChannels() const
{
- return children() - 1;
+ return m_channels.size();
}
+
+
+/* -------------------------------------------------------------------------- */
+
+
+int geColumn::computeHeight() const
+{
+ int out = 0;
+ for (const geChannel* c : m_channels)
+ out += c->h() + G_GUI_INNER_MARGIN;
+ return out + m_addChannelBtn->h() + G_GUI_INNER_MARGIN;
+}
+
+
+/* -------------------------------------------------------------------------- */
+
+
+void geColumn::storeChannelHeight(const Fl_Widget* w, ID channelId) const
+{
+ m::model::onSwap(m::model::channels, channelId, [&](m::Channel& c)
+ {
+ c.height = w->h();
+ });
+}
+
}} // giada::v::
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
#include <functional>
-#include <FL/Fl_Pack.H>
+#include <vector>
+#include <FL/Fl_Group.H>
#include "core/types.h"
{
class geKeyboard;
class geChannel;
-class geColumn : public Fl_Pack
+class geColumn : public Fl_Group
{
public:
geColumn(int x, int y, int w, int h, ID id, geResizerBar* b);
- geChannel* getChannel(ID chanID) const;
+ geChannel* getChannel(ID channelId) const;
/* addChannel
Adds a new channel in this column. */
void init();
- void forEachChannel(std::function<void(geChannel* c)> f) const;
+ void forEachChannel(std::function<void(geChannel& c)> f) const;
ID id;
void cb_addChannel();
int countChannels() const;
+ int computeHeight() const;
+ void storeChannelHeight(const Fl_Widget* c, ID channelId) const;
+
+ std::vector<geChannel*> m_channels;
geButton* m_addChannelBtn;
};
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
/* Add 6 empty columns as initial layout. */
- cb_addColumn();
- cb_addColumn();
- cb_addColumn();
- cb_addColumn();
- cb_addColumn();
- cb_addColumn();
+ layout.clear();
+ layout.push_back({1, G_DEFAULT_COLUMN_WIDTH});
+ layout.push_back({2, G_DEFAULT_COLUMN_WIDTH});
+ layout.push_back({3, G_DEFAULT_COLUMN_WIDTH});
+ layout.push_back({4, G_DEFAULT_COLUMN_WIDTH});
+ layout.push_back({5, G_DEFAULT_COLUMN_WIDTH});
+ layout.push_back({6, G_DEFAULT_COLUMN_WIDTH});
}
void geKeyboard::rebuild()
{
- for (geColumn* c : m_columns)
- c->init();
+ /* Wipe out all columns and add them according to the current layout. */
+
+ deleteAllColumns();
- m::model::ChannelsLock lock(m::model::channels);
+ for (ColumnLayout c : layout)
+ addColumn(c.width, c.id);
- for (const m::Channel* ch : m::model::channels) {
-
- if (ch->id == m::mixer::MASTER_OUT_CHANNEL_ID ||
- ch->id == m::mixer::MASTER_IN_CHANNEL_ID ||
- ch->id == m::mixer::PREVIEW_CHANNEL_ID)
- continue;
-
- geColumn* column = getColumn(ch->columnId);
- if (column == nullptr)
- column = cb_addColumn(G_DEFAULT_COLUMN_WIDTH, ch->columnId);
-
- column->addChannel(ch->id, ch->type, G_GUI_CHANNEL_H_1);
- }
+ /* Parse the model and assign each channel to its column. */
+ m::model::ChannelsLock lock(m::model::channels);
+
+ for (const m::Channel* ch : m::model::channels)
+ if (!ch->isInternal())
+ getColumn(ch->columnId)->addChannel(ch->id, ch->type, ch->height);
+
redraw();
}
void geKeyboard::deleteColumn(ID id)
{
- size_t i = u::vector::indexOfIf(m_columns, [=](const geColumn* c) { return c->id == id; });
-
- assert(i < m_columns.size());
-
- /* Delete selected column. */
-
- Fl::delete_widget(m_columns.at(i)->resizerBar);
- Fl::delete_widget(m_columns.at(i));
- m_columns.erase(m_columns.begin() + i);
-
- /* Reposition remaining columns and 'add column' button. */
-
- int px = G_GUI_OUTER_MARGIN;
- for (geColumn* c : m_columns) {
- c->position(px, c->y());
- c->resizerBar->position(c->x() + c->w(), c->resizerBar->y());
- px = c->resizerBar->x() + c->resizerBar->w();
- }
- m_addColumnBtn->position(px, y());
+ u::vector::removeIf(layout, [=](const ColumnLayout& c) { return c.id == id; });
+ rebuild();
}
void geKeyboard::cb_addColumn(Fl_Widget* v, void* p)
{
- ((geKeyboard*)p)->cb_addColumn(G_DEFAULT_COLUMN_WIDTH);
+ ((geKeyboard*)p)->cb_addColumn();
}
w() - scrollbar_size() - (G_GUI_OUTER_MARGIN * 2),
h() - scrollbar_size() - (G_GUI_OUTER_MARGIN * 2));
- for (const geColumn* c : m_columns) {
+ for (const geColumn* c : m_columns)
fl_rectf(c->x(), c->y() + c->h(), c->w(), h() + yposition());
- c->resizerBar->size(c->resizerBar->x(), h());
- }
fl_pop_clip();
}
/* -------------------------------------------------------------------------- */
-geColumn* geKeyboard::cb_addColumn(int width, ID id)
+void geKeyboard::cb_addColumn()
+{
+ addColumn();
+ storeLayout();
+}
+
+
+/* -------------------------------------------------------------------------- */
+
+
+void geKeyboard::addColumn(int width, ID id)
{
int colx = x() - xposition(); // Mind the x-scroll offset with xposition()
/* Add a new column + a new resizer bar. */
geResizerBar* bar = new geResizerBar(colx + width, y(), COLUMN_GAP, h(), G_MIN_COLUMN_WIDTH, geResizerBar::HORIZONTAL);
- geColumn* column = new geColumn(colx, y(), width, h(), m_columnId.get(id), bar);
+ geColumn* column = new geColumn(colx, y(), width, G_GUI_UNIT, m_columnId.get(id), bar);
+
+ /* Store the column width in layout when the resizer bar is released. */
+
+ bar->onRelease = [=](const Fl_Widget* w)
+ {
+ storeLayout();
+ };
+
add(column);
add(bar);
m_columns.push_back(column);
m_addColumnBtn->position(colx + width + COLUMN_GAP, y());
redraw();
-
- return column;
-}
-
-
-/* -------------------------------------------------------------------------- */
-
-
-void geKeyboard::addColumn(int width, ID id)
-{
- cb_addColumn(width, id);
}
/* -------------------------------------------------------------------------- */
-void geKeyboard::forEachChannel(std::function<void(geChannel* c)> f) const
+void geKeyboard::forEachChannel(std::function<void(geChannel& c)> f) const
{
for (geColumn* column : m_columns)
column->forEachChannel(f);
for (geColumn* c : m_columns)
if (c->id == id)
return c;
+ assert(false);
return nullptr;
}
p = u::fs::stripFileUrl(p);
return paths;
}
+
+
+/* -------------------------------------------------------------------------- */
+
+
+void geKeyboard::storeLayout()
+{
+ layout.clear();
+ for (const geColumn* c : m_columns)
+ layout.push_back({ c->id, c->w() });
+}
+
}} // giada::v::
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
class geButton;
-class geColumn;
class geResizerBar;
namespace giada {
namespace v
{
+class geColumn;
class geChannel;
class geSampleChannel;
{
public:
+ struct ColumnLayout
+ {
+ ID id;
+ int width;
+ };
+
geKeyboard(int X, int Y, int W, int H);
int handle(int e) override;
void refresh();
- /* addColumn
- Adds a new column on the top of the stack. */
-
- void addColumn(int width=G_DEFAULT_COLUMN_WIDTH, ID id=0);
-
/* deleteColumn
Deletes column by id. */
void init();
- void forEachChannel(std::function<void(geChannel* c)> f) const;
+ void forEachChannel(std::function<void(geChannel& c)> f) const;
void forEachColumn(std::function<void(const geColumn& c)> f) const;
+ /* layout
+  The column layout. Each element is a column with a specific width. */
+
+ std::vector<ColumnLayout> layout;
+
private:
- static const int COLUMN_GAP = 20;
+ static constexpr int COLUMN_GAP = 20;
static void cb_addColumn(Fl_Widget* v, void* p);
- geColumn* cb_addColumn(int width=G_DEFAULT_COLUMN_WIDTH, ID id=0);
+ void cb_addColumn();
+
+ void addColumn(int width=G_DEFAULT_COLUMN_WIDTH, ID id=0);
/* getDroppedFilePaths
Returns a vector of audio file paths after a drag-n-drop from desktop
geColumn* getColumnAtCursor(Pixel x);
+ /* storeLayout
+ Stores the current column layout into the layout vector. */
+
+ void storeLayout();
+
m::IdManager m_columnId;
std::vector<geColumn*> m_columns;
SETUP_KEYBOARD_INPUT,
SETUP_MIDI_INPUT,
SETUP_MIDI_OUTPUT,
- /*RESIZE,
- RESIZE_H1,
- RESIZE_H2,
- RESIZE_H3,
- RESIZE_H4,
- __END_RESIZE_SUBMENU__,*/
RENAME_CHANNEL,
CLONE_CHANNEL,
DELETE_CHANNEL
{
case Menu::CLEAR_ACTIONS:
case Menu::__END_CLEAR_ACTION_SUBMENU__:
- /*case Menu::RESIZE:
- case Menu::__END_RESIZE_SUBMENU__:*/
break;
case Menu::EDIT_ACTIONS:
u::gui::openSubWindow(G_MainWin, new v::gdMidiActionEditor(gch->channelId), WID_ACTION_EDITOR);
case Menu::SETUP_MIDI_OUTPUT:
u::gui::openSubWindow(G_MainWin, new gdMidiOutputMidiCh(gch->channelId), WID_MIDI_OUTPUT);
break;
- /*case Menu::RESIZE_H1:
- gch->changeSize(G_GUI_CHANNEL_H_1);
- break;
- case Menu::RESIZE_H2:
- gch->changeSize(G_GUI_CHANNEL_H_2);
- break;
- case Menu::RESIZE_H3:
- gch->changeSize(G_GUI_CHANNEL_H_3);
- break;
- case Menu::RESIZE_H4:
- gch->changeSize(G_GUI_CHANNEL_H_4);
- break;*/
case Menu::CLONE_CHANNEL:
c::channel::cloneChannel(gch->channelId);
break;
geMidiChannel::geMidiChannel(int X, int Y, int W, int H, ID channelId)
- : geChannel(X, Y, W, H, channelId)
+: geChannel(X, Y, W, H, channelId)
{
#if defined(WITH_VST)
- const int delta = 144; // (6 widgets * G_GUI_UNIT) + (6 paddings * 4)
+ constexpr int delta = 6 * (G_GUI_UNIT + G_GUI_INNER_MARGIN);
#else
- const int delta = 120; // (5 widgets * G_GUI_UNIT) + (5 paddings * 4)
+ constexpr int delta = 5 * (G_GUI_UNIT + G_GUI_INNER_MARGIN);
#endif
- playButton = new geStatusButton(0, 0, G_GUI_UNIT, G_GUI_UNIT, channelStop_xpm, channelPlay_xpm);
- arm = new geButton(0, 0, G_GUI_UNIT, G_GUI_UNIT, "", armOff_xpm, armOn_xpm);
- mainButton = new geMidiChannelButton(0, 0, w() - delta, H, channelId);
- mute = new geStatusButton(0, 0, G_GUI_UNIT, G_GUI_UNIT, muteOff_xpm, muteOn_xpm);
- solo = new geStatusButton(0, 0, G_GUI_UNIT, G_GUI_UNIT, soloOff_xpm, soloOn_xpm);
+ playButton = new geStatusButton (x(), y(), G_GUI_UNIT, G_GUI_UNIT, channelStop_xpm, channelPlay_xpm);
+ arm = new geButton (playButton->x() + playButton->w() + G_GUI_INNER_MARGIN, y(), G_GUI_UNIT, G_GUI_UNIT, "", armOff_xpm, armOn_xpm);
+ mainButton = new geMidiChannelButton(arm->x() + arm->w() + G_GUI_INNER_MARGIN, y(), w() - delta, H, channelId);
+ mute = new geStatusButton (mainButton->x() + mainButton->w() + G_GUI_INNER_MARGIN, y(), G_GUI_UNIT, G_GUI_UNIT, muteOff_xpm, muteOn_xpm);
+ solo = new geStatusButton (mute->x() + mute->w() + G_GUI_INNER_MARGIN, y(), G_GUI_UNIT, G_GUI_UNIT, soloOff_xpm, soloOn_xpm);
#if defined(WITH_VST)
- fx = new geStatusButton(0, 0, G_GUI_UNIT, G_GUI_UNIT, fxOff_xpm, fxOn_xpm);
+ fx = new geStatusButton (solo->x() + solo->w() + G_GUI_INNER_MARGIN, y(), G_GUI_UNIT, G_GUI_UNIT, fxOff_xpm, fxOn_xpm);
+ vol = new geDial (fx->x() + fx->w() + G_GUI_INNER_MARGIN, y(), G_GUI_UNIT, G_GUI_UNIT);
+#else
+ vol = new geDial (solo->x() + solo->w() + G_GUI_INNER_MARGIN, y(), G_GUI_UNIT, G_GUI_UNIT);
#endif
- vol = new geDial(0, 0, G_GUI_UNIT, G_GUI_UNIT);
end();
vol->value(ch.volume);
vol->callback(cb_changeVol, (void*)this);
- changeSize(H); // Update size dynamically
+ size(w(), h()); // Force responsiveness
}
{"Setup keyboard input...", 0, menuCallback, (void*) Menu::SETUP_KEYBOARD_INPUT},
{"Setup MIDI input...", 0, menuCallback, (void*) Menu::SETUP_MIDI_INPUT},
{"Setup MIDI output...", 0, menuCallback, (void*) Menu::SETUP_MIDI_OUTPUT},
- /*{"Resize", 0, menuCallback, (void*) Menu::RESIZE, FL_SUBMENU},
- {"Normal", 0, menuCallback, (void*) Menu::RESIZE_H1},
- {"Medium", 0, menuCallback, (void*) Menu::RESIZE_H2},
- {"Large", 0, menuCallback, (void*) Menu::RESIZE_H3},
- {"X-Large", 0, menuCallback, (void*) Menu::RESIZE_H4},
- {0},*/
{"Rename", 0, menuCallback, (void*) Menu::RENAME_CHANNEL},
{"Clone", 0, menuCallback, (void*) Menu::CLONE_CHANNEL},
{"Delete", 0, menuCallback, (void*) Menu::DELETE_CHANNEL},
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
CLEAR_ACTIONS_VOLUME,
CLEAR_ACTIONS_START_STOP,
__END_CLEAR_ACTIONS_SUBMENU__,
- /*RESIZE,
- RESIZE_H1,
- RESIZE_H2,
- RESIZE_H3,
- RESIZE_H4,
- __END_RESIZE_SUBMENU__,*/
RENAME_CHANNEL,
CLONE_CHANNEL,
FREE_CHANNEL,
}
case Menu::LOAD_SAMPLE: {
gdWindow* w = new gdBrowserLoad("Browse sample",
- m::conf::samplePath.c_str(), c::storage::loadSample, gch->channelId);
+ m::conf::conf.samplePath.c_str(), c::storage::loadSample, gch->channelId);
u::gui::openSubWindow(G_MainWin, w, WID_FILE_BROWSER);
break;
}
case Menu::EXPORT_SAMPLE: {
gdWindow* w = new gdBrowserSave("Save sample",
- m::conf::samplePath.c_str(), "", c::storage::saveSample, gch->channelId);
+ m::conf::conf.samplePath.c_str(), "", c::storage::saveSample, gch->channelId);
u::gui::openSubWindow(G_MainWin, w, WID_FILE_BROWSER);
break;
}
}
case Menu::CLEAR_ACTIONS:
case Menu::__END_CLEAR_ACTIONS_SUBMENU__:
- //case Menu::RESIZE:
- //case Menu::__END_RESIZE_SUBMENU__:
break;
case Menu::CLEAR_ACTIONS_ALL: {
c::recorder::clearAllActions(gch->channelId);
c::recorder::clearStartStopActions(gch->channelId);
break;
}
- /*case Menu::RESIZE_H1: {
- gch->changeSize(G_GUI_CHANNEL_H_1);
- break;
- }
- case Menu::RESIZE_H2: {
- gch->changeSize(G_GUI_CHANNEL_H_2);
- break;
- }
- case Menu::RESIZE_H3: {
- gch->changeSize(G_GUI_CHANNEL_H_3);
- break;
- }
- case Menu::RESIZE_H4: {
- gch->changeSize(G_GUI_CHANNEL_H_4);
- break;
- }*/
case Menu::CLONE_CHANNEL: {
c::channel::cloneChannel(gch->channelId);
break;
geSampleChannel::geSampleChannel(int X, int Y, int W, int H, ID channelId)
- : geChannel(X, Y, W, H, channelId)
+: geChannel(X, Y, W, H, channelId)
{
- playButton = new geStatusButton(0, 0, G_GUI_UNIT, G_GUI_UNIT, channelStop_xpm, channelPlay_xpm);
- arm = new geButton(0, 0, G_GUI_UNIT, G_GUI_UNIT, "", armOff_xpm, armOn_xpm);
- status = new geChannelStatus(0, 0, G_GUI_UNIT, H, channelId);
- mainButton = new geSampleChannelButton(0, 0, G_GUI_UNIT, H, channelId);
- readActions = new geStatusButton(0, 0, G_GUI_UNIT, G_GUI_UNIT, readActionOff_xpm, readActionOn_xpm);
- modeBox = new geChannelMode(0, 0, G_GUI_UNIT, G_GUI_UNIT, channelId);
- mute = new geStatusButton(0, 0, G_GUI_UNIT, G_GUI_UNIT, muteOff_xpm, muteOn_xpm);
- solo = new geStatusButton(0, 0, G_GUI_UNIT, G_GUI_UNIT, soloOff_xpm, soloOn_xpm);
-#ifdef WITH_VST
- fx = new geStatusButton(0, 0, G_GUI_UNIT, G_GUI_UNIT, fxOff_xpm, fxOn_xpm);
+#if defined(WITH_VST)
+ constexpr int delta = 9 * (G_GUI_UNIT + G_GUI_INNER_MARGIN);
+#else
+ constexpr int delta = 8 * (G_GUI_UNIT + G_GUI_INNER_MARGIN);
+#endif
+
+ playButton = new geStatusButton (x(), y(), G_GUI_UNIT, G_GUI_UNIT, channelStop_xpm, channelPlay_xpm);
+ arm = new geButton (playButton->x() + playButton->w() + G_GUI_INNER_MARGIN, y(), G_GUI_UNIT, G_GUI_UNIT, "", armOff_xpm, armOn_xpm);
+ status = new geChannelStatus (arm->x() + arm->w() + G_GUI_INNER_MARGIN, y(), G_GUI_UNIT, H, channelId);
+ mainButton = new geSampleChannelButton(status->x() + status->w() + G_GUI_INNER_MARGIN, y(), w() - delta, H, channelId);
+ readActions = new geStatusButton (mainButton->x() + mainButton->w() + G_GUI_INNER_MARGIN, y(), G_GUI_UNIT, G_GUI_UNIT, readActionOff_xpm, readActionOn_xpm, readActionDisabled_xpm);
+ modeBox = new geChannelMode (readActions->x() + readActions->w() + G_GUI_INNER_MARGIN, y(), G_GUI_UNIT, G_GUI_UNIT, channelId);
+ mute = new geStatusButton (modeBox->x() + modeBox->w() + G_GUI_INNER_MARGIN, y(), G_GUI_UNIT, G_GUI_UNIT, muteOff_xpm, muteOn_xpm);
+ solo = new geStatusButton (mute->x() + mute->w() + G_GUI_INNER_MARGIN, y(), G_GUI_UNIT, G_GUI_UNIT, soloOff_xpm, soloOn_xpm);
+#if defined(WITH_VST)
+ fx = new geStatusButton (solo->x() + solo->w() + G_GUI_INNER_MARGIN, y(), G_GUI_UNIT, G_GUI_UNIT, fxOff_xpm, fxOn_xpm);
+ vol = new geDial (fx->x() + fx->w() + G_GUI_INNER_MARGIN, y(), G_GUI_UNIT, G_GUI_UNIT);
+#else
+ vol = new geDial (solo->x() + solo->w() + G_GUI_INNER_MARGIN, y(), G_GUI_UNIT, G_GUI_UNIT);
#endif
- vol = new geDial(0, 0, G_GUI_UNIT, G_GUI_UNIT);
end();
mainButton->setKey(ch.key);
mainButton->callback(cb_openMenu, (void*)this);
- //readActions->type(FL_TOGGLE_BUTTON);
- //readActions->value(ch.readActions);
readActions->setStatus(ch.readActions);
readActions->callback(cb_readActions, (void*)this);
vol->value(ch.volume);
vol->callback(cb_changeVol, (void*)this);
- changeSize(H); // Update size dynamically
+ size(w(), h()); // Force responsiveness
}
});
/* If you're recording (input or actions) no menu is allowed; you can't do
- anything, especially deallocate the channel */
+ anything, especially deallocate the channel. */
if (m::recManager::isRecording())
return;
{"Volume", 0, menuCallback, (void*) Menu::CLEAR_ACTIONS_VOLUME},
{"Start/Stop", 0, menuCallback, (void*) Menu::CLEAR_ACTIONS_START_STOP},
{0},
-/* {"Resize", 0, menuCallback, (void*) Menu::RESIZE, FL_SUBMENU},
- {"Normal", 0, menuCallback, (void*) Menu::RESIZE_H1},
- {"Medium", 0, menuCallback, (void*) Menu::RESIZE_H2},
- {"Large", 0, menuCallback, (void*) Menu::RESIZE_H3},
- {"X-Large", 0, menuCallback, (void*) Menu::RESIZE_H4},
- {0},*/
{"Rename", 0, menuCallback, (void*) Menu::RENAME_CHANNEL},
{"Clone", 0, menuCallback, (void*) Menu::CLONE_CHANNEL},
{"Free", 0, menuCallback, (void*) Menu::FREE_CHANNEL},
{
if (c.hasData())
status->redraw();
-
if (c.hasActions) {
- readActions->show();
+ readActions->activate();
readActions->setStatus(c.readActions);
- readActions->redraw();
}
- else
- readActions->hide();
+ else
+ readActions->deactivate();
});
}
/* -------------------------------------------------------------------------- */
+/* geSampleChannel::draw
+Vertically re-centers the fixed-size (G_GUI_UNIT) sub-widgets inside the
+channel strip before delegating to the base-class paint, so they stay
+centered at whatever height the strip currently has. NOTE(review):
+layout work inside draw() runs on every repaint — presumably cheap here,
+but confirm it was intentional vs. doing it in resize(). */
+void geSampleChannel::draw()
+{
+	const int ny = y() + (h() / 2) - (G_GUI_UNIT / 2);
+
+	status->resize(status->x(), ny, G_GUI_UNIT, G_GUI_UNIT);
+	modeBox->resize(modeBox->x(), ny, G_GUI_UNIT, G_GUI_UNIT);
+	readActions->resize(readActions->x(), ny, G_GUI_UNIT, G_GUI_UNIT);
+
+	geChannel::draw();
+}
+
+
+/* -------------------------------------------------------------------------- */
+
+
void geSampleChannel::resize(int X, int Y, int W, int H)
{
geChannel::resize(X, Y, W, H);
fx->hide();
#endif
- m::model::ChannelsLock l(m::model::channels);
- const m::SampleChannel& ch = static_cast<m::SampleChannel&>(m::model::get(m::model::channels, channelId));
-
if (w() > BREAK_ARM)
arm->show();
#ifdef WITH_VST
#endif
if (w() > BREAK_MODE_BOX)
modeBox->show();
- if (w() > BREAK_READ_ACTIONS && ch.hasActions)
+ if (w() > BREAK_READ_ACTIONS)
readActions->show();
packWidgets();
}
-
-
-/* -------------------------------------------------------------------------- */
-
-
-void geSampleChannel::changeSize(int H)
-{
- geChannel::changeSize(H);
-
- int Y = y() + (H / 2 - (G_GUI_UNIT / 2));
-
- status->resize(x(), Y, w(), G_GUI_UNIT);
- modeBox->resize(x(), Y, w(), G_GUI_UNIT);
- readActions->resize(x(), Y, w(), G_GUI_UNIT);
-}
-
}} // giada::v::
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
geSampleChannel(int x, int y, int w, int h, ID channelId);
void resize(int x, int y, int w, int h) override;
+ void draw() override;
void refresh() override;
- void changeSize(int h) override;
- geChannelMode* modeBox;
- geStatusButton* readActions;
+ geChannelMode* modeBox;
+ geStatusButton* readActions;
private:
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* ------------------------------------------------------------------------------
*
-* Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+* Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
namespace v
{
geMainIO::geMainIO(int x, int y)
-: Fl_Pack(x, y, 396, 20)
+: Fl_Pack(x, y, 396, G_GUI_UNIT)
{
type(Fl_Pack::HORIZONTAL);
spacing(G_GUI_INNER_MARGIN);
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
/* An Fl_Menu_Button is made of many Fl_Menu_Item */
Fl_Menu_Item menu[] = {
- {"Open patch or project..."},
- {"Save patch..."},
+ {"Open project..."},
{"Save project..."},
{"Close project"},
#ifndef NDEBUG
const Fl_Menu_Item* m = menu->popup(Fl::event_x(), Fl::event_y(), 0, 0, &b);
if (!m) return;
- if (strcmp(m->label(), "Open patch or project...") == 0) {
- gdWindow* childWin = new gdBrowserLoad("Load patch or project",
- conf::patchPath, c::storage::loadPatch, 0);
+ if (strcmp(m->label(), "Open project...") == 0) {
+ gdWindow* childWin = new gdBrowserLoad("Open project",
+ conf::conf.patchPath, c::storage::loadProject, 0);
u::gui::openSubWindow(G_MainWin, childWin, WID_FILE_BROWSER);
- return;
- }
- if (strcmp(m->label(), "Save patch...") == 0) {
- if (mh::hasLogicalSamples() || mh::hasEditedSamples())
- if (!gdConfirmWin("Warning", "You should save a project in order to store\nyour takes and/or processed samples."))
- return;
- gdWindow* childWin = new gdBrowserSave("Save patch", conf::patchPath,
- patch::name, c::storage::savePatch, 0);
- u::gui::openSubWindow(G_MainWin, childWin, WID_FILE_BROWSER);
- return;
}
+ else
if (strcmp(m->label(), "Save project...") == 0) {
- gdWindow* childWin = new gdBrowserSave("Save project", conf::patchPath,
- patch::name, c::storage::saveProject, 0);
+ gdWindow* childWin = new gdBrowserSave("Save project", conf::conf.patchPath,
+ patch::patch.name, c::storage::saveProject, 0);
u::gui::openSubWindow(G_MainWin, childWin, WID_FILE_BROWSER);
- return;
}
+ else
if (strcmp(m->label(), "Close project") == 0) {
c::main::resetToInitState(/*createColumns=*/true);
- return;
}
#ifndef NDEBUG
+ else
if (strcmp(m->label(), "Debug stats") == 0) {
m::model::debug();
- return;
}
#endif
+ else
if (strcmp(m->label(), "Quit Giada") == 0) {
G_MainWin->do_callback();
- return;
}
}
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
c::main::toggleInputRec();
});
- recTriggerMode->value(m::conf::recTriggerMode);
+ recTriggerMode->value(static_cast<int>(m::conf::conf.recTriggerMode));
recTriggerMode->type(FL_TOGGLE_BUTTON);
recTriggerMode->callback([](Fl_Widget* w, void* v) {
- m::conf::recTriggerMode = static_cast<geButton*>(w)->value();
+ m::conf::conf.recTriggerMode = static_cast<RecTriggerMode>(static_cast<geButton*>(w)->value());
});
metronome->type(FL_TOGGLE_BUTTON);
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
--- /dev/null
+/* -----------------------------------------------------------------------------
+ *
+ * Giada - Your Hardcore Loopmachine
+ *
+ * -----------------------------------------------------------------------------
+ *
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
+ *
+ * This file is part of Giada - Your Hardcore Loopmachine.
+ *
+ * Giada - Your Hardcore Loopmachine is free software: you can
+ * redistribute it and/or modify it under the terms of the GNU General
+ * Public License as published by the Free Software Foundation, either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * Giada - Your Hardcore Loopmachine is distributed in the hope that it
+ * will be useful, but WITHOUT ANY WARRANTY; without even the implied
+ * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+ * See the GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with Giada - Your Hardcore Loopmachine. If not, see
+ * <http://www.gnu.org/licenses/>.
+ *
+ * -------------------------------------------------------------------------- */
+
+
+#include <string>
+#include "utils/string.h"
+#include "gui/elems/basics/boxtypes.h"
+#include "gui/elems/basics/box.h"
+#include "gui/elems/basics/button.h"
+#include "midiLearnerBase.h"
+
+
+namespace giada {
+namespace v
+{
+/* geMidiLearnerBase
+Builds one learner row: [ text label | current-value button | 'learn' toggle ].
+'param' is the parameter index this row binds; 'value' is the initial
+MIDI binding to display (0x0 = not set).
+NOTE(review): m_text receives l.c_str(), a pointer into the by-value
+parameter 'l' that dies when the ctor returns — safe only if geBox
+copies the label internally; confirm. */
+geMidiLearnerBase::geMidiLearnerBase(int X, int Y, int W, std::string l, int param, uint32_t value)
+: Fl_Group (X, Y, W, 20),
+  m_param (param)
+{
+	begin();
+	m_text = new geBox(x(), y(), 156, 20, l.c_str());
+	m_valueBtn = new geButton(m_text->x()+m_text->w()+4, y(), 80, 20);
+	m_button = new geButton(m_valueBtn->x()+m_valueBtn->w()+4, y(), 40, 20, "learn");
+	end();
+
+	m_text->box(G_CUSTOM_BORDER_BOX);
+	m_text->align(FL_ALIGN_LEFT | FL_ALIGN_INSIDE);
+
+	m_valueBtn->box(G_CUSTOM_BORDER_BOX);
+	m_valueBtn->callback(cb_value, (void*)this);
+	m_valueBtn->when(FL_WHEN_RELEASE);   // fire onReset on release only
+
+	m_button->type(FL_TOGGLE_BUTTON);    // learn is a press-again-to-cancel toggle
+	m_button->callback(cb_button, (void*)this);
+
+	update(value);
+}
+
+
+/* -------------------------------------------------------------------------- */
+
+
+/* Static FLTK callback trampolines: recover the learner instance from the
+user-data pointer and forward to the virtual handlers implemented by the
+concrete subclasses (channel / master / plugin). */
+void geMidiLearnerBase::cb_button(Fl_Widget* v, void* p) { ((geMidiLearnerBase*)p)->onLearn(); }
+void geMidiLearnerBase::cb_value(Fl_Widget* v, void* p) { ((geMidiLearnerBase*)p)->onReset(); }
+
+
+/* -------------------------------------------------------------------------- */
+
+
+/* geMidiLearnerBase::update
+Repaints the value button with 'value' rendered as a hex MIDI message
+("(not set)" when 0x0) and releases the 'learn' toggle, ending any
+visual learning state. */
+void geMidiLearnerBase::update(uint32_t value)
+{
+	std::string tmp = "(not set)";
+
+	if (value != 0x0) {
+		tmp = "0x" + u::string::iToString(value, /*hex=*/true);
+		tmp.pop_back(); // Remove last two digits, useless in MIDI messages
+		tmp.pop_back(); // Remove last two digits, useless in MIDI messages
+	}
+
+	m_valueBtn->copy_label(tmp.c_str());
+	m_button->value(0);
+}
+
+
+/* -------------------------------------------------------------------------- */
+
+
+/* activate / deactivate
+Enable or disable the whole learner row. NOTE(review): Fl_Group
+activate/deactivate already renders children as (in)active; the explicit
+per-child calls look redundant but keep each child's own active() flag
+in sync — confirm they are needed. */
+void geMidiLearnerBase::activate()
+{
+	Fl_Group::activate();
+	m_valueBtn->activate();
+	m_button->activate();
+}
+
+
+void geMidiLearnerBase::deactivate()
+{
+	Fl_Group::deactivate();
+	m_valueBtn->deactivate();
+	m_button->deactivate();
+}
+}} // giada::v::
--- /dev/null
+/* -----------------------------------------------------------------------------
+ *
+ * Giada - Your Hardcore Loopmachine
+ *
+ * -----------------------------------------------------------------------------
+ *
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
+ *
+ * This file is part of Giada - Your Hardcore Loopmachine.
+ *
+ * Giada - Your Hardcore Loopmachine is free software: you can
+ * redistribute it and/or modify it under the terms of the GNU General
+ * Public License as published by the Free Software Foundation, either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * Giada - Your Hardcore Loopmachine is distributed in the hope that it
+ * will be useful, but WITHOUT ANY WARRANTY; without even the implied
+ * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+ * See the GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with Giada - Your Hardcore Loopmachine. If not, see
+ * <http://www.gnu.org/licenses/>.
+ *
+ * -------------------------------------------------------------------------- */
+
+
+#ifndef GE_MIDI_LEARNER_BASE_H
+#define GE_MIDI_LEARNER_BASE_H
+
+
+#include <string>
+#include <FL/Fl_Group.H>
+
+
+class geBox;
+class geButton;
+
+
+namespace giada {
+namespace v
+{
+/* geMidiLearnerBase
+Abstract base for a MIDI-learn UI row: [ label | learned value | learn ].
+Concrete subclasses decide where the learned value lives in the model and
+how learning is started, stopped and cleared. */
+class geMidiLearnerBase : public Fl_Group
+{
+public:
+
+	virtual ~geMidiLearnerBase() = default;
+
+	/* refresh
+	Re-reads the current binding from the model and repaints the row. */
+
+	virtual void refresh() = 0;
+
+	/* onLearn
+	Invoked when the 'learn' toggle changes state. */
+
+	virtual void onLearn() = 0;
+
+	/* onReset
+	Invoked on value-button release; subclasses clear the binding on
+	right-click. */
+
+	virtual void onReset() = 0;
+
+	void activate();
+	void deactivate();
+
+protected:
+
+	geMidiLearnerBase(int x, int y, int w, std::string l, int param, uint32_t value);
+
+	/* update
+	Updates and repaints the label widget with value 'value'. */
+
+	void update(uint32_t value);
+
+	/* m_param
+	Parameter index to be learnt. */
+
+	int m_param;
+
+	geBox* m_text;
+	geButton* m_valueBtn;
+	geButton* m_button;
+
+private:
+
+	static void cb_button(Fl_Widget* v, void* p);
+	static void cb_value (Fl_Widget* v, void* p);
+};
+}} // giada::v::
+
+
+#endif
--- /dev/null
+/* -----------------------------------------------------------------------------
+ *
+ * Giada - Your Hardcore Loopmachine
+ *
+ * -----------------------------------------------------------------------------
+ *
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
+ *
+ * This file is part of Giada - Your Hardcore Loopmachine.
+ *
+ * Giada - Your Hardcore Loopmachine is free software: you can
+ * redistribute it and/or modify it under the terms of the GNU General
+ * Public License as published by the Free Software Foundation, either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * Giada - Your Hardcore Loopmachine is distributed in the hope that it
+ * will be useful, but WITHOUT ANY WARRANTY; without even the implied
+ * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+ * See the GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with Giada - Your Hardcore Loopmachine. If not, see
+ * <http://www.gnu.org/licenses/>.
+ *
+ * -------------------------------------------------------------------------- */
+
+
+#include <FL/Fl.H>
+#include "core/model/model.h"
+#include "core/channels/sampleChannel.h"
+#include "glue/io.h"
+#include "gui/elems/basics/button.h"
+#include "midiLearnerChannel.h"
+
+
+namespace giada {
+namespace v
+{
+geMidiLearnerChannel::geMidiLearnerChannel(int x, int y, int w, std::string l, int param, uint32_t value, ID channelId)
+: geMidiLearnerBase(x, y, w, l, param, value),
+ m_channelId (channelId)
+{
+}
+
+
+/* -------------------------------------------------------------------------- */
+
+
+/* geMidiLearnerChannel::refresh
+Reads the current MIDI binding for 'm_param' from the channel model and
+repaints the value label. Fixed: the MUTE/SOLO/VOLUME cases were
+cross-wired (MUTE displayed midiInVolume, SOLO displayed midiInMute,
+VOLUME displayed midiInSolo); each parameter now reads its own field. */
+void geMidiLearnerChannel::refresh()
+{
+	m::model::onGet(m::model::channels, m_channelId, [&](const m::Channel& c)
+	{
+		switch (m_param) {
+			case G_MIDI_IN_KEYPRESS     : update(c.midiInKeyPress); break;
+			case G_MIDI_IN_KEYREL       : update(c.midiInKeyRel); break;
+			case G_MIDI_IN_KILL         : update(c.midiInKill); break;
+			case G_MIDI_IN_ARM          : update(c.midiInArm); break;
+			case G_MIDI_IN_MUTE         : update(c.midiInMute); break;
+			case G_MIDI_IN_SOLO         : update(c.midiInSolo); break;
+			case G_MIDI_IN_VOLUME       : update(c.midiInVolume); break;
+			/* NOTE(review): the params below are Sample-Channel-only; the
+			unchecked static_cast assumes this learner is never created for
+			other channel types — confirm at the call sites. */
+			case G_MIDI_IN_PITCH        : update(static_cast<const m::SampleChannel&>(c).midiInPitch); break;
+			case G_MIDI_IN_READ_ACTIONS : update(static_cast<const m::SampleChannel&>(c).midiInReadActions); break;
+			case G_MIDI_OUT_L_PLAYING   : update(static_cast<const m::SampleChannel&>(c).midiOutLplaying); break;
+			case G_MIDI_OUT_L_MUTE      : update(static_cast<const m::SampleChannel&>(c).midiOutLmute); break;
+			case G_MIDI_OUT_L_SOLO      : update(static_cast<const m::SampleChannel&>(c).midiOutLsolo); break;
+		}
+	});
+}
+
+
+/* -------------------------------------------------------------------------- */
+
+
+/* geMidiLearnerChannel::onLearn
+Toggle handler for the 'learn' button: pressed (value 1) starts listening
+for the next incoming MIDI event to bind to (m_param, m_channelId);
+released cancels an in-progress learn. */
+void geMidiLearnerChannel::onLearn()
+{
+	if (m_button->value() == 1)
+		c::io::startChannelMidiLearn(m_param, m_channelId);
+	else
+		c::io::stopMidiLearn();
+}
+
+
+/* geMidiLearnerChannel::onReset
+Right-click on the value button clears the current binding. */
+void geMidiLearnerChannel::onReset()
+{
+	if (Fl::event_button() == FL_RIGHT_MOUSE)
+		c::io::clearChannelMidiLearn(m_param, m_channelId);
+}
+}} // giada::v::
--- /dev/null
+/* -----------------------------------------------------------------------------
+ *
+ * Giada - Your Hardcore Loopmachine
+ *
+ * -----------------------------------------------------------------------------
+ *
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
+ *
+ * This file is part of Giada - Your Hardcore Loopmachine.
+ *
+ * Giada - Your Hardcore Loopmachine is free software: you can
+ * redistribute it and/or modify it under the terms of the GNU General
+ * Public License as published by the Free Software Foundation, either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * Giada - Your Hardcore Loopmachine is distributed in the hope that it
+ * will be useful, but WITHOUT ANY WARRANTY; without even the implied
+ * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+ * See the GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with Giada - Your Hardcore Loopmachine. If not, see
+ * <http://www.gnu.org/licenses/>.
+ *
+ * -------------------------------------------------------------------------- */
+
+
+#ifndef GE_MIDI_LEARNER_CHANNEL_H
+#define GE_MIDI_LEARNER_CHANNEL_H
+
+
+#include "midiLearnerBase.h"
+
+
+namespace giada {
+namespace v
+{
+class geMidiLearnerChannel : public geMidiLearnerBase
+{
+public:
+
+ geMidiLearnerChannel(int x, int y, int w, std::string l, int param, uint32_t value, ID channelId);
+
+ void refresh() override;
+ void onLearn() override;
+ void onReset() override;
+
+private:
+
+ ID m_channelId;
+};
+}} // giada::v::
+
+
+#endif
--- /dev/null
+/* -----------------------------------------------------------------------------
+ *
+ * Giada - Your Hardcore Loopmachine
+ *
+ * -----------------------------------------------------------------------------
+ *
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
+ *
+ * This file is part of Giada - Your Hardcore Loopmachine.
+ *
+ * Giada - Your Hardcore Loopmachine is free software: you can
+ * redistribute it and/or modify it under the terms of the GNU General
+ * Public License as published by the Free Software Foundation, either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * Giada - Your Hardcore Loopmachine is distributed in the hope that it
+ * will be useful, but WITHOUT ANY WARRANTY; without even the implied
+ * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+ * See the GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with Giada - Your Hardcore Loopmachine. If not, see
+ * <http://www.gnu.org/licenses/>.
+ *
+ * -------------------------------------------------------------------------- */
+
+
+#include <FL/Fl.H>
+#include "core/model/model.h"
+#include "glue/io.h"
+#include "gui/elems/basics/button.h"
+#include "midiLearnerMaster.h"
+
+
+namespace giada {
+namespace v
+{
+geMidiLearnerMaster::geMidiLearnerMaster(int X, int Y, int W, std::string l, int param, uint32_t value)
+: geMidiLearnerBase(X, Y, W, l, param, value)
+{
+}
+
+
+/* -------------------------------------------------------------------------- */
+
+
+/* geMidiLearnerMaster::refresh
+Reads the current global MIDI binding for 'm_param' from the MidiIn model
+and repaints the value label. Fixed: the METRONOME through BEAT_HALF
+cases were shifted by one (each parameter displayed the next parameter's
+value, and BEAT_HALF wrapped around to metronome); every case now reads
+its matching model field. */
+void geMidiLearnerMaster::refresh()
+{
+	m::model::onGet(m::model::midiIn, [&](const m::model::MidiIn& m)
+	{
+		switch (m_param) {
+			case G_MIDI_IN_REWIND      : update(m.rewind); break;
+			case G_MIDI_IN_START_STOP  : update(m.startStop); break;
+			case G_MIDI_IN_ACTION_REC  : update(m.actionRec); break;
+			case G_MIDI_IN_INPUT_REC   : update(m.inputRec); break;
+			case G_MIDI_IN_METRONOME   : update(m.metronome); break;
+			case G_MIDI_IN_VOLUME_IN   : update(m.volumeIn); break;
+			case G_MIDI_IN_VOLUME_OUT  : update(m.volumeOut); break;
+			case G_MIDI_IN_BEAT_DOUBLE : update(m.beatDouble); break;
+			case G_MIDI_IN_BEAT_HALF   : update(m.beatHalf); break;
+		}
+	});
+}
+
+
+/* -------------------------------------------------------------------------- */
+
+
+void geMidiLearnerMaster::onLearn()
+{
+ if (m_button->value() == 1)
+ c::io::startMasterMidiLearn(m_param);
+ else
+ c::io::stopMidiLearn();
+}
+
+
+void geMidiLearnerMaster::onReset()
+{
+ if (Fl::event_button() == FL_RIGHT_MOUSE)
+ c::io::clearMasterMidiLearn(m_param);
+}
+}} // giada::v::
--- /dev/null
+/* -----------------------------------------------------------------------------
+ *
+ * Giada - Your Hardcore Loopmachine
+ *
+ * -----------------------------------------------------------------------------
+ *
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
+ *
+ * This file is part of Giada - Your Hardcore Loopmachine.
+ *
+ * Giada - Your Hardcore Loopmachine is free software: you can
+ * redistribute it and/or modify it under the terms of the GNU General
+ * Public License as published by the Free Software Foundation, either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * Giada - Your Hardcore Loopmachine is distributed in the hope that it
+ * will be useful, but WITHOUT ANY WARRANTY; without even the implied
+ * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+ * See the GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with Giada - Your Hardcore Loopmachine. If not, see
+ * <http://www.gnu.org/licenses/>.
+ *
+ * -------------------------------------------------------------------------- */
+
+
+#ifndef GE_MIDI_LEARNER_MASTER_H
+#define GE_MIDI_LEARNER_MASTER_H
+
+
+#include "midiLearnerBase.h"
+
+
+namespace giada {
+namespace v
+{
+class geMidiLearnerMaster : public geMidiLearnerBase
+{
+public:
+
+ geMidiLearnerMaster(int x, int y, int w, std::string l, int param, uint32_t value);
+
+ void refresh() override;
+ void onLearn() override;
+ void onReset() override;
+};
+}} // giada::v::
+
+
+#endif
--- /dev/null
+/* -----------------------------------------------------------------------------
+ *
+ * Giada - Your Hardcore Loopmachine
+ *
+ * -----------------------------------------------------------------------------
+ *
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
+ *
+ * This file is part of Giada - Your Hardcore Loopmachine.
+ *
+ * Giada - Your Hardcore Loopmachine is free software: you can
+ * redistribute it and/or modify it under the terms of the GNU General
+ * Public License as published by the Free Software Foundation, either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * Giada - Your Hardcore Loopmachine is distributed in the hope that it
+ * will be useful, but WITHOUT ANY WARRANTY; without even the implied
+ * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+ * See the GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with Giada - Your Hardcore Loopmachine. If not, see
+ * <http://www.gnu.org/licenses/>.
+ *
+ * -------------------------------------------------------------------------- */
+
+
+#ifdef WITH_VST
+
+
+#include <FL/Fl.H>
+#include "core/model/model.h"
+#include "glue/io.h"
+#include "gui/elems/basics/button.h"
+#include "midiLearnerPlugin.h"
+
+
+namespace giada {
+namespace v
+{
+geMidiLearnerPlugin::geMidiLearnerPlugin(int x, int y, int w, std::string l, int param, uint32_t value, ID pluginId)
+: geMidiLearnerBase(x, y, w, l, param, value),
+ m_pluginId (pluginId)
+{
+}
+
+
+/* -------------------------------------------------------------------------- */
+
+
+/* geMidiLearnerPlugin::refresh
+Reads the learned MIDI value for plug-in parameter 'm_param' from the
+model and repaints the value label. The assert guards against a stale
+parameter index after the plug-in's parameter list changes. */
+void geMidiLearnerPlugin::refresh()
+{
+	m::model::onGet(m::model::plugins, m_pluginId, [&](const m::Plugin& p)
+	{
+		assert(static_cast<size_t>(m_param) < p.midiInParams.size());
+		update(p.midiInParams[m_param]);
+	});
+}
+
+
+/* -------------------------------------------------------------------------- */
+
+
+void geMidiLearnerPlugin::onLearn()
+{
+ if (m_button->value() == 1)
+ c::io::startPluginMidiLearn(m_param, m_pluginId);
+ else
+ c::io::stopMidiLearn();
+}
+
+
+void geMidiLearnerPlugin::onReset()
+{
+ if (Fl::event_button() == FL_RIGHT_MOUSE)
+ c::io::clearPluginMidiLearn(m_param, m_pluginId);
+}
+}} // giada::v::
+
+
+#endif
\ No newline at end of file
--- /dev/null
+/* -----------------------------------------------------------------------------
+ *
+ * Giada - Your Hardcore Loopmachine
+ *
+ * -----------------------------------------------------------------------------
+ *
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
+ *
+ * This file is part of Giada - Your Hardcore Loopmachine.
+ *
+ * Giada - Your Hardcore Loopmachine is free software: you can
+ * redistribute it and/or modify it under the terms of the GNU General
+ * Public License as published by the Free Software Foundation, either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * Giada - Your Hardcore Loopmachine is distributed in the hope that it
+ * will be useful, but WITHOUT ANY WARRANTY; without even the implied
+ * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+ * See the GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with Giada - Your Hardcore Loopmachine. If not, see
+ * <http://www.gnu.org/licenses/>.
+ *
+ * -------------------------------------------------------------------------- */
+
+
+#ifdef WITH_VST
+
+
+#ifndef GE_MIDI_LEARNER_PLUGIN_H
+#define GE_MIDI_LEARNER_PLUGIN_H
+
+
+#include "core/types.h"
+#include "midiLearnerBase.h"
+
+
+namespace giada {
+namespace v
+{
+class geMidiLearnerPlugin : public geMidiLearnerBase
+{
+public:
+
+ geMidiLearnerPlugin(int x, int y, int w, std::string l, int param, uint32_t value, ID pluginId);
+
+ void refresh() override;
+ void onLearn() override;
+ void onReset() override;
+
+private:
+
+ ID m_pluginId;
+};
+}} // giada::v::
+
+
+#endif
+#endif
+++ /dev/null
-/* -----------------------------------------------------------------------------
- *
- * Giada - Your Hardcore Loopmachine
- *
- * -----------------------------------------------------------------------------
- *
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
- *
- * This file is part of Giada - Your Hardcore Loopmachine.
- *
- * Giada - Your Hardcore Loopmachine is free software: you can
- * redistribute it and/or modify it under the terms of the GNU General
- * Public License as published by the Free Software Foundation, either
- * version 3 of the License, or (at your option) any later version.
- *
- * Giada - Your Hardcore Loopmachine is distributed in the hope that it
- * will be useful, but WITHOUT ANY WARRANTY; without even the implied
- * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
- * See the GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with Giada - Your Hardcore Loopmachine. If not, see
- * <http://www.gnu.org/licenses/>.
- *
- * -------------------------------------------------------------------------- */
-
-
-#include "utils/string.h"
-#include "core/midiDispatcher.h"
-#include "core/midiEvent.h"
-#include "glue/io.h"
-#include "gui/dialogs/midiIO/midiInputBase.h"
-#include "basics/boxtypes.h"
-#include "basics/button.h"
-#include "basics/box.h"
-#include "midiLearner.h"
-
-
-namespace giada {
-namespace v
-{
-geMidiLearner::geMidiLearner(int X, int Y, int W, const char* l,
- std::atomic<uint32_t>& param, ID channelId)
-: Fl_Group (X, Y, W, 20),
- m_channelId(channelId),
- m_param (param)
-{
- begin();
- m_text = new geBox(x(), y(), 156, 20, l);
- m_value = new geButton(m_text->x()+m_text->w()+4, y(), 80, 20);
- m_button = new geButton(m_value->x()+m_value->w()+4, y(), 40, 20, "learn");
- end();
-
- m_text->box(G_CUSTOM_BORDER_BOX);
- m_text->align(FL_ALIGN_LEFT | FL_ALIGN_INSIDE);
-
- m_value->box(G_CUSTOM_BORDER_BOX);
- m_value->callback(cb_value, (void*)this);
- m_value->when(FL_WHEN_RELEASE);
-
- m_button->type(FL_TOGGLE_BUTTON);
- m_button->callback(cb_button, (void*)this);
-
- refresh();
-}
-
-
-/* -------------------------------------------------------------------------- */
-
-
-void geMidiLearner::refresh()
-{
- std::string tmp = "(not set)";
-
- if (m_param != 0x0) {
- tmp = "0x" + u::string::iToString(m_param.load(), true); // true: hex mode
- tmp.pop_back(); // Remove last two digits, useless in MIDI messages
- tmp.pop_back(); // Remove last two digits, useless in MIDI messages
- }
-
- m_value->copy_label(tmp.c_str());
- m_button->value(0);
-}
-
-
-/* -------------------------------------------------------------------------- */
-
-
-void geMidiLearner::cb_button(Fl_Widget* v, void* p) { ((geMidiLearner*)p)->cb_button(); }
-void geMidiLearner::cb_value(Fl_Widget* v, void* p) { ((geMidiLearner*)p)->cb_value(); }
-
-
-/* -------------------------------------------------------------------------- */
-
-
-void geMidiLearner::cb_value()
-{
- if (Fl::event_button() == FL_RIGHT_MOUSE)
- c::io::midiLearn(m::MidiEvent(), m_param, m_channelId); // Empty event (0x0)
-}
-
-
-/* -------------------------------------------------------------------------- */
-
-
-void geMidiLearner::cb_button()
-{
- if (m_button->value() == 1)
- m::midiDispatcher::startMidiLearn([this](m::MidiEvent e)
- {
- c::io::midiLearn(e, m_param, m_channelId);
- });
- else
- m::midiDispatcher::stopMidiLearn();
-}
-}} // giada::v::
+++ /dev/null
-/* -----------------------------------------------------------------------------
- *
- * Giada - Your Hardcore Loopmachine
- *
- * -----------------------------------------------------------------------------
- *
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
- *
- * This file is part of Giada - Your Hardcore Loopmachine.
- *
- * Giada - Your Hardcore Loopmachine is free software: you can
- * redistribute it and/or modify it under the terms of the GNU General
- * Public License as published by the Free Software Foundation, either
- * version 3 of the License, or (at your option) any later version.
- *
- * Giada - Your Hardcore Loopmachine is distributed in the hope that it
- * will be useful, but WITHOUT ANY WARRANTY; without even the implied
- * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
- * See the GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with Giada - Your Hardcore Loopmachine. If not, see
- * <http://www.gnu.org/licenses/>.
- *
- * -------------------------------------------------------------------------- */
-
-
-#ifndef GE_MIDI_LEARNER_H
-#define GE_MIDI_LEARNER_H
-
-
-#include <atomic>
-#include <FL/Fl_Group.H>
-#include "core/types.h"
-
-
-class geBox;
-class geButton;
-
-
-namespace giada {
-namespace m
-{
-class Channel;
-}
-namespace v
-{
-class geMidiLearner : public Fl_Group
-{
-public:
-
- geMidiLearner(int x, int y, int w, const char* l, std::atomic<uint32_t>& param,
- ID channelId);
-
- void refresh();
-
-private:
-
- static void cb_button(Fl_Widget* v, void* p);
- static void cb_value (Fl_Widget* v, void* p);
- void cb_button();
- void cb_value();
-
- /* m_channelId
- Channel it belongs to. Might be 0 if the learner comes from the MIDI input
- master window. */
-
- ID m_channelId;
-
- /* m_param
- Reference to ch->midiIn[value]. */
-
- std::atomic<uint32_t>& m_param;
-
- geBox* m_text;
- geButton* m_value;
- geButton* m_button;
-};
-}} // giada::v::
-
-
-#endif
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
{
m_data.size = w;
- m_grid.snap = m::conf::sampleEditorGridOn;
- m_grid.level = m::conf::sampleEditorGridVal;
+ m_grid.snap = m::conf::conf.sampleEditorGridOn;
+ m_grid.level = m::conf::conf.sampleEditorGridVal;
}
float tp;
m::model::onGet(m::model::channels, m_channelId, [&](m::Channel& c)
{
- tp = static_cast<m::SampleChannel&>(c).trackerPreview.load();
+ tp = static_cast<m::SampleChannel&>(c).trackerPreview;
});
int p = frameToPixel(tp) + x();
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
--- /dev/null
+/* -----------------------------------------------------------------------------
+ *
+ * Giada - Your Hardcore Loopmachine
+ *
+ * -----------------------------------------------------------------------------
+ *
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
+ *
+ * This file is part of Giada - Your Hardcore Loopmachine.
+ *
+ * Giada - Your Hardcore Loopmachine is free software: you can
+ * redistribute it and/or modify it under the terms of the GNU General
+ * Public License as published by the Free Software Foundation, either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * Giada - Your Hardcore Loopmachine is distributed in the hope that it
+ * will be useful, but WITHOUT ANY WARRANTY; without even the implied
+ * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+ * See the GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with Giada - Your Hardcore Loopmachine. If not, see
+ * <http://www.gnu.org/licenses/>.
+ *
+ * -------------------------------------------------------------------------- */
+
+
+#include "core/patch.h"
+#include "utils/log.h"
+#include "gui/dialogs/mainWindow.h"
+#include "gui/elems/mainWindow/keyboard/keyboard.h"
+#include "gui/elems/mainWindow/keyboard/column.h"
+#include "gui/elems/mainWindow/keyboard/channel.h"
+#include "model.h"
+
+
+extern giada::v::gdMainWindow* G_MainWin;
+
+
+namespace giada {
+namespace v {
+namespace model
+{
+void store(m::patch::Patch& patch)
+{
+ G_MainWin->keyboard->forEachColumn([&](const geColumn& c)
+ {
+ patch.columns.push_back({ c.id, c.w() });
+ });
+}
+
+
+/* -------------------------------------------------------------------------- */
+
+
+void load(const m::patch::Patch& patch)
+{
+ G_MainWin->keyboard->layout.clear();
+ for (const m::patch::Column& col : patch.columns)
+ G_MainWin->keyboard->layout.push_back({ col.id, col.width });
+}
+}}} // giada::v::model
--- /dev/null
+/* -----------------------------------------------------------------------------
+ *
+ * Giada - Your Hardcore Loopmachine
+ *
+ * -----------------------------------------------------------------------------
+ *
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
+ *
+ * This file is part of Giada - Your Hardcore Loopmachine.
+ *
+ * Giada - Your Hardcore Loopmachine is free software: you can
+ * redistribute it and/or modify it under the terms of the GNU General
+ * Public License as published by the Free Software Foundation, either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * Giada - Your Hardcore Loopmachine is distributed in the hope that it
+ * will be useful, but WITHOUT ANY WARRANTY; without even the implied
+ * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+ * See the GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with Giada - Your Hardcore Loopmachine. If not, see
+ * <http://www.gnu.org/licenses/>.
+ *
+ * -------------------------------------------------------------------------- */
+
+
+#ifndef G_V_MODEL_H
+#define G_V_MODEL_H
+
+
+namespace giada {
+namespace m { namespace patch
+{
+struct Patch;
+}}
+namespace v {
+namespace model
+{
+void store(m::patch::Patch& patch);
+void load(const m::patch::Patch& patch);
+}}} // giada::v::model
+
+
+#endif
\ No newline at end of file
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
return out;
}
+
+/* -------------------------------------------------------------------------- */
+
+
+int centerWindowX(int w)
+{
+ return (Fl::w() / 2) - (w / 2);
+}
+
+
+int centerWindowY(int h)
+{
+ return (Fl::h() / 2) - (h / 2);
+}
}}} // giada::u::gui::
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
int getStringWidth(const std::string& s);
+int centerWindowX(int w);
+int centerWindowY(int h);
+
}}} // giada::u::gui::
+++ /dev/null
-/* -----------------------------------------------------------------------------
- *
- * Giada - Your Hardcore Loopmachine
- *
- * -----------------------------------------------------------------------------
- *
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
- *
- * This file is part of Giada - Your Hardcore Loopmachine.
- *
- * Giada - Your Hardcore Loopmachine is free software: you can
- * redistribute it and/or modify it under the terms of the GNU General
- * Public License as published by the Free Software Foundation, either
- * version 3 of the License, or (at your option) any later version.
- *
- * Giada - Your Hardcore Loopmachine is distributed in the hope that it
- * will be useful, but WITHOUT ANY WARRANTY; without even the implied
- * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
- * See the GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with Giada - Your Hardcore Loopmachine. If not, see
- * <http://www.gnu.org/licenses/>.
- *
- * -------------------------------------------------------------------------- */
-
-
-#include <jansson.h>
-#include "utils/log.h"
-#include "json.h"
-
-
-namespace giada {
-namespace u {
-namespace json
-{
-namespace
-{
-/* jsonIs, jsonGet
-Tiny wrappers around the old C-style macros provided by Jansson. This way we can
-pass them as template parameters. */
-
-bool jsonIsString_(json_t* j) { return json_is_string(j); }
-bool jsonIsInt_ (json_t* j) { return json_is_integer(j); }
-bool jsonIsFloat_ (json_t* j) { return json_is_real(j); }
-bool jsonIsBool_ (json_t* j) { return json_is_boolean(j); }
-bool jsonIsArray_ (json_t* j) { return json_is_array(j); }
-bool jsonIsObject_(json_t* j) { return json_is_object(j); }
-
-std::string jsonGetString_(json_t* j) { return json_string_value(j); }
-uint32_t jsonGetInt_ (json_t* j) { return json_integer_value(j); }
-float jsonGetFloat_ (json_t* j) { return json_real_value(j); }
-bool jsonGetBool_ (json_t* j) { return json_boolean_value(j); }
-
-
-/* -------------------------------------------------------------------------- */
-
-
-template<typename F>
-bool is_(json_t* j, F f)
-{
- if (!f(j)) {
- u::log::print("[patch::is_] malformed json!\n");
- json_decref(j);
- return false;
- }
- return true;
-}
-
-
-/* -------------------------------------------------------------------------- */
-
-
-template<typename O, typename FC, typename FG>
-O read_(json_t* j, const char* key, FC checker, FG getter, O def)
-{
- json_t* jo = json_object_get(j, key);
- if (jo == nullptr) {
- u::log::print("[patch::read_] key '%s' not found, using default value\n", key);
- return def;
- }
- if (!checker(jo)) {
- u::log::print("[patch::read_] key '%s' is of the wrong type, using default value\n", key);
- return def;
- }
- return getter(jo);
-}
-} // {anonymous}
-
-
-/* -------------------------------------------------------------------------- */
-/* -------------------------------------------------------------------------- */
-/* -------------------------------------------------------------------------- */
-
-
-std::string readString(json_t* j, const char* key, const std::string& def)
-{
- return read_(j, key, jsonIsString_, jsonGetString_, def);
-}
-
-uint32_t readInt(json_t* j, const char* key, uint32_t def)
-{
- return read_(j, key, jsonIsInt_, jsonGetInt_, def);
-}
-
-float readFloat(json_t* j, const char* key, float def)
-{
- return read_(j, key, jsonIsFloat_, jsonGetFloat_, def);
-}
-
-bool readBool(json_t* j, const char* key, bool def)
-{
- return read_(j, key, jsonIsBool_, jsonGetBool_, def);
-}
-
-
-/* -------------------------------------------------------------------------- */
-
-
-bool isArray(json_t* j) { return is_(j, jsonIsArray_); };
-bool isObject(json_t* j) { return is_(j, jsonIsObject_); };
-
-
-/* -------------------------------------------------------------------------- */
-
-
-json_t* load(const std::string& file)
-{
- json_error_t jerr;
- json_t* j = json_load_file(file.c_str(), 0, &jerr);
- if (j == nullptr)
- u::log::print("[u::json::load] unable to read json file! Error on line %d: %s\n",
- jerr.line, jerr.text);
- return j;
-}
-
-}}} // giada::u::json::
+++ /dev/null
-/* -----------------------------------------------------------------------------
- *
- * Giada - Your Hardcore Loopmachine
- *
- * -----------------------------------------------------------------------------
- *
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
- *
- * This file is part of Giada - Your Hardcore Loopmachine.
- *
- * Giada - Your Hardcore Loopmachine is free software: you can
- * redistribute it and/or modify it under the terms of the GNU General
- * Public License as published by the Free Software Foundation, either
- * version 3 of the License, or (at your option) any later version.
- *
- * Giada - Your Hardcore Loopmachine is distributed in the hope that it
- * will be useful, but WITHOUT ANY WARRANTY; without even the implied
- * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
- * See the GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with Giada - Your Hardcore Loopmachine. If not, see
- * <http://www.gnu.org/licenses/>.
- *
- * -------------------------------------------------------------------------- */
-
-
-#ifndef G_UTILS_JSON_H
-#define G_UTILS_JSON_H
-
-
-#include <string>
-#include <jansson.h>
-
-
-namespace giada {
-namespace u {
-namespace json
-{
-std::string readString(json_t* j, const char* key, const std::string& def="");
-uint32_t readInt(json_t* j, const char* key, uint32_t def=0);
-float readFloat(json_t* j, const char* key, float def=0.0f);
-bool readBool(json_t* j, const char* key, bool def=false);
-
-bool isArray(json_t* j);
-bool isObject(json_t* j);
-
-json_t* load(const std::string& file);
-}}} // giada::u::json::
-
-
-#endif
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
* -------------------------------------------------------------------------- */
-#include "../core/const.h"
+#include "core/const.h"
#ifdef G_OS_MAC
#include <RtMidi.h>
#else
#include <rtmidi/RtMidi.h>
#endif
#include <sndfile.h>
-#include "../deps/rtaudio-mod/RtAudio.h"
+#include "deps/rtaudio/RtAudio.h"
#include "ver.h"
-using std::string;
-
-
namespace giada {
-namespace u {
+namespace u {
namespace ver
{
-string getLibsndfileVersion()
+std::string getLibsndfileVersion()
{
- char buffer[128];
- sf_command(NULL, SFC_GET_LIB_VERSION, buffer, sizeof(buffer));
- return string(buffer);
+ char buffer[128];
+ sf_command(nullptr, SFC_GET_LIB_VERSION, buffer, sizeof(buffer));
+ return std::string(buffer);
}
/* -------------------------------------------------------------------------- */
-string getRtAudioVersion()
+std::string getRtAudioVersion()
{
#ifdef TESTS
- return "";
+ return "";
#else
- return RtAudio::getVersion();
+ return RtAudio::getVersion();
#endif
}
/* -------------------------------------------------------------------------- */
-string getRtMidiVersion()
+std::string getRtMidiVersion()
{
#ifdef TESTS
- return "";
+ return "";
#else
- return RtMidi::getVersion();
+ return RtMidi::getVersion();
#endif
}
}}}; // giada::u::ver::
*
* -----------------------------------------------------------------------------
*
- * Copyright (C) 2010-2019 Giovanni A. Zuliani | Monocasual
+ * Copyright (C) 2010-2020 Giovanni A. Zuliani | Monocasual
*
* This file is part of Giada - Your Hardcore Loopmachine.
*
+++ /dev/null
-#include "../src/core/const.h"
-#include "../src/core/conf.h"
-#include <catch.hpp>
-
-
-using std::string;
-using namespace giada::m;
-
-
-TEST_CASE("conf")
-{
- conf::init();
-
- SECTION("test write")
- {
- conf::header = "GIADACONFTEST";
- conf::logMode = 1;
- conf::soundSystem = 2;
- conf::soundDeviceOut = 3;
- conf::soundDeviceIn = 4;
- conf::channelsOut = 5;
- conf::channelsIn = 6;
- conf::samplerate = 7;
- conf::buffersize = 8;
- conf::limitOutput = true;
- conf::rsmpQuality = 10;
- conf::midiSystem = 11;
- conf::midiPortOut = 12;
- conf::midiPortIn = 13;
- conf::midiMapPath = "path/to/midi/map";
- conf::lastFileMap = "path/to/last/midi/map";
- conf::midiSync = 14;
- conf::midiTCfps = 15.1f;
- conf::midiInRewind = 16;
- conf::midiInStartStop = 17;
- conf::midiInActionRec = 18;
- conf::midiInInputRec = 19;
- conf::midiInMetronome = 20;
- conf::midiInVolumeIn = 21;
- conf::midiInVolumeOut = 22;
- conf::midiInBeatDouble = 23;
- conf::midiInBeatHalf = 24;
- conf::recsStopOnChanHalt = true;
- conf::chansStopOnSeqHalt = false;
- conf::treatRecsAsLoops = true;
- conf::pluginPath = "path/to/plugins";
- conf::patchPath = "path/to/patches";
- conf::samplePath = "path/to/samples";
- conf::mainWindowX = 0;
- conf::mainWindowY = 0;
- conf::mainWindowW = 800;
- conf::mainWindowH = 600;
- conf::browserX = 0;
- conf::browserY = 0;
- conf::browserW = 800;
- conf::browserH = 600;
- conf::actionEditorX = 0;
- conf::actionEditorY = 0;
- conf::actionEditorW = 800;
- conf::actionEditorH = 600;
- conf::actionEditorZoom = 1;
- conf::actionEditorGridVal = 10;
- conf::actionEditorGridOn = 1;
- conf::sampleEditorX = 0;
- conf::sampleEditorY = 0;
- conf::sampleEditorW = 800;
- conf::sampleEditorH = 600;
- conf::sampleEditorGridVal = 4;
- conf::sampleEditorGridOn = 0;
- conf::pianoRollY = 0;
- conf::pianoRollH = 900;
- conf::pluginListX = 0;
- conf::pluginListY = 50;
- conf::configX = 20;
- conf::configY = 20;
- conf::bpmX = 30;
- conf::bpmY = 36;
- conf::beatsX = 1;
- conf::beatsY = 1;
- conf::aboutX = 2;
- conf::aboutY = 2;
-
- REQUIRE(conf::write() == 1);
- }
-
- SECTION("test read")
- {
- REQUIRE(conf::read() == 1);
- REQUIRE(conf::header == "GIADACONFTEST");
- REQUIRE(conf::logMode == 1);
- REQUIRE(conf::soundSystem == 2);
- REQUIRE(conf::soundDeviceOut == 3);
- REQUIRE(conf::soundDeviceIn == 4);
- REQUIRE(conf::channelsOut == 5);
- REQUIRE(conf::channelsIn == 6);
- REQUIRE(conf::samplerate == 44100); // sanitized
- REQUIRE(conf::buffersize == 8);
- REQUIRE(conf::limitOutput == true);
- REQUIRE(conf::rsmpQuality == 0); // sanitized
- REQUIRE(conf::midiSystem == 11);
- REQUIRE(conf::midiPortOut == 12);
- REQUIRE(conf::midiPortIn == 13);
- REQUIRE(conf::midiMapPath == "path/to/midi/map");
- REQUIRE(conf::lastFileMap == "path/to/last/midi/map");
- REQUIRE(conf::midiSync == 14);
- REQUIRE(conf::midiTCfps == Approx(15.1));
- REQUIRE(conf::midiInRewind == 16);
- REQUIRE(conf::midiInStartStop == 17);
- REQUIRE(conf::midiInActionRec == 18);
- REQUIRE(conf::midiInInputRec == 19);
- REQUIRE(conf::midiInMetronome == 20);
- REQUIRE(conf::midiInVolumeIn == 21);
- REQUIRE(conf::midiInVolumeOut == 22);
- REQUIRE(conf::midiInBeatDouble == 23);
- REQUIRE(conf::midiInBeatHalf == 24);
- REQUIRE(conf::recsStopOnChanHalt == true);
- REQUIRE(conf::chansStopOnSeqHalt == false);
- REQUIRE(conf::treatRecsAsLoops == true);
- REQUIRE(conf::pluginPath == "path/to/plugins");
- REQUIRE(conf::patchPath == "path/to/patches");
- REQUIRE(conf::samplePath == "path/to/samples");
- REQUIRE(conf::mainWindowX == 0);
- REQUIRE(conf::mainWindowY == 0);
- REQUIRE(conf::mainWindowW == 800);
- REQUIRE(conf::mainWindowH == 600);
- REQUIRE(conf::browserX == 0);
- REQUIRE(conf::browserY == 0);
- REQUIRE(conf::browserW == 800);
- REQUIRE(conf::browserH == 600);
- REQUIRE(conf::actionEditorX == 0);
- REQUIRE(conf::actionEditorY == 0);
- REQUIRE(conf::actionEditorW == 800);
- REQUIRE(conf::actionEditorH == 600);
- REQUIRE(conf::actionEditorZoom == 100); // sanitized
- REQUIRE(conf::actionEditorGridVal == 10);
- REQUIRE(conf::actionEditorGridOn == 1);
- REQUIRE(conf::sampleEditorX == 0);
- REQUIRE(conf::sampleEditorY == 0);
- REQUIRE(conf::sampleEditorW == 800);
- REQUIRE(conf::sampleEditorH == 600);
- REQUIRE(conf::sampleEditorGridVal == 4);
- REQUIRE(conf::sampleEditorGridOn == 0);
- REQUIRE(conf::pianoRollY == 0);
- REQUIRE(conf::pianoRollH == 900);
- REQUIRE(conf::pluginListX == 0);
- REQUIRE(conf::pluginListY == 50);
- REQUIRE(conf::configX == 20);
- REQUIRE(conf::configY == 20);
- REQUIRE(conf::bpmX == 30);
- REQUIRE(conf::bpmY == 36);
- REQUIRE(conf::beatsX == 1);
- REQUIRE(conf::beatsY == 1);
- REQUIRE(conf::aboutX == 2);
- REQUIRE(conf::aboutY == 2);
- }
-}
+++ /dev/null
-#include "../src/core/const.h"
-#include "../src/core/midiMapConf.h"
-#include <catch.hpp>
-
-
-using std::string;
-using namespace giada::m;
-
-
-TEST_CASE("midiMapConf")
-{
- SECTION("test default values")
- {
- midimap::setDefault();
- REQUIRE(midimap::brand == "");
- REQUIRE(midimap::device == "");
- REQUIRE(midimap::muteOn.channel == 0);
- REQUIRE(midimap::muteOn.valueStr == "");
- REQUIRE(midimap::muteOn.offset == -1);
- REQUIRE(midimap::muteOn.value == 0);
- REQUIRE(midimap::muteOff.channel == 0);
- REQUIRE(midimap::muteOff.valueStr == "");
- REQUIRE(midimap::muteOff.offset == -1);
- REQUIRE(midimap::muteOff.value == 0);
- REQUIRE(midimap::soloOn.channel == 0);
- REQUIRE(midimap::soloOn.valueStr == "");
- REQUIRE(midimap::soloOn.offset == -1);
- REQUIRE(midimap::soloOn.value == 0);
- REQUIRE(midimap::soloOff.channel == 0);
- REQUIRE(midimap::soloOff.valueStr == "");
- REQUIRE(midimap::soloOff.offset == -1);
- REQUIRE(midimap::soloOff.value == 0);
- REQUIRE(midimap::waiting.channel == 0);
- REQUIRE(midimap::waiting.valueStr == "");
- REQUIRE(midimap::waiting.offset == -1);
- REQUIRE(midimap::waiting.value == 0);
- REQUIRE(midimap::playing.channel == 0);
- REQUIRE(midimap::playing.valueStr == "");
- REQUIRE(midimap::playing.offset == -1);
- REQUIRE(midimap::playing.value == 0);
- REQUIRE(midimap::stopping.channel == 0);
- REQUIRE(midimap::stopping.valueStr == "");
- REQUIRE(midimap::stopping.offset == -1);
- REQUIRE(midimap::stopping.value == 0);
- REQUIRE(midimap::stopped.channel == 0);
- REQUIRE(midimap::stopped.valueStr == "");
- REQUIRE(midimap::stopped.offset == -1);
- REQUIRE(midimap::stopped.value == 0);
- }
-
-#ifdef RUN_TESTS_WITH_LOCAL_FILES
-
- SECTION("test read")
- {
- midimap::init();
- midimap::setDefault();
-
- /* expect more than 2 midifiles */
-
- REQUIRE(midimap::maps.size() >= 2);
-
- /* try with deprecated mode */
-
- int res = midimap::read("akai-lpd8.giadamap");
- if (res != MIDIMAP_READ_OK)
- res = midimap::readMap_DEPR_("akai-lpd8.giadamap");
-
- REQUIRE(res == MIDIMAP_READ_OK);
-
- REQUIRE(midimap::brand == "AKAI");
- REQUIRE(midimap::device == "LPD8");
-
- REQUIRE(midimap::initCommands.size() == 2);
- REQUIRE(midimap::initCommands[0].channel == 0);
- REQUIRE(midimap::initCommands[0].value == 0xB0000000);
- REQUIRE(midimap::initCommands[1].channel == 0);
- REQUIRE(midimap::initCommands[1].value == 0xB0002800);
-
- /* TODO - can't check 'valueStr' until deprecated methods are alive */
-
- REQUIRE(midimap::muteOn.channel == 0);
- //REQUIRE(midimap::muteOn.valueStr == "90nn3F00");
- REQUIRE(midimap::muteOn.offset == 16);
- REQUIRE(midimap::muteOn.value == 0x90003F00);
-
- REQUIRE(midimap::muteOff.channel == 0);
- //REQUIRE(midimap::muteOff.valueStr == "90nn0C00");
- REQUIRE(midimap::muteOff.offset == 16);
- REQUIRE(midimap::muteOff.value == 0x90000C00);
-
- REQUIRE(midimap::soloOn.channel == 0);
- //REQUIRE(midimap::soloOn.valueStr == "90nn0F00");
- REQUIRE(midimap::soloOn.offset == 16);
- REQUIRE(midimap::soloOn.value == 0x90000F00);
-
- REQUIRE(midimap::soloOff.channel == 0);
- //REQUIRE(midimap::soloOff.valueStr == "90nn0C00");
- REQUIRE(midimap::soloOff.offset == 16);
- REQUIRE(midimap::soloOff.value == 0x90000C00);
-
- REQUIRE(midimap::waiting.channel == 0);
- //REQUIRE(midimap::waiting.valueStr == "90nn7f00");
- REQUIRE(midimap::waiting.offset == 16);
- REQUIRE(midimap::waiting.value == 0x90007f00);
-
- REQUIRE(midimap::playing.channel == 0);
- //REQUIRE(midimap::playing.valueStr == "90nn7f00");
- REQUIRE(midimap::playing.offset == 16);
- REQUIRE(midimap::playing.value == 0x90007f00);
-
- REQUIRE(midimap::stopping.channel == 0);
- //REQUIRE(midimap::stopping.valueStr == "90nn7f00");
- REQUIRE(midimap::stopping.offset == 16);
- REQUIRE(midimap::stopping.value == 0x90007f00);
-
- REQUIRE(midimap::stopped.channel == 0);
- //REQUIRE(midimap::stopped.valueStr == "80nn7f00");
- REQUIRE(midimap::stopped.offset == 16);
- REQUIRE(midimap::stopped.value == 0x80007f00);
- }
-
-#endif // #ifdef RUN_TESTS_WITH_LOCAL_FILES
-}
+++ /dev/null
-#include "../src/core/patch.h"
-#include "../src/core/const.h"
-#include "../src/core/types.h"
-#include <catch.hpp>
-
-
-using std::string;
-using std::vector;
-using namespace giada;
-using namespace giada::m;
-
-
-TEST_CASE("patch")
-{
-#if 0
- string filename = "./test-patch.json";
-
- SECTION("test write")
- {
- patch::action_t action0;
- patch::action_t action1;
- patch::channel_t channel1;
- patch::column_t column;
-#ifdef WITH_VST
- patch::plugin_t plugin1;
- patch::plugin_t plugin2;
- patch::plugin_t plugin3;
-#endif
-
- action0.id = 0;
- action0.channel = 6;
- action0.frame = 4000;
- action0.event = 0xFF00FF00;
- action0.prev = -1;
- action0.next = -1;
- action1.id = 1;
- action1.channel = 2;
- action1.frame = 8000;
- action1.event = 0x00000000;
- action1.prev = -1;
- action1.next = -1;
- channel1.actions.push_back(action0);
- channel1.actions.push_back(action1);
-
-#ifdef WITH_VST
- plugin1.path = "/path/to/plugin1";
- plugin1.bypass = false;
- plugin1.params.push_back(0.0f);
- plugin1.params.push_back(0.1f);
- plugin1.params.push_back(0.2f);
- channel1.plugins.push_back(plugin1);
-
- plugin2.path = "/another/path/to/plugin2";
- plugin2.bypass = true;
- plugin2.params.push_back(0.6f);
- plugin2.params.push_back(0.6f);
- plugin2.params.push_back(0.6f);
- plugin2.params.push_back(0.0f);
- plugin2.params.push_back(1.0f);
- plugin2.params.push_back(1.0f);
- plugin2.params.push_back(0.333f);
- channel1.plugins.push_back(plugin2);
-#endif
-
- channel1.type = static_cast<int>(ChannelType::SAMPLE);
- channel1.index = 666;
- channel1.size = G_GUI_CHANNEL_H_1;
- channel1.column = 0;
- channel1.mute = 0;
- channel1.solo = 0;
- channel1.volume = 1.0f;
- channel1.pan = 0.5f;
- channel1.midiIn = true;
- channel1.midiInKeyPress = UINT32_MAX; // check maximum value
- channel1.midiInKeyRel = 1;
- channel1.midiInKill = 2;
- channel1.midiInArm = 11;
- channel1.midiInVolume = 3;
- channel1.midiInMute = 4;
- channel1.midiInSolo = 5;
- channel1.midiOutL = true;
- channel1.midiOutLplaying = 7;
- channel1.midiOutLmute = 8;
- channel1.midiOutLsolo = 9;
- channel1.samplePath = "/tmp/test.wav";
- channel1.key = 666;
- channel1.mode = 0;
- channel1.begin = 0;
- channel1.end = 0;
- channel1.boost = 0;
- channel1.readActions = 0;
- channel1.pitch = 1.2f;
- channel1.midiInReadActions = 0;
- channel1.midiInPitch = 0;
- channel1.midiOut = 0;
- channel1.midiOutChan = 5;
- patch::channels.push_back(channel1);
-
- column.index = 0;
- column.width = 500;
- patch::columns.push_back(column);
-
- patch::header = "GPTCH";
- patch::version = "1.0";
- patch::versionMajor = 6;
- patch::versionMinor = 6;
- patch::versionPatch = 6;
- patch::name = "test patch";
- patch::bpm = 100.0f;
- patch::bars = 4;
- patch::beats = 23;
- patch::quantize = 1;
- patch::masterVolIn = 1.0f;
- patch::masterVolOut = 0.7f;
- patch::metronome = 0;
- patch::lastTakeId = 0;
- patch::samplerate = 44100;
-
-#ifdef WITH_VST
-
- patch::masterInPlugins.push_back(plugin1);
- patch::masterOutPlugins.push_back(plugin2);
-
-#endif
-
- REQUIRE(patch::write(filename) == 1);
- }
-
- SECTION("test read")
- {
- REQUIRE(patch::read(filename) == PATCH_READ_OK);
- REQUIRE(patch::header == "GPTCH");
- REQUIRE(patch::version == "1.0");
- REQUIRE(patch::versionMajor == 6);
- REQUIRE(patch::versionMinor == 6);
- REQUIRE(patch::versionPatch == 6);
- REQUIRE(patch::name == "test patch");
- REQUIRE(patch::bpm == Approx(100.0f));
- REQUIRE(patch::bars == 4);
- REQUIRE(patch::beats == 23);
- REQUIRE(patch::quantize == 1);
- REQUIRE(patch::masterVolIn == Approx(1.0f));
- REQUIRE(patch::masterVolOut == Approx(0.7f));
- REQUIRE(patch::metronome == 0);
- REQUIRE(patch::lastTakeId == 0);
- REQUIRE(patch::samplerate == 44100);
-
- patch::column_t column0 = patch::columns.at(0);
- REQUIRE(column0.index == 0);
- REQUIRE(column0.width == 500);
-
- patch::channel_t channel0 = patch::channels.at(0);
- REQUIRE(channel0.type == static_cast<int>(ChannelType::SAMPLE));
- REQUIRE(channel0.index == 666);
- REQUIRE(channel0.size == G_GUI_CHANNEL_H_1);
- REQUIRE(channel0.column == 0);
- REQUIRE(channel0.mute == 0);
- REQUIRE(channel0.solo == 0);
- REQUIRE(channel0.volume == Approx(1.0f));
- REQUIRE(channel0.pan == Approx(0.5f));
- REQUIRE(channel0.midiIn == true);
- REQUIRE(channel0.midiInKeyPress == UINT32_MAX);
- REQUIRE(channel0.midiInKeyRel == 1);
- REQUIRE(channel0.midiInKill == 2);
- REQUIRE(channel0.midiInArm == 11);
- REQUIRE(channel0.midiInVolume == 3);
- REQUIRE(channel0.midiInMute == 4);
- REQUIRE(channel0.midiInSolo == 5);
- REQUIRE(channel0.midiOutL == true);
- REQUIRE(channel0.midiOutLplaying == 7);
- REQUIRE(channel0.midiOutLmute == 8);
- REQUIRE(channel0.midiOutLsolo == 9);
- REQUIRE(channel0.samplePath == "/tmp/test.wav");
- REQUIRE(channel0.key == 666);
- REQUIRE(channel0.mode == 0);
- REQUIRE(channel0.begin == 0);
- REQUIRE(channel0.end == 0);
- REQUIRE(channel0.boost == 1.0f);
- REQUIRE(channel0.readActions == 0);
- REQUIRE(channel0.pitch == Approx(1.2f));
- REQUIRE(channel0.midiInReadActions == 0);
- REQUIRE(channel0.midiInPitch == 0);
- REQUIRE(channel0.midiOut == 0);
- REQUIRE(channel0.midiOutChan == 5);
-
- patch::action_t action0 = channel0.actions.at(0);
- REQUIRE(action0.id == 0);
- REQUIRE(action0.channel == 6);
- REQUIRE(action0.frame == 4000);
- REQUIRE(action0.event == 0xFF00FF00);
- REQUIRE(action0.prev == -1);
- REQUIRE(action0.next == -1);
-
- patch::action_t action1 = channel0.actions.at(1);
- REQUIRE(action1.id == 1);
- REQUIRE(action1.channel == 2);
- REQUIRE(action1.frame == 8000);
- REQUIRE(action1.event == 0x00000000);
- REQUIRE(action1.prev == -1);
- REQUIRE(action1.next == -1);
-
-#ifdef WITH_VST
- patch::plugin_t plugin0 = channel0.plugins.at(0);
- REQUIRE(plugin0.path == "/path/to/plugin1");
- REQUIRE(plugin0.bypass == false);
- REQUIRE(plugin0.params.at(0) == Approx(0.0f));
- REQUIRE(plugin0.params.at(1) == Approx(0.1f));
- REQUIRE(plugin0.params.at(2) == Approx(0.2f));
-
- patch::plugin_t plugin1 = channel0.plugins.at(1);
- REQUIRE(plugin1.path == "/another/path/to/plugin2");
- REQUIRE(plugin1.bypass == true);
- REQUIRE(plugin1.params.at(0) == Approx(0.6f));
- REQUIRE(plugin1.params.at(1) == Approx(0.6f));
- REQUIRE(plugin1.params.at(2) == Approx(0.6f));
- REQUIRE(plugin1.params.at(3) == Approx(0.0f));
- REQUIRE(plugin1.params.at(4) == Approx(1.0f));
- REQUIRE(plugin1.params.at(5) == Approx(1.0f));
- REQUIRE(plugin1.params.at(6) == Approx(0.333f));
-
- patch::plugin_t masterPlugin0 = patch::masterInPlugins.at(0);
- REQUIRE(masterPlugin0.path == "/path/to/plugin1");
- REQUIRE(masterPlugin0.bypass == false);
- REQUIRE(masterPlugin0.params.at(0) == Approx(0.0f));
- REQUIRE(masterPlugin0.params.at(1) == Approx(0.1f));
- REQUIRE(masterPlugin0.params.at(2) == Approx(0.2f));
-
- patch::plugin_t masterPlugin1 = patch::masterOutPlugins.at(0);
- REQUIRE(masterPlugin1.path == "/another/path/to/plugin2");
- REQUIRE(masterPlugin1.bypass == true);
- REQUIRE(masterPlugin1.params.at(0) == Approx(0.6f));
- REQUIRE(masterPlugin1.params.at(1) == Approx(0.6f));
- REQUIRE(masterPlugin1.params.at(2) == Approx(0.6f));
- REQUIRE(masterPlugin1.params.at(3) == Approx(0.0f));
- REQUIRE(masterPlugin1.params.at(4) == Approx(1.0f));
- REQUIRE(masterPlugin1.params.at(5) == Approx(1.0f));
- REQUIRE(masterPlugin1.params.at(6) == Approx(0.333f));
-#endif
- }
-#endif
-}
+++ /dev/null
-#ifdef WITH_VST
-#ifdef RUN_TESTS_WITH_LOCAL_FILES
-
-// temporarily disabled due to entangled deps (WIP)
-#if 0
-
-#include "../src/core/pluginHost.h"
-#include <catch.hpp>
-
-
-TEST_CASE("Test PluginHost class")
-{
- PluginHost ph;
- pthread_mutex_t mutex;
- pthread_mutex_init(&mutex, NULL);
-
- SECTION("test read & write")
- {
- REQUIRE(ph.countPlugins(PluginHost::MASTER_IN) == 0);
- REQUIRE(ph.scanDir(".") > 0);
- REQUIRE(ph.saveList("test-plugin-list.xml") == 1);
- REQUIRE(ph.loadList("test-plugin-list.xml") == 1);
- REQUIRE(ph.addPlugin(0, PluginHost::MASTER_IN, &mutex) != NULL);
- REQUIRE(ph.countPlugins(PluginHost::MASTER_IN) == 1);
-
- ph.freeStack(PluginHost::MASTER_IN, &mutex);
- REQUIRE(ph.countPlugins(PluginHost::MASTER_IN) == 0);
- }
-}
-
-#endif
-
-#endif
-#endif
+++ /dev/null
-#if 0
-#include "../src/core/channels/sampleChannel.h"
-#include "../src/core/channels/sampleChannelProc.h"
-#include <catch.hpp>
-
-
-using namespace giada;
-using namespace giada::m;
-
-
-TEST_CASE("sampleChannelProc")
-{
- const int BUFFER_SIZE = 1024;
-
- std::vector<ChannelMode> modes = { ChannelMode::LOOP_BASIC,
- ChannelMode::LOOP_ONCE, ChannelMode::LOOP_REPEAT,
- ChannelMode::LOOP_ONCE_BAR, ChannelMode::SINGLE_BASIC,
- ChannelMode::SINGLE_PRESS, ChannelMode::SINGLE_RETRIG,
- ChannelMode::SINGLE_ENDLESS };
-
- SampleChannel ch(false, BUFFER_SIZE, 1, 1);
-
- AudioBuffer out;
- AudioBuffer in;
- AudioBuffer inToOut;
-
- REQUIRE(ch.playStatus == ChannelStatus::EMPTY);
- REQUIRE(ch.mode == ChannelMode::SINGLE_BASIC);
-
- SECTION("buffer")
- {
- SECTION("prepare")
- {
- /* With no wave data in it. */
- sampleChannelProc::render(&ch, out, in, inToOut, /*audible=*/true,
- /*running=*/false);
- REQUIRE(ch.tracker == 0);
-
- /* With data, stopped. */
- ch.pushWave(1, 1024);
- sampleChannelProc::render(&ch, out, in, inToOut, /*audible=*/true,
- /*running=*/false);
-
- REQUIRE(ch.tracker == 0);
-
- /* With data, playing. */
- ch.playStatus = ChannelStatus::PLAY;
- sampleChannelProc::render(&ch, out, in, inToOut, /*audible=*/true,
- /*running=*/false);
-
- REQUIRE(ch.tracker == BUFFER_SIZE);
- }
-
- SECTION("fill")
- {
- ch.pushWave(1, 1024);
-
- /* Zero offset. */
- REQUIRE(ch.fillBuffer(ch.buffer, 0, 0) == BUFFER_SIZE);
-
- /* Non-zero offset. */
- REQUIRE(ch.fillBuffer(ch.buffer, 0, 666) == BUFFER_SIZE - 666);
-
- /* At the end of the waveform. */
- REQUIRE(ch.fillBuffer(ch.buffer, ch.end - 666, 0) == (ch.end - (ch.end - 666)) + 1);
- }
- }
-
- SECTION("statuses")
- {
- ch.pushWave(1, 1024);
-
- SECTION("start from OFF")
- {
- for (ChannelMode mode : modes) {
- ch.mode = mode;
- ch.playStatus = ChannelStatus::OFF;
- sampleChannelProc::start(&ch, 0, /*doQuantize=*/false, /*velocity=*/0);
-
- if (ch.isAnyLoopMode())
- REQUIRE(ch.playStatus == ChannelStatus::WAIT);
- else
- REQUIRE(ch.playStatus == ChannelStatus::PLAY);
- }
- }
-
- SECTION("start from PLAY")
- {
- for (ChannelMode mode : modes) {
- ch.mode = mode;
- ch.playStatus = ChannelStatus::PLAY;
- ch.tracker = 16; // simulate processing
- sampleChannelProc::start(&ch, 0, /*doQuantize=*/false, /*velocity=*/0);
-
- if (ch.mode == ChannelMode::SINGLE_RETRIG) {
- REQUIRE(ch.playStatus == ChannelStatus::PLAY);
- REQUIRE(ch.tracker == 0);
- }
- else
- if (ch.isAnyLoopMode() || ch.mode == ChannelMode::SINGLE_ENDLESS)
- REQUIRE(ch.playStatus == ChannelStatus::ENDING);
- else
- if (ch.mode == ChannelMode::SINGLE_BASIC) {
- REQUIRE(ch.playStatus == ChannelStatus::OFF);
- REQUIRE(ch.tracker == 0);
- }
- }
- }
-
- SECTION("start from WAIT")
- {
- for (ChannelMode mode : modes) {
- ch.mode = mode;
- ch.playStatus = ChannelStatus::WAIT;
- sampleChannelProc::start(&ch, 0, /*doQuantize=*/false, /*velocity=*/0);
-
- REQUIRE(ch.playStatus == ChannelStatus::OFF);
- }
- }
-
- SECTION("start from ENDING")
- {
- for (ChannelMode mode : modes) {
- ch.mode = mode;
- ch.playStatus = ChannelStatus::ENDING;
- sampleChannelProc::start(&ch, 0, /*doQuantize=*/false, /*velocity=*/0);
-
- REQUIRE(ch.playStatus == ChannelStatus::PLAY);
- }
- }
-
- SECTION("stop from PLAY")
- {
- for (ChannelMode mode : modes) {
- ch.mode = mode;
- ch.playStatus = ChannelStatus::PLAY;
- ch.tracker = 16; // simulate processing
- sampleChannelProc::stop(&ch);
-
- if (ch.mode == ChannelMode::SINGLE_PRESS) {
- REQUIRE(ch.playStatus == ChannelStatus::OFF);
- REQUIRE(ch.tracker == 0);
- }
- else {
- /* Nothing should change for other modes. */
- REQUIRE(ch.playStatus == ChannelStatus::PLAY);
- REQUIRE(ch.tracker == 16);
- }
- }
- }
-
- SECTION("kill")
- {
- std::vector<ChannelStatus> statuses = { ChannelStatus::ENDING,
- ChannelStatus::WAIT, ChannelStatus::PLAY, ChannelStatus::OFF,
- ChannelStatus::EMPTY, ChannelStatus::MISSING, ChannelStatus::WRONG };
-
- for (ChannelMode mode : modes) {
- for (ChannelStatus status : statuses) {
- ch.mode = mode;
- ch.playStatus = status;
- ch.tracker = 16; // simulate processing
- sampleChannelProc::kill(&ch, 0);
-
- if (ch.playStatus == ChannelStatus::WAIT ||
- ch.playStatus == ChannelStatus::PLAY ||
- ch.playStatus == ChannelStatus::ENDING) {
- REQUIRE(ch.playStatus == ChannelStatus::OFF);
- REQUIRE(ch.tracker == 0);
- }
- }
- }
- }
-
- SECTION("quantized start")
- {
- for (ChannelMode mode : modes) {
- ch.mode = mode;
- ch.playStatus = ChannelStatus::OFF;
- sampleChannelProc::start(&ch, 0, /*doQuantize=*/true, /*velocity=*/0);
-
- if (ch.isAnyLoopMode())
- REQUIRE(ch.playStatus == ChannelStatus::WAIT);
- else {
- REQUIRE(ch.playStatus == ChannelStatus::OFF);
- REQUIRE(ch.quantizing == true);
- }
- }
- }
- }
-
- SECTION("stop input recordings")
- {
- /* Start all sample channels in any loop mode that were armed. */
- for (ChannelMode mode : modes) {
- ch.mode = mode;
- ch.playStatus = ChannelStatus::OFF;
- ch.armed = true;
- ch.tracker = 16;
-
- sampleChannelProc::stopInputRec(&ch, /*globalFrame=*/666);
-
- if (ch.isAnyLoopMode()) {
- REQUIRE(ch.playStatus == ChannelStatus::PLAY);
- REQUIRE(ch.tracker == 666);
- }
- else {
- REQUIRE(ch.playStatus == ChannelStatus::OFF);
- REQUIRE(ch.tracker == 16);
- }
- }
- }
-
- SECTION("rewind by sequencer")
- {
- ch.pushWave(1, 1024);
-
- /* Test loop modes. */
-
- for (ChannelMode mode : modes) {
- ch.mode = mode;
- ch.playStatus = ChannelStatus::PLAY;
- ch.tracker = 16; // simulate processing
-
- sampleChannelProc::rewindBySeq(&ch);
-
- if (ch.isAnyLoopMode()) {
- REQUIRE(ch.playStatus == ChannelStatus::PLAY);
- REQUIRE(ch.tracker == 0);
- }
- else {
- REQUIRE(ch.playStatus == ChannelStatus::PLAY);
- REQUIRE(ch.tracker == 16);
- }
- }
-
- /* Test single modes that are reading actions. */
-
- for (ChannelMode mode : modes) {
- ch.mode = mode;
- ch.playStatus = ChannelStatus::PLAY;
- ch.tracker = 16; // simulate processing
- ch.recStatus = ChannelStatus::PLAY;
-
- sampleChannelProc::rewindBySeq(&ch);
-
- if (ch.isAnySingleMode()) {
- REQUIRE(ch.playStatus == ChannelStatus::PLAY);
- REQUIRE(ch.tracker == 0);
- }
- }
- }
-}
-#endif
+++ /dev/null
-#if 0
-
-#include "../src/core/channels/sampleChannel.h"
-#include "../src/core/channels/sampleChannelRec.h"
-#include <catch.hpp>
-
-
-using namespace giada;
-using namespace giada::m;
-
-
-TEST_CASE("sampleChannelRec")
-{
- const int BUFFER_SIZE = 1024;
-
- SampleChannel ch(false, BUFFER_SIZE, 1, 1);
-
- SECTION("start reading actions, don't treat recs as loop")
- {
- sampleChannelRec::startReadingActions(&ch, /*treatRecsAsLoops=*/false,
- /*recsStopOnChanHalt=*/false);
-
- REQUIRE(ch.recStatus == ChannelStatus::OFF);
- REQUIRE(ch.readActions == true);
- }
-
- SECTION("start reading actions, do treat recs as loop")
- {
- sampleChannelRec::startReadingActions(&ch, /*treatRecsAsLoops=*/true,
- /*recsStopOnChanHalt=*/false);
-
- REQUIRE(ch.recStatus == ChannelStatus::WAIT);
- REQUIRE(ch.readActions == false);
- }
-
- SECTION("stop reading actions")
- {
- /* First state: PLAY */
- ch.recStatus = ChannelStatus::PLAY;
-
- sampleChannelRec::stopReadingActions(&ch, /*clockRunning=*/true,
- /*treatRecsAsLoops=*/false, /*recsStopOnChanHalt=*/false);
-
- REQUIRE(ch.readActions == false);
- REQUIRE(ch.recStatus == ChannelStatus::PLAY);
-
- /* Second state: WAIT */
- ch.recStatus = ChannelStatus::WAIT;
-
- sampleChannelRec::stopReadingActions(&ch, /*clockRunning=*/true,
- /*treatRecsAsLoops=*/false, /*recsStopOnChanHalt=*/false);
-
- REQUIRE(ch.readActions == false);
- REQUIRE(ch.recStatus == ChannelStatus::OFF);
-
- /* Third state: WAIT */
- ch.recStatus = ChannelStatus::ENDING;
-
- sampleChannelRec::stopReadingActions(&ch, /*clockRunning=*/true,
- /*treatRecsAsLoops=*/false, /*recsStopOnChanHalt=*/false);
-
- REQUIRE(ch.readActions == false);
- REQUIRE(ch.recStatus == ChannelStatus::PLAY);
-
- /* Fourth state: any, but with clockRunning == false. */
-
- sampleChannelRec::stopReadingActions(&ch, /*clockRunning=*/false,
- /*treatRecsAsLoops=*/false, /*recsStopOnChanHalt=*/false);
-
- REQUIRE(ch.readActions == false);
- REQUIRE(ch.recStatus == ChannelStatus::OFF);
- }
-
-
- SECTION("set read actions status to false with recsStopOnChanHalt")
- {
- ch.playStatus = ChannelStatus::PLAY;
- ch.tracker = 1024;
-
- sampleChannelRec::setReadActions(&ch, false, /*recsStopOnChanHalt=*/true);
-
- REQUIRE(ch.readActions == false);
- REQUIRE(ch.playStatus == ChannelStatus::OFF);
- REQUIRE(ch.tracker == 0);
-
- }
-}
-#endif