
Added Animated App template and examples

Felix Faire 2014-10-29 15:55:23 +00:00
parent fefcf7aca6
commit ff6520a89a
1141 changed files with 438491 additions and 94 deletions


@@ -0,0 +1,169 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#ifndef JUCE_AUDIOCDBURNER_H_INCLUDED
#define JUCE_AUDIOCDBURNER_H_INCLUDED
#if JUCE_USE_CDBURNER || DOXYGEN
//==============================================================================
/**
Manages the process of burning audio onto a writable CD in one of the
machine's optical drives.
Use findAvailableDevices() to list the drives, then openDevice() to create
an instance for the drive you want to write to.
*/
class AudioCDBurner : public ChangeBroadcaster
{
public:
//==============================================================================
/** Returns a list of available optical drives.
Use openDevice() to open one of the items from this list.
*/
static StringArray findAvailableDevices();
/** Tries to open one of the optical drives.
The deviceIndex is an index into the array returned by findAvailableDevices().
*/
static AudioCDBurner* openDevice (const int deviceIndex);
/** Destructor. */
~AudioCDBurner();
//==============================================================================
enum DiskState
{
unknown, /**< An error condition, if the device isn't responding. */
trayOpen, /**< The drive is currently open. Note that a slot-loading drive
may seem to be permanently open. */
noDisc, /**< The drive has no disk in it. */
writableDiskPresent, /**< The drive contains a writeable disk. */
readOnlyDiskPresent /**< The drive contains a read-only disk. */
};
/** Returns the current status of the device.
To get informed when the drive's status changes, attach a ChangeListener to
the AudioCDBurner.
*/
DiskState getDiskState() const;
/** Returns true if there's a writable disk in the drive. */
bool isDiskPresent() const;
/** Sends an eject signal to the drive.
The eject will happen asynchronously, so you can use getDiskState() and
waitUntilStateChange() to monitor its progress.
*/
bool openTray();
/** Blocks the current thread until the drive's state changes, or until the timeout expires.
@returns the device's new state
*/
DiskState waitUntilStateChange (int timeOutMilliseconds);
//==============================================================================
/** Returns the set of possible write speeds that the device can handle.
These are as a multiple of 'normal' speed, so e.g. '24x' returns 24, etc.
Note that if there's no media present in the drive, this value may be unavailable!
@see setWriteSpeed, getWriteSpeed
*/
Array<int> getAvailableWriteSpeeds() const;
//==============================================================================
/** Tries to enable or disable buffer underrun safety on devices that support it.
@returns true if it's now enabled. If the device doesn't support it, this
will always return false.
*/
bool setBufferUnderrunProtection (bool shouldBeEnabled);
//==============================================================================
/** Returns the number of free blocks on the disk.
There are 75 blocks per second, at 44100Hz.
*/
int getNumAvailableAudioBlocks() const;
/** Adds a track to be written.
The source passed-in here will be kept by this object, and it will
be used and deleted at some point in the future, either during the
burn() method or when this AudioCDBurner object is deleted. The caller
shouldn't keep a reference to the source or use it again after passing
it in here.
*/
bool addAudioTrack (AudioSource* source, int numSamples);
//==============================================================================
/** Receives progress callbacks during a cd-burn operation.
@see AudioCDBurner::burn()
*/
class BurnProgressListener
{
public:
BurnProgressListener() noexcept {}
virtual ~BurnProgressListener() {}
/** Called at intervals to report on the progress of the AudioCDBurner.
To cancel the burn, return true from this method.
*/
virtual bool audioCDBurnProgress (float proportionComplete) = 0;
};
/** Runs the burn process.
This method will block until the operation is complete.
@param listener the object to receive callbacks about progress
@param ejectDiscAfterwards whether to eject the disk after the burn completes
@param performFakeBurnForTesting if true, no data will actually be written to the disk
@param writeSpeed one of the write speeds from getAvailableWriteSpeeds(), or
0 or less to mean the fastest speed.
*/
String burn (BurnProgressListener* listener,
bool ejectDiscAfterwards,
bool performFakeBurnForTesting,
int writeSpeed);
/** If a burn operation is currently in progress, this tells it to stop
as soon as possible.
It's also possible to stop the burn process by returning true from
BurnProgressListener::audioCDBurnProgress()
*/
void abortBurn();
private:
//==============================================================================
AudioCDBurner (const int deviceIndex);
class Pimpl;
friend struct ContainerDeletePolicy<Pimpl>;
ScopedPointer<Pimpl> pimpl;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (AudioCDBurner)
};
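//==============================================================================
/*  A minimal usage sketch (illustrative only, not part of the class): the file
    path and drive index are assumptions made for the example. It shows the
    intended call sequence of findAvailableDevices(), openDevice(),
    addAudioTrack() and burn().

    struct SilentBurnListener  : public AudioCDBurner::BurnProgressListener
    {
        bool audioCDBurnProgress (float proportionComplete) override
        {
            DBG ("burn progress: " + String (proportionComplete));
            return false;   // returning true would cancel the burn
        }
    };

    void burnSingleTrackExample()
    {
        if (AudioCDBurner::findAvailableDevices().size() == 0)
            return;

        ScopedPointer<AudioCDBurner> burner (AudioCDBurner::openDevice (0));

        if (burner == nullptr || ! burner->isDiskPresent())
            return;

        WavAudioFormat wavFormat;
        File wavFile (File::getSpecialLocation (File::userHomeDirectory).getChildFile ("track1.wav"));
        AudioFormatReader* reader = wavFormat.createReaderFor (new FileInputStream (wavFile), true);

        if (reader != nullptr)
        {
            // addAudioTrack takes ownership of the source and will delete it later
            const int lengthInSamples = (int) reader->lengthInSamples;
            burner->addAudioTrack (new AudioFormatReaderSource (reader, true), lengthInSamples);

            SilentBurnListener listener;
            const String error (burner->burn (&listener, true, false, 0));

            if (error.isNotEmpty())
                DBG ("CD burn failed: " + error);
        }
    }
*/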
#endif
#endif // JUCE_AUDIOCDBURNER_H_INCLUDED


@@ -0,0 +1,57 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#if JUCE_USE_CDREADER
int AudioCDReader::getNumTracks() const
{
return trackStartSamples.size() - 1;
}
int AudioCDReader::getPositionOfTrackStart (int trackNum) const
{
return trackStartSamples [trackNum];
}
const Array<int>& AudioCDReader::getTrackOffsets() const
{
return trackStartSamples;
}
int AudioCDReader::getCDDBId()
{
int checksum = 0;
const int numTracks = getNumTracks();
for (int i = 0; i < numTracks; ++i)
for (int offset = (trackStartSamples.getUnchecked(i) + 88200) / 44100; offset > 0; offset /= 10)
checksum += offset % 10;
const int length = (trackStartSamples.getLast() - trackStartSamples.getFirst()) / 44100;
// CCLLLLTT: checksum, length, tracks
return ((checksum & 0xff) << 24) | (length << 8) | numTracks;
}
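/*  A small illustrative sketch (not part of the library): unpacking the fields
    that getCDDBId() packs into its return value, following the CCLLLLTT layout
    noted above.

    void printCDDBId (AudioCDReader& reader)
    {
        const int id = reader.getCDDBId();

        const int checksum      = (id >> 24) & 0xff;
        const int lengthSeconds = (id >> 8)  & 0xffff;
        const int numTracks     =  id        & 0xff;

        DBG (String::toHexString (id) + ": checksum=" + String (checksum)
               + ", length=" + String (lengthSeconds) + "s, tracks=" + String (numTracks));
    }
*/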
#endif


@@ -0,0 +1,174 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#ifndef JUCE_AUDIOCDREADER_H_INCLUDED
#define JUCE_AUDIOCDREADER_H_INCLUDED
#if JUCE_USE_CDREADER || DOXYGEN
//==============================================================================
/**
A type of AudioFormatReader that reads from an audio CD.
One of these can be used to read a CD as if it's one big audio stream. Use the
getPositionOfTrackStart() method to find where the individual tracks are
within the stream.
@see AudioFormatReader
*/
class JUCE_API AudioCDReader : public AudioFormatReader
{
public:
//==============================================================================
/** Returns a list of names of Audio CDs currently available for reading.
If there's a CD drive but no CD in it, this might return an empty list, or
possibly a device that can be opened but which has no tracks, depending
on the platform.
@see createReaderForCD
*/
static StringArray getAvailableCDNames();
/** Tries to create an AudioFormatReader that can read from an Audio CD.
@param index the index of one of the available CDs - use getAvailableCDNames()
to find out how many there are.
@returns a new AudioCDReader object, or nullptr if it couldn't be created. The
caller will be responsible for deleting the object returned.
*/
static AudioCDReader* createReaderForCD (const int index);
//==============================================================================
/** Destructor. */
~AudioCDReader();
/** Implementation of the AudioFormatReader method. */
bool readSamples (int** destSamples, int numDestChannels, int startOffsetInDestBuffer,
int64 startSampleInFile, int numSamples) override;
/** Checks whether the CD has been removed from the drive. */
bool isCDStillPresent() const;
/** Returns the total number of tracks (audio + data). */
int getNumTracks() const;
/** Finds the sample offset of the start of a track.
@param trackNum the track number, where trackNum = 0 is the first track
and trackNum = getNumTracks() means the end of the CD.
*/
int getPositionOfTrackStart (int trackNum) const;
/** Returns true if a given track is an audio track.
@param trackNum the track number, where 0 is the first track.
*/
bool isTrackAudio (int trackNum) const;
/** Returns an array of sample offsets for the start of each track, followed by
the sample position of the end of the CD.
*/
const Array<int>& getTrackOffsets() const;
/** Refreshes the object's table of contents.
If the disc has been ejected and a different one put in since this
object was created, this will cause it to update its idea of how many tracks
there are, etc.
*/
void refreshTrackLengths();
/** Enables scanning for indexes within tracks.
@see getLastIndex
*/
void enableIndexScanning (bool enabled);
/** Returns the index number found during the last read() call.
Index scanning is turned off by default - turn it on with enableIndexScanning().
Then when the read() method is called, if it comes across an index within that
block, the index number is stored and returned by this method.
Some devices might not support indexes, of course.
(If you don't know what CD indexes are, it's unlikely you'll ever need them).
@see enableIndexScanning
*/
int getLastIndex() const;
/** Scans a track to find the position of any indexes within it.
@param trackNumber the track to look in, where 0 is the first track on the disc
@returns an array of sample positions of any index points found (not including
the index that marks the start of the track)
*/
Array<int> findIndexesInTrack (const int trackNumber);
/** Returns the CDDB id number for the CD.
It's not a great way of identifying a disc, but it's traditional.
*/
int getCDDBId();
/** Tries to eject the disk.
Ejecting the disk might not actually be possible, e.g. if some other process is using it.
*/
void ejectDisk();
//==============================================================================
enum
{
framesPerSecond = 75,
samplesPerFrame = 44100 / framesPerSecond
};
private:
//==============================================================================
Array<int> trackStartSamples;
#if JUCE_MAC
File volumeDir;
Array<File> tracks;
int currentReaderTrack;
ScopedPointer <AudioFormatReader> reader;
AudioCDReader (const File& volume);
#elif JUCE_WINDOWS
bool audioTracks [100];
void* handle;
MemoryBlock buffer;
bool indexingEnabled;
int lastIndex, firstFrameInBuffer, samplesInBuffer;
AudioCDReader (void* handle);
int getIndexAt (int samplePos);
#elif JUCE_LINUX
AudioCDReader();
#endif
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (AudioCDReader)
};
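//==============================================================================
/*  A minimal usage sketch (illustrative only): opening the first available CD
    and reading the audio of its first track into a buffer.

    void readFirstTrackExample()
    {
        if (AudioCDReader::getAvailableCDNames().size() == 0)
            return;

        ScopedPointer<AudioCDReader> reader (AudioCDReader::createReaderForCD (0));

        if (reader != nullptr && reader->getNumTracks() > 0 && reader->isTrackAudio (0))
        {
            const int start  = reader->getPositionOfTrackStart (0);
            const int length = reader->getPositionOfTrackStart (1) - start;

            AudioSampleBuffer trackBuffer ((int) reader->numChannels, length);
            reader->read (&trackBuffer, 0, length, start, true, true);
        }
    }
*/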
#endif
#endif // JUCE_AUDIOCDREADER_H_INCLUDED


@@ -0,0 +1,987 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
AudioDeviceManager::AudioDeviceSetup::AudioDeviceSetup()
: sampleRate (0),
bufferSize (0),
useDefaultInputChannels (true),
useDefaultOutputChannels (true)
{
}
bool AudioDeviceManager::AudioDeviceSetup::operator== (const AudioDeviceManager::AudioDeviceSetup& other) const
{
return outputDeviceName == other.outputDeviceName
&& inputDeviceName == other.inputDeviceName
&& sampleRate == other.sampleRate
&& bufferSize == other.bufferSize
&& inputChannels == other.inputChannels
&& useDefaultInputChannels == other.useDefaultInputChannels
&& outputChannels == other.outputChannels
&& useDefaultOutputChannels == other.useDefaultOutputChannels;
}
//==============================================================================
class AudioDeviceManager::CallbackHandler : public AudioIODeviceCallback,
public MidiInputCallback,
public AudioIODeviceType::Listener
{
public:
CallbackHandler (AudioDeviceManager& adm) noexcept : owner (adm) {}
private:
void audioDeviceIOCallback (const float** ins, int numIns, float** outs, int numOuts, int numSamples) override
{
owner.audioDeviceIOCallbackInt (ins, numIns, outs, numOuts, numSamples);
}
void audioDeviceAboutToStart (AudioIODevice* device) override
{
owner.audioDeviceAboutToStartInt (device);
}
void audioDeviceStopped() override
{
owner.audioDeviceStoppedInt();
}
void audioDeviceError (const String& message) override
{
owner.audioDeviceErrorInt (message);
}
void handleIncomingMidiMessage (MidiInput* source, const MidiMessage& message) override
{
owner.handleIncomingMidiMessageInt (source, message);
}
void audioDeviceListChanged() override
{
owner.audioDeviceListChanged();
}
AudioDeviceManager& owner;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (CallbackHandler)
};
//==============================================================================
AudioDeviceManager::AudioDeviceManager()
: numInputChansNeeded (0),
numOutputChansNeeded (2),
listNeedsScanning (true),
useInputNames (false),
inputLevel (0),
testSoundPosition (0),
cpuUsageMs (0),
timeToCpuScale (0)
{
callbackHandler = new CallbackHandler (*this);
}
AudioDeviceManager::~AudioDeviceManager()
{
currentAudioDevice = nullptr;
defaultMidiOutput = nullptr;
}
//==============================================================================
void AudioDeviceManager::createDeviceTypesIfNeeded()
{
if (availableDeviceTypes.size() == 0)
{
OwnedArray<AudioIODeviceType> types;
createAudioDeviceTypes (types);
for (int i = 0; i < types.size(); ++i)
addAudioDeviceType (types.getUnchecked(i));
types.clear (false);
if (AudioIODeviceType* first = availableDeviceTypes.getFirst())
currentDeviceType = first->getTypeName();
}
}
const OwnedArray<AudioIODeviceType>& AudioDeviceManager::getAvailableDeviceTypes()
{
scanDevicesIfNeeded();
return availableDeviceTypes;
}
void AudioDeviceManager::audioDeviceListChanged()
{
if (currentAudioDevice != nullptr)
{
currentSetup.sampleRate = currentAudioDevice->getCurrentSampleRate();
currentSetup.bufferSize = currentAudioDevice->getCurrentBufferSizeSamples();
currentSetup.inputChannels = currentAudioDevice->getActiveInputChannels();
currentSetup.outputChannels = currentAudioDevice->getActiveOutputChannels();
}
sendChangeMessage();
}
//==============================================================================
static void addIfNotNull (OwnedArray<AudioIODeviceType>& list, AudioIODeviceType* const device)
{
if (device != nullptr)
list.add (device);
}
void AudioDeviceManager::createAudioDeviceTypes (OwnedArray<AudioIODeviceType>& list)
{
addIfNotNull (list, AudioIODeviceType::createAudioIODeviceType_WASAPI());
addIfNotNull (list, AudioIODeviceType::createAudioIODeviceType_DirectSound());
addIfNotNull (list, AudioIODeviceType::createAudioIODeviceType_ASIO());
addIfNotNull (list, AudioIODeviceType::createAudioIODeviceType_CoreAudio());
addIfNotNull (list, AudioIODeviceType::createAudioIODeviceType_iOSAudio());
addIfNotNull (list, AudioIODeviceType::createAudioIODeviceType_ALSA());
addIfNotNull (list, AudioIODeviceType::createAudioIODeviceType_JACK());
addIfNotNull (list, AudioIODeviceType::createAudioIODeviceType_OpenSLES());
addIfNotNull (list, AudioIODeviceType::createAudioIODeviceType_Android());
}
void AudioDeviceManager::addAudioDeviceType (AudioIODeviceType* newDeviceType)
{
if (newDeviceType != nullptr)
{
jassert (lastDeviceTypeConfigs.size() == availableDeviceTypes.size());
availableDeviceTypes.add (newDeviceType);
lastDeviceTypeConfigs.add (new AudioDeviceSetup());
newDeviceType->addListener (callbackHandler);
}
}
//==============================================================================
String AudioDeviceManager::initialise (const int numInputChannelsNeeded,
const int numOutputChannelsNeeded,
const XmlElement* const xml,
const bool selectDefaultDeviceOnFailure,
const String& preferredDefaultDeviceName,
const AudioDeviceSetup* preferredSetupOptions)
{
scanDevicesIfNeeded();
numInputChansNeeded = numInputChannelsNeeded;
numOutputChansNeeded = numOutputChannelsNeeded;
if (xml != nullptr && xml->hasTagName ("DEVICESETUP"))
return initialiseFromXML (*xml, selectDefaultDeviceOnFailure,
preferredDefaultDeviceName, preferredSetupOptions);
return initialiseDefault (preferredDefaultDeviceName, preferredSetupOptions);
}
String AudioDeviceManager::initialiseDefault (const String& preferredDefaultDeviceName,
const AudioDeviceSetup* preferredSetupOptions)
{
AudioDeviceSetup setup;
if (preferredSetupOptions != nullptr)
{
setup = *preferredSetupOptions;
}
else if (preferredDefaultDeviceName.isNotEmpty())
{
for (int j = availableDeviceTypes.size(); --j >= 0;)
{
AudioIODeviceType* const type = availableDeviceTypes.getUnchecked(j);
const StringArray outs (type->getDeviceNames (false));
for (int i = 0; i < outs.size(); ++i)
{
if (outs[i].matchesWildcard (preferredDefaultDeviceName, true))
{
setup.outputDeviceName = outs[i];
break;
}
}
const StringArray ins (type->getDeviceNames (true));
for (int i = 0; i < ins.size(); ++i)
{
if (ins[i].matchesWildcard (preferredDefaultDeviceName, true))
{
setup.inputDeviceName = ins[i];
break;
}
}
}
}
insertDefaultDeviceNames (setup);
return setAudioDeviceSetup (setup, false);
}
String AudioDeviceManager::initialiseFromXML (const XmlElement& xml,
const bool selectDefaultDeviceOnFailure,
const String& preferredDefaultDeviceName,
const AudioDeviceSetup* preferredSetupOptions)
{
lastExplicitSettings = new XmlElement (xml);
String error;
AudioDeviceSetup setup;
if (preferredSetupOptions != nullptr)
setup = *preferredSetupOptions;
if (xml.getStringAttribute ("audioDeviceName").isNotEmpty())
{
setup.inputDeviceName = setup.outputDeviceName
= xml.getStringAttribute ("audioDeviceName");
}
else
{
setup.inputDeviceName = xml.getStringAttribute ("audioInputDeviceName");
setup.outputDeviceName = xml.getStringAttribute ("audioOutputDeviceName");
}
currentDeviceType = xml.getStringAttribute ("deviceType");
if (findType (currentDeviceType) == nullptr)
{
if (AudioIODeviceType* const type = findType (setup.inputDeviceName, setup.outputDeviceName))
currentDeviceType = type->getTypeName();
else if (availableDeviceTypes.size() > 0)
currentDeviceType = availableDeviceTypes.getUnchecked(0)->getTypeName();
}
setup.bufferSize = xml.getIntAttribute ("audioDeviceBufferSize");
setup.sampleRate = xml.getDoubleAttribute ("audioDeviceRate");
setup.inputChannels .parseString (xml.getStringAttribute ("audioDeviceInChans", "11"), 2);
setup.outputChannels.parseString (xml.getStringAttribute ("audioDeviceOutChans", "11"), 2);
setup.useDefaultInputChannels = ! xml.hasAttribute ("audioDeviceInChans");
setup.useDefaultOutputChannels = ! xml.hasAttribute ("audioDeviceOutChans");
error = setAudioDeviceSetup (setup, true);
midiInsFromXml.clear();
forEachXmlChildElementWithTagName (xml, c, "MIDIINPUT")
midiInsFromXml.add (c->getStringAttribute ("name"));
const StringArray allMidiIns (MidiInput::getDevices());
for (int i = allMidiIns.size(); --i >= 0;)
setMidiInputEnabled (allMidiIns[i], midiInsFromXml.contains (allMidiIns[i]));
if (error.isNotEmpty() && selectDefaultDeviceOnFailure)
error = initialise (numInputChansNeeded, numOutputChansNeeded,
nullptr, false, preferredDefaultDeviceName);
setDefaultMidiOutput (xml.getStringAttribute ("defaultMidiOutput"));
return error;
}
String AudioDeviceManager::initialiseWithDefaultDevices (int numInputChannelsNeeded,
int numOutputChannelsNeeded)
{
lastExplicitSettings = nullptr;
return initialise (numInputChannelsNeeded, numOutputChannelsNeeded,
nullptr, false, String(), nullptr);
}
void AudioDeviceManager::insertDefaultDeviceNames (AudioDeviceSetup& setup) const
{
if (AudioIODeviceType* type = getCurrentDeviceTypeObject())
{
if (setup.outputDeviceName.isEmpty())
setup.outputDeviceName = type->getDeviceNames (false) [type->getDefaultDeviceIndex (false)];
if (setup.inputDeviceName.isEmpty())
setup.inputDeviceName = type->getDeviceNames (true) [type->getDefaultDeviceIndex (true)];
}
}
XmlElement* AudioDeviceManager::createStateXml() const
{
return lastExplicitSettings.createCopy();
}
//==============================================================================
void AudioDeviceManager::scanDevicesIfNeeded()
{
if (listNeedsScanning)
{
listNeedsScanning = false;
createDeviceTypesIfNeeded();
for (int i = availableDeviceTypes.size(); --i >= 0;)
availableDeviceTypes.getUnchecked(i)->scanForDevices();
}
}
AudioIODeviceType* AudioDeviceManager::findType (const String& typeName)
{
scanDevicesIfNeeded();
for (int i = availableDeviceTypes.size(); --i >= 0;)
if (availableDeviceTypes.getUnchecked(i)->getTypeName() == typeName)
return availableDeviceTypes.getUnchecked(i);
return nullptr;
}
AudioIODeviceType* AudioDeviceManager::findType (const String& inputName, const String& outputName)
{
scanDevicesIfNeeded();
for (int i = availableDeviceTypes.size(); --i >= 0;)
{
AudioIODeviceType* const type = availableDeviceTypes.getUnchecked(i);
if ((inputName.isNotEmpty() && type->getDeviceNames (true).contains (inputName, true))
|| (outputName.isNotEmpty() && type->getDeviceNames (false).contains (outputName, true)))
{
return type;
}
}
return nullptr;
}
void AudioDeviceManager::getAudioDeviceSetup (AudioDeviceSetup& setup)
{
setup = currentSetup;
}
void AudioDeviceManager::deleteCurrentDevice()
{
currentAudioDevice = nullptr;
currentSetup.inputDeviceName.clear();
currentSetup.outputDeviceName.clear();
}
void AudioDeviceManager::setCurrentAudioDeviceType (const String& type,
const bool treatAsChosenDevice)
{
for (int i = 0; i < availableDeviceTypes.size(); ++i)
{
if (availableDeviceTypes.getUnchecked(i)->getTypeName() == type
&& currentDeviceType != type)
{
if (currentAudioDevice != nullptr)
{
closeAudioDevice();
Thread::sleep (1500); // allow a moment for OS devices to sort themselves out, to help
// avoid things like DirectSound/ASIO clashes
}
currentDeviceType = type;
AudioDeviceSetup s (*lastDeviceTypeConfigs.getUnchecked(i));
insertDefaultDeviceNames (s);
setAudioDeviceSetup (s, treatAsChosenDevice);
sendChangeMessage();
break;
}
}
}
AudioIODeviceType* AudioDeviceManager::getCurrentDeviceTypeObject() const
{
for (int i = 0; i < availableDeviceTypes.size(); ++i)
if (availableDeviceTypes.getUnchecked(i)->getTypeName() == currentDeviceType)
return availableDeviceTypes.getUnchecked(i);
return availableDeviceTypes[0];
}
String AudioDeviceManager::setAudioDeviceSetup (const AudioDeviceSetup& newSetup,
const bool treatAsChosenDevice)
{
jassert (&newSetup != &currentSetup); // this will have no effect
if (newSetup == currentSetup && currentAudioDevice != nullptr)
return String();
if (! (newSetup == currentSetup))
sendChangeMessage();
stopDevice();
const String newInputDeviceName (numInputChansNeeded == 0 ? String() : newSetup.inputDeviceName);
const String newOutputDeviceName (numOutputChansNeeded == 0 ? String() : newSetup.outputDeviceName);
String error;
AudioIODeviceType* type = getCurrentDeviceTypeObject();
if (type == nullptr || (newInputDeviceName.isEmpty() && newOutputDeviceName.isEmpty()))
{
deleteCurrentDevice();
if (treatAsChosenDevice)
updateXml();
return String();
}
if (currentSetup.inputDeviceName != newInputDeviceName
|| currentSetup.outputDeviceName != newOutputDeviceName
|| currentAudioDevice == nullptr)
{
deleteCurrentDevice();
scanDevicesIfNeeded();
if (newOutputDeviceName.isNotEmpty()
&& ! type->getDeviceNames (false).contains (newOutputDeviceName))
{
return "No such device: " + newOutputDeviceName;
}
if (newInputDeviceName.isNotEmpty()
&& ! type->getDeviceNames (true).contains (newInputDeviceName))
{
return "No such device: " + newInputDeviceName;
}
currentAudioDevice = type->createDevice (newOutputDeviceName, newInputDeviceName);
if (currentAudioDevice == nullptr)
error = "Can't open the audio device!\n\n"
"This may be because another application is currently using the same device - "
"if so, you should close any other applications and try again!";
else
error = currentAudioDevice->getLastError();
if (error.isNotEmpty())
{
deleteCurrentDevice();
return error;
}
if (newSetup.useDefaultInputChannels)
{
inputChannels.clear();
inputChannels.setRange (0, numInputChansNeeded, true);
}
if (newSetup.useDefaultOutputChannels)
{
outputChannels.clear();
outputChannels.setRange (0, numOutputChansNeeded, true);
}
if (newInputDeviceName.isEmpty()) inputChannels.clear();
if (newOutputDeviceName.isEmpty()) outputChannels.clear();
}
if (! newSetup.useDefaultInputChannels) inputChannels = newSetup.inputChannels;
if (! newSetup.useDefaultOutputChannels) outputChannels = newSetup.outputChannels;
currentSetup = newSetup;
currentSetup.sampleRate = chooseBestSampleRate (newSetup.sampleRate);
currentSetup.bufferSize = chooseBestBufferSize (newSetup.bufferSize);
error = currentAudioDevice->open (inputChannels,
outputChannels,
currentSetup.sampleRate,
currentSetup.bufferSize);
if (error.isEmpty())
{
currentDeviceType = currentAudioDevice->getTypeName();
currentAudioDevice->start (callbackHandler);
currentSetup.sampleRate = currentAudioDevice->getCurrentSampleRate();
currentSetup.bufferSize = currentAudioDevice->getCurrentBufferSizeSamples();
currentSetup.inputChannels = currentAudioDevice->getActiveInputChannels();
currentSetup.outputChannels = currentAudioDevice->getActiveOutputChannels();
for (int i = 0; i < availableDeviceTypes.size(); ++i)
if (availableDeviceTypes.getUnchecked (i)->getTypeName() == currentDeviceType)
*(lastDeviceTypeConfigs.getUnchecked (i)) = currentSetup;
if (treatAsChosenDevice)
updateXml();
}
else
{
deleteCurrentDevice();
}
return error;
}
double AudioDeviceManager::chooseBestSampleRate (double rate) const
{
jassert (currentAudioDevice != nullptr);
const Array<double> rates (currentAudioDevice->getAvailableSampleRates());
if (rate > 0 && rates.contains (rate))
return rate;
double lowestAbove44 = 0.0;
for (int i = rates.size(); --i >= 0;)
{
const double sr = rates[i];
if (sr >= 44100.0 && (lowestAbove44 < 1.0 || sr < lowestAbove44))
lowestAbove44 = sr;
}
if (lowestAbove44 > 0.0)
return lowestAbove44;
return rates[0];
}
int AudioDeviceManager::chooseBestBufferSize (int bufferSize) const
{
jassert (currentAudioDevice != nullptr);
if (bufferSize > 0 && currentAudioDevice->getAvailableBufferSizes().contains (bufferSize))
return bufferSize;
return currentAudioDevice->getDefaultBufferSize();
}
void AudioDeviceManager::stopDevice()
{
if (currentAudioDevice != nullptr)
currentAudioDevice->stop();
testSound = nullptr;
}
void AudioDeviceManager::closeAudioDevice()
{
stopDevice();
currentAudioDevice = nullptr;
}
void AudioDeviceManager::restartLastAudioDevice()
{
if (currentAudioDevice == nullptr)
{
if (currentSetup.inputDeviceName.isEmpty()
&& currentSetup.outputDeviceName.isEmpty())
{
// This method will only reload the last device that was running
// before closeAudioDevice() was called - you need to actually open
// one first, with setAudioDeviceSetup().
jassertfalse;
return;
}
AudioDeviceSetup s (currentSetup);
setAudioDeviceSetup (s, false);
}
}
void AudioDeviceManager::updateXml()
{
lastExplicitSettings = new XmlElement ("DEVICESETUP");
lastExplicitSettings->setAttribute ("deviceType", currentDeviceType);
lastExplicitSettings->setAttribute ("audioOutputDeviceName", currentSetup.outputDeviceName);
lastExplicitSettings->setAttribute ("audioInputDeviceName", currentSetup.inputDeviceName);
if (currentAudioDevice != nullptr)
{
lastExplicitSettings->setAttribute ("audioDeviceRate", currentAudioDevice->getCurrentSampleRate());
if (currentAudioDevice->getDefaultBufferSize() != currentAudioDevice->getCurrentBufferSizeSamples())
lastExplicitSettings->setAttribute ("audioDeviceBufferSize", currentAudioDevice->getCurrentBufferSizeSamples());
if (! currentSetup.useDefaultInputChannels)
lastExplicitSettings->setAttribute ("audioDeviceInChans", currentSetup.inputChannels.toString (2));
if (! currentSetup.useDefaultOutputChannels)
lastExplicitSettings->setAttribute ("audioDeviceOutChans", currentSetup.outputChannels.toString (2));
}
for (int i = 0; i < enabledMidiInputs.size(); ++i)
lastExplicitSettings->createNewChildElement ("MIDIINPUT")
->setAttribute ("name", enabledMidiInputs[i]->getName());
if (midiInsFromXml.size() > 0)
{
// Add any midi devices that have been enabled before, but which aren't currently
// open because the device has been disconnected.
const StringArray availableMidiDevices (MidiInput::getDevices());
for (int i = 0; i < midiInsFromXml.size(); ++i)
if (! availableMidiDevices.contains (midiInsFromXml[i], true))
lastExplicitSettings->createNewChildElement ("MIDIINPUT")
->setAttribute ("name", midiInsFromXml[i]);
}
if (defaultMidiOutputName.isNotEmpty())
lastExplicitSettings->setAttribute ("defaultMidiOutput", defaultMidiOutputName);
}
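/*  For reference, the element built above has roughly this shape (the attribute
    and element names come from the code; the values shown are only examples, and
    attributes that match the device defaults are omitted):

    <DEVICESETUP deviceType="CoreAudio"
                 audioOutputDeviceName="Built-in Output"
                 audioInputDeviceName="Built-in Microphone"
                 audioDeviceRate="44100.0"
                 audioDeviceBufferSize="512"
                 defaultMidiOutput="My MIDI Device">
      <MIDIINPUT name="My MIDI Keyboard"/>
    </DEVICESETUP>
*/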
//==============================================================================
void AudioDeviceManager::addAudioCallback (AudioIODeviceCallback* newCallback)
{
{
const ScopedLock sl (audioCallbackLock);
if (callbacks.contains (newCallback))
return;
}
if (currentAudioDevice != nullptr && newCallback != nullptr)
newCallback->audioDeviceAboutToStart (currentAudioDevice);
const ScopedLock sl (audioCallbackLock);
callbacks.add (newCallback);
}
void AudioDeviceManager::removeAudioCallback (AudioIODeviceCallback* callbackToRemove)
{
if (callbackToRemove != nullptr)
{
bool needsDeinitialising = currentAudioDevice != nullptr;
{
const ScopedLock sl (audioCallbackLock);
needsDeinitialising = needsDeinitialising && callbacks.contains (callbackToRemove);
callbacks.removeFirstMatchingValue (callbackToRemove);
}
if (needsDeinitialising)
callbackToRemove->audioDeviceStopped();
}
}
void AudioDeviceManager::audioDeviceIOCallbackInt (const float** inputChannelData,
int numInputChannels,
float** outputChannelData,
int numOutputChannels,
int numSamples)
{
const ScopedLock sl (audioCallbackLock);
if (inputLevelMeasurementEnabledCount.get() > 0 && numInputChannels > 0)
{
for (int j = 0; j < numSamples; ++j)
{
float s = 0;
for (int i = 0; i < numInputChannels; ++i)
s += std::abs (inputChannelData[i][j]);
s /= numInputChannels;
const double decayFactor = 0.99992;
if (s > inputLevel)
inputLevel = s;
else if (inputLevel > 0.001f)
inputLevel *= decayFactor;
else
inputLevel = 0;
}
}
else
{
inputLevel = 0;
}
if (callbacks.size() > 0)
{
const double callbackStartTime = Time::getMillisecondCounterHiRes();
tempBuffer.setSize (jmax (1, numOutputChannels), jmax (1, numSamples), false, false, true);
callbacks.getUnchecked(0)->audioDeviceIOCallback (inputChannelData, numInputChannels,
outputChannelData, numOutputChannels, numSamples);
float** const tempChans = tempBuffer.getArrayOfWritePointers();
for (int i = callbacks.size(); --i > 0;)
{
callbacks.getUnchecked(i)->audioDeviceIOCallback (inputChannelData, numInputChannels,
tempChans, numOutputChannels, numSamples);
for (int chan = 0; chan < numOutputChannels; ++chan)
{
if (const float* const src = tempChans [chan])
if (float* const dst = outputChannelData [chan])
for (int j = 0; j < numSamples; ++j)
dst[j] += src[j];
}
}
const double msTaken = Time::getMillisecondCounterHiRes() - callbackStartTime;
const double filterAmount = 0.2;
cpuUsageMs += filterAmount * (msTaken - cpuUsageMs);
}
else
{
for (int i = 0; i < numOutputChannels; ++i)
zeromem (outputChannelData[i], sizeof (float) * (size_t) numSamples);
}
if (testSound != nullptr)
{
const int numSamps = jmin (numSamples, testSound->getNumSamples() - testSoundPosition);
const float* const src = testSound->getReadPointer (0, testSoundPosition);
for (int i = 0; i < numOutputChannels; ++i)
for (int j = 0; j < numSamps; ++j)
outputChannelData [i][j] += src[j];
testSoundPosition += numSamps;
if (testSoundPosition >= testSound->getNumSamples())
testSound = nullptr;
}
}
void AudioDeviceManager::audioDeviceAboutToStartInt (AudioIODevice* const device)
{
cpuUsageMs = 0;
const double sampleRate = device->getCurrentSampleRate();
const int blockSize = device->getCurrentBufferSizeSamples();
if (sampleRate > 0.0 && blockSize > 0)
{
const double msPerBlock = 1000.0 * blockSize / sampleRate;
timeToCpuScale = (msPerBlock > 0.0) ? (1.0 / msPerBlock) : 0.0;
}
{
const ScopedLock sl (audioCallbackLock);
for (int i = callbacks.size(); --i >= 0;)
callbacks.getUnchecked(i)->audioDeviceAboutToStart (device);
}
sendChangeMessage();
}
void AudioDeviceManager::audioDeviceStoppedInt()
{
cpuUsageMs = 0;
timeToCpuScale = 0;
sendChangeMessage();
const ScopedLock sl (audioCallbackLock);
for (int i = callbacks.size(); --i >= 0;)
callbacks.getUnchecked(i)->audioDeviceStopped();
}
void AudioDeviceManager::audioDeviceErrorInt (const String& message)
{
const ScopedLock sl (audioCallbackLock);
for (int i = callbacks.size(); --i >= 0;)
callbacks.getUnchecked(i)->audioDeviceError (message);
}
double AudioDeviceManager::getCpuUsage() const
{
return jlimit (0.0, 1.0, timeToCpuScale * cpuUsageMs);
}
//==============================================================================
void AudioDeviceManager::setMidiInputEnabled (const String& name, const bool enabled)
{
if (enabled != isMidiInputEnabled (name))
{
if (enabled)
{
const int index = MidiInput::getDevices().indexOf (name);
if (index >= 0)
{
if (MidiInput* const midiIn = MidiInput::openDevice (index, callbackHandler))
{
enabledMidiInputs.add (midiIn);
midiIn->start();
}
}
}
else
{
for (int i = enabledMidiInputs.size(); --i >= 0;)
if (enabledMidiInputs[i]->getName() == name)
enabledMidiInputs.remove (i);
}
updateXml();
sendChangeMessage();
}
}
bool AudioDeviceManager::isMidiInputEnabled (const String& name) const
{
for (int i = enabledMidiInputs.size(); --i >= 0;)
if (enabledMidiInputs[i]->getName() == name)
return true;
return false;
}
void AudioDeviceManager::addMidiInputCallback (const String& name, MidiInputCallback* callbackToAdd)
{
removeMidiInputCallback (name, callbackToAdd);
if (name.isEmpty() || isMidiInputEnabled (name))
{
const ScopedLock sl (midiCallbackLock);
MidiCallbackInfo mc;
mc.deviceName = name;
mc.callback = callbackToAdd;
midiCallbacks.add (mc);
}
}
void AudioDeviceManager::removeMidiInputCallback (const String& name, MidiInputCallback* callbackToRemove)
{
for (int i = midiCallbacks.size(); --i >= 0;)
{
const MidiCallbackInfo& mc = midiCallbacks.getReference(i);
if (mc.callback == callbackToRemove && mc.deviceName == name)
{
const ScopedLock sl (midiCallbackLock);
midiCallbacks.remove (i);
}
}
}
void AudioDeviceManager::handleIncomingMidiMessageInt (MidiInput* source, const MidiMessage& message)
{
if (! message.isActiveSense())
{
const ScopedLock sl (midiCallbackLock);
for (int i = 0; i < midiCallbacks.size(); ++i)
{
const MidiCallbackInfo& mc = midiCallbacks.getReference(i);
if (mc.deviceName.isEmpty() || mc.deviceName == source->getName())
mc.callback->handleIncomingMidiMessage (source, message);
}
}
}
//==============================================================================
void AudioDeviceManager::setDefaultMidiOutput (const String& deviceName)
{
if (defaultMidiOutputName != deviceName)
{
Array<AudioIODeviceCallback*> oldCallbacks;
{
const ScopedLock sl (audioCallbackLock);
oldCallbacks.swapWith (callbacks);
}
if (currentAudioDevice != nullptr)
for (int i = oldCallbacks.size(); --i >= 0;)
oldCallbacks.getUnchecked(i)->audioDeviceStopped();
defaultMidiOutput = nullptr;
defaultMidiOutputName = deviceName;
if (deviceName.isNotEmpty())
defaultMidiOutput = MidiOutput::openDevice (MidiOutput::getDevices().indexOf (deviceName));
if (currentAudioDevice != nullptr)
for (int i = oldCallbacks.size(); --i >= 0;)
oldCallbacks.getUnchecked(i)->audioDeviceAboutToStart (currentAudioDevice);
{
const ScopedLock sl (audioCallbackLock);
oldCallbacks.swapWith (callbacks);
}
updateXml();
sendChangeMessage();
}
}
//==============================================================================
void AudioDeviceManager::playTestSound()
{
{ // cunningly nested to swap, unlock and delete in that order.
ScopedPointer<AudioSampleBuffer> oldSound;
{
const ScopedLock sl (audioCallbackLock);
oldSound = testSound;
}
}
testSoundPosition = 0;
if (currentAudioDevice != nullptr)
{
const double sampleRate = currentAudioDevice->getCurrentSampleRate();
const int soundLength = (int) sampleRate;
const double frequency = 440.0;
const float amplitude = 0.5f;
const double phasePerSample = double_Pi * 2.0 / (sampleRate / frequency);
AudioSampleBuffer* const newSound = new AudioSampleBuffer (1, soundLength);
for (int i = 0; i < soundLength; ++i)
newSound->setSample (0, i, amplitude * (float) std::sin (i * phasePerSample));
newSound->applyGainRamp (0, 0, soundLength / 10, 0.0f, 1.0f);
newSound->applyGainRamp (0, soundLength - soundLength / 4, soundLength / 4, 1.0f, 0.0f);
const ScopedLock sl (audioCallbackLock);
testSound = newSound;
}
}
void AudioDeviceManager::enableInputLevelMeasurement (const bool enableMeasurement)
{
if (enableMeasurement)
++inputLevelMeasurementEnabledCount;
else
--inputLevelMeasurementEnabledCount;
inputLevel = 0;
}
double AudioDeviceManager::getCurrentInputLevel() const
{
jassert (inputLevelMeasurementEnabledCount.get() > 0); // you need to call enableInputLevelMeasurement() before using this!
return inputLevel;
}


@@ -0,0 +1,512 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#ifndef JUCE_AUDIODEVICEMANAGER_H_INCLUDED
#define JUCE_AUDIODEVICEMANAGER_H_INCLUDED
//==============================================================================
/**
Manages the state of some audio and midi i/o devices.
This class keeps track of a currently-selected audio device, through
which it continuously streams data from an audio callback, as well as
one or more midi inputs.
The idea is that your application will create one global instance of this object,
and let it take care of creating and deleting specific types of audio devices
internally. So when the device is changed, your callbacks will just keep running
without having to worry about this.
The manager can save and reload all of its device settings as XML, which
makes it very easy for you to save and reload the audio setup of your
application.
And to make it easy to let the user change its settings, there's a component
to do just that - the AudioDeviceSelectorComponent class, which contains a set of
device selection/sample-rate/latency controls.
To use an AudioDeviceManager, create one, and use initialise() to set it up. Then
call addAudioCallback() to register your audio callback with it, and use that to process
your audio data.
The manager also acts as a handy hub for incoming midi messages, allowing a
listener to register for messages from either a specific midi device, or from whatever
the current default midi input device is. The listener then doesn't have to worry about
re-registering with different midi devices if they are changed or deleted.
And yet another neat trick is that the amount of CPU time being used is measured and
available with the getCpuUsage() method.
The AudioDeviceManager is a ChangeBroadcaster, and will send a change message to
listeners whenever one of its settings is changed.
@see AudioDeviceSelectorComponent, AudioIODevice, AudioIODeviceType
*/
class JUCE_API AudioDeviceManager : public ChangeBroadcaster
{
public:
//==============================================================================
/** Creates a default AudioDeviceManager.
Initially no audio device will be selected. You should call the initialise() method
and register an audio callback with addAudioCallback() before it'll be able to
actually make any noise.
*/
AudioDeviceManager();
/** Destructor. */
~AudioDeviceManager();
//==============================================================================
/**
This structure holds a set of properties describing the current audio setup.
An AudioDeviceManager uses this class to save/load its current settings, and to
specify your preferred options when opening a device.
@see AudioDeviceManager::setAudioDeviceSetup(), AudioDeviceManager::initialise()
*/
struct JUCE_API AudioDeviceSetup
{
/** Creates an AudioDeviceSetup object.
The default constructor sets all the member variables to indicate default values.
You can then fill-in any values you want to before passing the object to
AudioDeviceManager::initialise().
*/
AudioDeviceSetup();
bool operator== (const AudioDeviceSetup& other) const;
/** The name of the audio device used for output.
The name has to be one of the ones listed by the AudioDeviceManager's currently
selected device type.
This may be the same as the input device.
An empty string indicates the default device.
*/
String outputDeviceName;
/** The name of the audio device used for input.
This may be the same as the output device.
An empty string indicates the default device.
*/
String inputDeviceName;
/** The current sample rate.
This rate is used for both the input and output devices.
A value of 0 indicates that you don't care what rate is used, and the
device will choose a sensible rate for you.
*/
double sampleRate;
/** The buffer size, in samples.
This buffer size is used for both the input and output devices.
A value of 0 indicates the default buffer size.
*/
int bufferSize;
/** The set of active input channels.
The bits that are set in this array indicate the channels of the
input device that are active.
If useDefaultInputChannels is true, this value is ignored.
*/
BigInteger inputChannels;
/** If this is true, it indicates that the inputChannels array
should be ignored, and instead, the device's default channels
should be used.
*/
bool useDefaultInputChannels;
/** The set of active output channels.
The bits that are set in this array indicate the channels of the
output device that are active.
If useDefaultOutputChannels is true, this value is ignored.
*/
BigInteger outputChannels;
/** If this is true, it indicates that the outputChannels array
should be ignored, and instead, the device's default channels
should be used.
*/
bool useDefaultOutputChannels;
};
//==============================================================================
/** Opens a set of audio devices ready for use.
This will attempt to open either a default audio device, or one that was
previously saved as XML.
@param numInputChannelsNeeded the maximum number of input channels your app would like to
use (the actual number of channels opened may be less than
the number requested)
@param numOutputChannelsNeeded the maximum number of output channels your app would like to
use (the actual number of channels opened may be less than
the number requested)
@param savedState either a previously-saved state that was produced
by createStateXml(), or nullptr if you want the manager
to choose the best device to open.
@param selectDefaultDeviceOnFailure if true, then if the device specified in the XML
fails to open, then a default device will be used
instead. If false, then on failure, no device is
opened.
@param preferredDefaultDeviceName if this is not empty, and there's a device with this
name, then that will be used as the default device
(assuming that there wasn't one specified in the XML).
The string can actually be a simple wildcard, containing "*"
and "?" characters
@param preferredSetupOptions if this is non-null, the structure will be used as the
set of preferred settings when opening the device. If you
use this parameter, the preferredDefaultDeviceName
field will be ignored
@returns an error message if anything went wrong, or an empty string if it worked ok.
*/
String initialise (int numInputChannelsNeeded,
int numOutputChannelsNeeded,
const XmlElement* savedState,
bool selectDefaultDeviceOnFailure,
const String& preferredDefaultDeviceName = String(),
const AudioDeviceSetup* preferredSetupOptions = nullptr);
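/*  A typical call (illustrative): ask for stereo input and output, let the
    manager pick default devices, and check the returned error string.

    AudioDeviceManager deviceManager;
    const String error (deviceManager.initialise (2, 2, nullptr, true));

    if (error.isNotEmpty())
        DBG ("Audio device setup failed: " + error);
*/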
/** Resets everything to a default device setup, clearing any stored settings. */
String initialiseWithDefaultDevices (int numInputChannelsNeeded,
int numOutputChannelsNeeded);
/** Returns some XML representing the current state of the manager.
This stores the current device, its samplerate, block size, etc, and
can be restored later with initialise().
Note that this can return a null pointer if no settings have been explicitly changed
(i.e. if the device manager has just been left in its default state).
*/
XmlElement* createStateXml() const;
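/*  Illustrative save/restore sketch; the settings file used here is an
    assumption made for the example.

    void saveAudioState (AudioDeviceManager& dm, const File& settingsFile)
    {
        ScopedPointer<XmlElement> state (dm.createStateXml());

        if (state != nullptr)
            state->writeToFile (settingsFile, String());
    }

    void restoreAudioState (AudioDeviceManager& dm, const File& settingsFile)
    {
        ScopedPointer<XmlElement> state (XmlDocument::parse (settingsFile));
        dm.initialise (2, 2, state, true);
    }
*/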
//==============================================================================
/** Returns the current device properties that are in use.
@see setAudioDeviceSetup
*/
void getAudioDeviceSetup (AudioDeviceSetup& result);
/** Changes the current device or its settings.
If you want to change a device property, like the current sample rate or
block size, you can call getAudioDeviceSetup() to retrieve the current
settings, then tweak the appropriate fields in the AudioDeviceSetup structure,
and pass it back into this method to apply the new settings.
@param newSetup the settings that you'd like to use
@param treatAsChosenDevice if this is true and if the device opens correctly, these new
settings will be taken as having been explicitly chosen by the
user, and the next time createStateXml() is called, these settings
will be returned. If it's false, then the device is treated as a
temporary or default device, and a call to createStateXml() will
return either the last settings that were made with treatAsChosenDevice
as true, or the last XML settings that were passed into initialise().
@returns an error message if anything went wrong, or an empty string if it worked ok.
@see getAudioDeviceSetup
*/
String setAudioDeviceSetup (const AudioDeviceSetup& newSetup,
bool treatAsChosenDevice);
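/*  Illustrative sketch of the get-tweak-set pattern described above; the
    values chosen here are just examples and must be supported by the device.

    AudioDeviceManager::AudioDeviceSetup setup;
    deviceManager.getAudioDeviceSetup (setup);

    setup.sampleRate = 48000.0;
    setup.bufferSize = 256;

    const String error (deviceManager.setAudioDeviceSetup (setup, true));
*/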
/** Returns the currently-active audio device. */
AudioIODevice* getCurrentAudioDevice() const noexcept { return currentAudioDevice; }
/** Returns the type of audio device currently in use.
@see setCurrentAudioDeviceType
*/
String getCurrentAudioDeviceType() const { return currentDeviceType; }
/** Returns the currently active audio device type object.
Don't keep a copy of this pointer - it's owned by the device manager and could
change at any time.
*/
AudioIODeviceType* getCurrentDeviceTypeObject() const;
/** Changes the class of audio device being used.
This switches between, e.g. ASIO and DirectSound. On the Mac you probably won't ever call
this because there's only one type: CoreAudio.
For a list of types, see getAvailableDeviceTypes().
*/
void setCurrentAudioDeviceType (const String& type,
bool treatAsChosenDevice);
/** Closes the currently-open device.
You can call restartLastAudioDevice() later to reopen it in the same state
that it was just in.
*/
void closeAudioDevice();
/** Tries to reload the last audio device that was running.
Note that this only reloads the last device that was running before
closeAudioDevice() was called - it doesn't reload any kind of saved-state,
and can only be called after a device has been opened with setAudioDeviceSetup().
If a device is already open, this call will do nothing.
*/
void restartLastAudioDevice();
//==============================================================================
/** Registers an audio callback to be used.
The manager will redirect callbacks from whatever audio device is currently
in use to all registered callback objects. If more than one callback is
active, they will all be given the same input data, and their outputs will
be summed.
If necessary, this method will invoke audioDeviceAboutToStart() on the callback
object before returning.
To remove a callback, use removeAudioCallback().
*/
void addAudioCallback (AudioIODeviceCallback* newCallback);
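/*  Illustrative sketch: a trivial callback that just clears its output,
    registered and later removed. In real code an AudioSourcePlayer or
    AudioProcessorPlayer is usually the callback registered here.

    struct SilenceCallback  : public AudioIODeviceCallback
    {
        void audioDeviceIOCallback (const float**, int, float** outputs,
                                    int numOutputs, int numSamples) override
        {
            for (int i = 0; i < numOutputs; ++i)
                if (outputs[i] != nullptr)
                    FloatVectorOperations::clear (outputs[i], numSamples);
        }

        void audioDeviceAboutToStart (AudioIODevice*) override {}
        void audioDeviceStopped() override {}
    };

    SilenceCallback silence;
    deviceManager.addAudioCallback (&silence);
    // ... later, before 'silence' goes out of scope:
    deviceManager.removeAudioCallback (&silence);
*/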
/** Deregisters a previously added callback.
If necessary, this method will invoke audioDeviceStopped() on the callback
object before returning.
@see addAudioCallback
*/
void removeAudioCallback (AudioIODeviceCallback* callback);
//==============================================================================
/** Returns the average proportion of available CPU being spent inside the audio callbacks.
@returns A value between 0 and 1.0 to indicate the approximate proportion of CPU
time spent in the callbacks.
*/
double getCpuUsage() const;
//==============================================================================
/** Enables or disables a midi input device.
The list of devices can be obtained with the MidiInput::getDevices() method.
Any incoming messages from enabled input devices will be forwarded on to all the
listeners that have been registered with the addMidiInputCallback() method. They
can either register for messages from a particular device, or from just the
"default" midi input.
Routing the midi input via an AudioDeviceManager means that when a listener
registers for the default midi input, this default device can be changed by the
manager without the listeners having to know about it or re-register.
It also means that a listener can stay registered for a midi input that is disabled
or not present, so that when the input is re-enabled, the listener will start
receiving messages again.
@see addMidiInputCallback, isMidiInputEnabled
*/
void setMidiInputEnabled (const String& midiInputDeviceName, bool enabled);
/** Returns true if a given midi input device is being used.
@see setMidiInputEnabled
*/
bool isMidiInputEnabled (const String& midiInputDeviceName) const;
/** Registers a listener for callbacks when midi events arrive from a midi input.
The device name can be empty to indicate that it wants to receive all incoming
events from all the enabled MIDI inputs. Or it can be the name of one of the
MIDI input devices if it just wants the events from that device. (see
MidiInput::getDevices() for the list of device names).
Only devices which are enabled (see the setMidiInputEnabled() method) will have their
events forwarded on to listeners.
*/
void addMidiInputCallback (const String& midiInputDeviceName,
MidiInputCallback* callback);
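/*  Illustrative sketch: enable the first midi input device, then listen to all
    enabled devices by passing an empty device name.

    struct MidiLogger  : public MidiInputCallback
    {
        void handleIncomingMidiMessage (MidiInput*, const MidiMessage& m) override
        {
            if (m.isNoteOn())
                DBG ("note on: " + String (m.getNoteNumber()));
        }
    };

    const StringArray midiInputs (MidiInput::getDevices());

    if (midiInputs.size() > 0)
        deviceManager.setMidiInputEnabled (midiInputs[0], true);

    static MidiLogger logger;
    deviceManager.addMidiInputCallback (String(), &logger);
*/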
/** Removes a listener that was previously registered with addMidiInputCallback(). */
void removeMidiInputCallback (const String& midiInputDeviceName,
MidiInputCallback* callback);
//==============================================================================
/** Sets a midi output device to use as the default.
The list of devices can be obtained with the MidiOutput::getDevices() method.
The specified device will be opened automatically and can be retrieved with the
getDefaultMidiOutput() method.
Pass in an empty string to deselect all devices. For the default device, you
can use MidiOutput::getDevices() [MidiOutput::getDefaultDeviceIndex()].
@see getDefaultMidiOutput, getDefaultMidiOutputName
*/
void setDefaultMidiOutput (const String& deviceName);
/** Returns the name of the default midi output.
@see setDefaultMidiOutput, getDefaultMidiOutput
*/
const String& getDefaultMidiOutputName() const noexcept { return defaultMidiOutputName; }
/** Returns the current default midi output device.
If no device has been selected, or the device can't be opened, this will return nullptr.
@see getDefaultMidiOutputName
*/
MidiOutput* getDefaultMidiOutput() const noexcept { return defaultMidiOutput; }
/** Returns a list of the types of device supported. */
const OwnedArray<AudioIODeviceType>& getAvailableDeviceTypes();
//==============================================================================
/** Creates a list of available types.
This will add a set of new AudioIODeviceType objects to the specified list, to
represent each available type of device.
You can override this if your app needs to do something specific, like avoid
using DirectSound devices, etc.
*/
virtual void createAudioDeviceTypes (OwnedArray<AudioIODeviceType>& types);
/** Adds a new device type to the list of types.
The manager will take ownership of the object that is passed-in.
*/
void addAudioDeviceType (AudioIODeviceType* newDeviceType);
//==============================================================================
/** Plays a beep through the current audio device.
This is here to allow the audio setup UI panels to easily include a "test"
button so that the user can check where the audio is coming from.
*/
void playTestSound();
/** Turns on level-measuring.
When enabled, the device manager will measure the peak input level
across all channels, and you can get this level by calling getCurrentInputLevel().
This is mainly intended for audio setup UI panels to use to create a mic
level display, so that the user can check that they've selected the right
device.
A simple filter is used to make the level decay smoothly, but this is
only intended for giving rough feedback, and not for any kind of accurate
measurement.
*/
void enableInputLevelMeasurement (bool enableMeasurement);
/** Returns the current input level.
To use this, you must first enable it by calling enableInputLevelMeasurement().
See enableInputLevelMeasurement() for more info.
*/
double getCurrentInputLevel() const;
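/*  Illustrative sketch: polling the measured level from a timer, e.g. to drive
    a simple mic-level display.

    struct LevelPoller  : public Timer
    {
        LevelPoller (AudioDeviceManager& dm)  : manager (dm)
        {
            manager.enableInputLevelMeasurement (true);
            startTimer (50);
        }

        ~LevelPoller()
        {
            manager.enableInputLevelMeasurement (false);
        }

        void timerCallback() override
        {
            DBG ("input level: " + String (manager.getCurrentInputLevel()));
        }

        AudioDeviceManager& manager;
    };
*/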
/** Returns a lock that can be used to synchronise access to the audio callback.
Obviously while this is locked, you're blocking the audio thread from running, so
it must only be used for very brief periods when absolutely necessary.
*/
CriticalSection& getAudioCallbackLock() noexcept { return audioCallbackLock; }
/** Returns a lock that can be used to synchronise access to the midi callback.
Obviously while this is locked, you're blocking the midi system from running, so
it must only be used for very brief periods when absolutely necessary.
*/
CriticalSection& getMidiCallbackLock() noexcept { return midiCallbackLock; }
private:
//==============================================================================
OwnedArray<AudioIODeviceType> availableDeviceTypes;
OwnedArray<AudioDeviceSetup> lastDeviceTypeConfigs;
AudioDeviceSetup currentSetup;
ScopedPointer<AudioIODevice> currentAudioDevice;
Array<AudioIODeviceCallback*> callbacks;
int numInputChansNeeded, numOutputChansNeeded;
String currentDeviceType;
BigInteger inputChannels, outputChannels;
ScopedPointer<XmlElement> lastExplicitSettings;
mutable bool listNeedsScanning;
bool useInputNames;
Atomic<int> inputLevelMeasurementEnabledCount;
double inputLevel;
ScopedPointer<AudioSampleBuffer> testSound;
int testSoundPosition;
AudioSampleBuffer tempBuffer;
struct MidiCallbackInfo
{
String deviceName;
MidiInputCallback* callback;
};
StringArray midiInsFromXml;
OwnedArray<MidiInput> enabledMidiInputs;
Array<MidiCallbackInfo> midiCallbacks;
String defaultMidiOutputName;
ScopedPointer<MidiOutput> defaultMidiOutput;
CriticalSection audioCallbackLock, midiCallbackLock;
double cpuUsageMs, timeToCpuScale;
//==============================================================================
class CallbackHandler;
friend class CallbackHandler;
friend struct ContainerDeletePolicy<CallbackHandler>;
ScopedPointer<CallbackHandler> callbackHandler;
void audioDeviceIOCallbackInt (const float** inputChannelData, int totalNumInputChannels,
float** outputChannelData, int totalNumOutputChannels, int numSamples);
void audioDeviceAboutToStartInt (AudioIODevice*);
void audioDeviceStoppedInt();
void audioDeviceErrorInt (const String&);
void handleIncomingMidiMessageInt (MidiInput*, const MidiMessage&);
void audioDeviceListChanged();
String restartDevice (int blockSizeToUse, double sampleRateToUse,
const BigInteger& ins, const BigInteger& outs);
void stopDevice();
void updateXml();
void createDeviceTypesIfNeeded();
void scanDevicesIfNeeded();
void deleteCurrentDevice();
double chooseBestSampleRate (double preferred) const;
int chooseBestBufferSize (int preferred) const;
void insertDefaultDeviceNames (AudioDeviceSetup&) const;
String initialiseDefault (const String& preferredDefaultDeviceName, const AudioDeviceSetup*);
String initialiseFromXML (const XmlElement&, bool selectDefaultDeviceOnFailure,
const String& preferredDefaultDeviceName, const AudioDeviceSetup*);
AudioIODeviceType* findType (const String& inputName, const String& outputName);
AudioIODeviceType* findType (const String& typeName);
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (AudioDeviceManager)
};
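//==============================================================================
/* A minimal usage sketch (illustrative only, not part of the original header).
   It shows how an app might pick the default MIDI output, fire the built-in test
   sound and poll the input level for a meter. initialise() is assumed from the
   part of the class declared earlier in this file; everything else used here is
   declared above.

    AudioDeviceManager manager;
    manager.initialise (2, 2, nullptr, true);     // 2 ins, 2 outs, no saved settings

    const StringArray midiOuts (MidiOutput::getDevices());
    if (midiOuts.size() > 0)
        manager.setDefaultMidiOutput (midiOuts [MidiOutput::getDefaultDeviceIndex()]);

    manager.playTestSound();                      // "is the audio coming out of the right place?"

    manager.enableInputLevelMeasurement (true);   // start the rough mic-level meter
    const double level = manager.getCurrentInputLevel();
*/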
#endif // JUCE_AUDIODEVICEMANAGER_H_INCLUDED


@ -0,0 +1,41 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
AudioIODevice::AudioIODevice (const String& deviceName, const String& deviceTypeName)
: name (deviceName), typeName (deviceTypeName)
{
}
AudioIODevice::~AudioIODevice() {}
void AudioIODeviceCallback::audioDeviceError (const String&) {}
bool AudioIODevice::setAudioPreprocessingEnabled (bool) { return false; }
bool AudioIODevice::hasControlPanel() const { return false; }
bool AudioIODevice::showControlPanel()
{
jassertfalse; // this should only be called for devices which return true from
// their hasControlPanel() method.
return false;
}


@ -0,0 +1,309 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#ifndef JUCE_AUDIOIODEVICE_H_INCLUDED
#define JUCE_AUDIOIODEVICE_H_INCLUDED
class AudioIODevice;
//==============================================================================
/**
One of these is passed to an AudioIODevice object to stream the audio data
in and out.
The AudioIODevice will repeatedly call this class's audioDeviceIOCallback()
method on its own high-priority audio thread, when it needs to send or receive
the next block of data.
@see AudioIODevice, AudioDeviceManager
*/
class JUCE_API AudioIODeviceCallback
{
public:
/** Destructor. */
virtual ~AudioIODeviceCallback() {}
/** Processes a block of incoming and outgoing audio data.
The subclass's implementation should use the incoming audio for whatever
purposes it needs to, and must fill all the output channels with the next
block of output data before returning.
The channel data is arranged with the same array indices as the channel name
array returned by AudioIODevice::getOutputChannelNames(), but those channels
that aren't specified in AudioIODevice::open() will have a null pointer for their
associated channel, so remember to check for this.
@param inputChannelData a set of arrays containing the audio data for each
incoming channel - this data is valid until the function
returns. There will be one channel of data for each input
channel that was enabled when the audio device was opened
(see AudioIODevice::open())
@param numInputChannels the number of pointers to channel data in the
inputChannelData array.
@param outputChannelData a set of arrays which need to be filled with the data
that should be sent to each outgoing channel of the device.
There will be one channel of data for each output channel
that was enabled when the audio device was opened (see
AudioIODevice::open())
The initial contents of the array are undefined, so the
callback function must fill all the channels with zeros if
its output is silent. Failing to do this could cause quite
an unpleasant noise!
@param numOutputChannels the number of pointers to channel data in the
outputChannelData array.
@param numSamples the number of samples in each channel of the input and
output arrays. The number of samples will depend on the
audio device's buffer size and will usually remain constant,
although this isn't guaranteed, so make sure your code can
cope with reasonable changes in the buffer size from one
callback to the next.
*/
virtual void audioDeviceIOCallback (const float** inputChannelData,
int numInputChannels,
float** outputChannelData,
int numOutputChannels,
int numSamples) = 0;
/** Called to indicate that the device is about to start calling back.
This will be called just before the audio callbacks begin, either when this
callback has just been added to an audio device, or after the device has been
restarted because of a sample-rate or block-size change.
You can use this opportunity to find out the sample rate and block size
that the device is going to use by calling the AudioIODevice::getCurrentSampleRate()
and AudioIODevice::getCurrentBufferSizeSamples() on the supplied pointer.
@param device the audio IO device that will be used to drive the callback.
Note that if you're going to store this pointer, it is
only valid until the next time that audioDeviceStopped is called.
*/
virtual void audioDeviceAboutToStart (AudioIODevice* device) = 0;
/** Called to indicate that the device has stopped. */
virtual void audioDeviceStopped() = 0;
/** This can be overridden to be told if the device generates an error while operating.
Be aware that this could be called by any thread! And not all devices perform
this callback.
*/
virtual void audioDeviceError (const String& errorMessage);
};
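//==============================================================================
/* Illustrative sketch (not part of the original header): a pass-through callback
   that copies each enabled input channel to the matching output and silences the
   rest, as the audioDeviceIOCallback() contract above requires. The class name is
   made up; FloatVectorOperations comes from the juce_audio_basics dependency.

    class PassThroughCallback  : public AudioIODeviceCallback
    {
    public:
        PassThroughCallback() : sampleRate (0.0) {}

        void audioDeviceAboutToStart (AudioIODevice* device) override
        {
            sampleRate = device->getCurrentSampleRate();   // block size is also available here
        }

        void audioDeviceIOCallback (const float** inputChannelData, int numInputChannels,
                                    float** outputChannelData, int numOutputChannels,
                                    int numSamples) override
        {
            for (int ch = 0; ch < numOutputChannels; ++ch)
            {
                if (outputChannelData[ch] == nullptr)
                    continue;   // this output channel wasn't enabled in open()

                if (ch < numInputChannels && inputChannelData[ch] != nullptr)
                    FloatVectorOperations::copy (outputChannelData[ch], inputChannelData[ch], numSamples);
                else
                    FloatVectorOperations::clear (outputChannelData[ch], numSamples);  // silence = zeros
            }
        }

        void audioDeviceStopped() override {}

    private:
        double sampleRate;
    };
*/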
//==============================================================================
/**
Base class for an audio device with synchronised input and output channels.
Subclasses of this are used to implement different protocols such as DirectSound,
ASIO, CoreAudio, etc.
To create one of these, you'll need to use the AudioIODeviceType class - see the
documentation for that class for more info.
For an easier way of managing audio devices and their settings, have a look at the
AudioDeviceManager class.
@see AudioIODeviceType, AudioDeviceManager
*/
class JUCE_API AudioIODevice
{
public:
/** Destructor. */
virtual ~AudioIODevice();
//==============================================================================
/** Returns the device's name, (as set in the constructor). */
const String& getName() const noexcept { return name; }
/** Returns the type of the device.
E.g. "CoreAudio", "ASIO", etc. - this comes from the AudioIODeviceType that created it.
*/
const String& getTypeName() const noexcept { return typeName; }
//==============================================================================
/** Returns the names of all the available output channels on this device.
To find out which of these are currently in use, call getActiveOutputChannels().
*/
virtual StringArray getOutputChannelNames() = 0;
/** Returns the names of all the available input channels on this device.
To find out which of these are currently in use, call getActiveInputChannels().
*/
virtual StringArray getInputChannelNames() = 0;
//==============================================================================
/** Returns the set of sample-rates this device supports.
@see getCurrentSampleRate
*/
virtual Array<double> getAvailableSampleRates() = 0;
/** Returns the set of buffer sizes that are available.
@see getCurrentBufferSizeSamples, getDefaultBufferSize
*/
virtual Array<int> getAvailableBufferSizes() = 0;
/** Returns the default buffer-size to use.
@returns a number of samples
@see getAvailableBufferSizes
*/
virtual int getDefaultBufferSize() = 0;
//==============================================================================
/** Tries to open the device ready to play.
@param inputChannels a BigInteger in which a set bit indicates that the corresponding
input channel should be enabled
@param outputChannels a BigInteger in which a set bit indicates that the corresponding
output channel should be enabled
@param sampleRate the sample rate to try to use - to find out which rates are
available, see getAvailableSampleRates()
@param bufferSizeSamples the size of i/o buffer to use - to find out the available buffer
sizes, see getAvailableBufferSizes()
@returns an error description if there's a problem, or an empty string if it succeeds in
opening the device
@see close
*/
virtual String open (const BigInteger& inputChannels,
const BigInteger& outputChannels,
double sampleRate,
int bufferSizeSamples) = 0;
/** Closes and releases the device if it's open. */
virtual void close() = 0;
/** Returns true if the device is still open.
A device might spontaneously close itself if something goes wrong, so this checks if
it's still open.
*/
virtual bool isOpen() = 0;
/** Starts the device actually playing.
This must be called after the device has been opened.
@param callback the callback to use for streaming the data.
@see AudioIODeviceCallback, open
*/
virtual void start (AudioIODeviceCallback* callback) = 0;
/** Stops the device playing.
Once a device has been started, this will stop it. Any pending calls to the
callback class will be flushed before this method returns.
*/
virtual void stop() = 0;
/** Returns true if the device is still calling back.
The device might mysteriously stop, so this checks whether it's
still playing.
*/
virtual bool isPlaying() = 0;
/** Returns the last error that happened if anything went wrong. */
virtual String getLastError() = 0;
//==============================================================================
/** Returns the buffer size that the device is currently using.
If the device isn't actually open, this value doesn't really mean much.
*/
virtual int getCurrentBufferSizeSamples() = 0;
/** Returns the sample rate that the device is currently using.
If the device isn't actually open, this value doesn't really mean much.
*/
virtual double getCurrentSampleRate() = 0;
/** Returns the device's current physical bit-depth.
If the device isn't actually open, this value doesn't really mean much.
*/
virtual int getCurrentBitDepth() = 0;
/** Returns a mask showing which of the available output channels are currently
enabled.
@see getOutputChannelNames
*/
virtual BigInteger getActiveOutputChannels() const = 0;
/** Returns a mask showing which of the available input channels are currently
enabled.
@see getInputChannelNames
*/
virtual BigInteger getActiveInputChannels() const = 0;
/** Returns the device's output latency.
This is the delay in samples between a callback getting a block of data, and
that data actually getting played.
*/
virtual int getOutputLatencyInSamples() = 0;
/** Returns the device's input latency.
This is the delay in samples between some audio actually arriving at the soundcard,
and the callback getting passed this block of data.
*/
virtual int getInputLatencyInSamples() = 0;
//==============================================================================
/** True if this device can show a pop-up control panel for editing its settings.
This is generally just true of ASIO devices. If true, you can call showControlPanel()
to display it.
*/
virtual bool hasControlPanel() const;
/** Shows a device-specific control panel if there is one.
This should only be called for devices which return true from hasControlPanel().
*/
virtual bool showControlPanel();
/** On devices which support it, this allows automatic gain control or other
mic processing to be disabled.
If the device doesn't support this operation, it'll return false.
*/
virtual bool setAudioPreprocessingEnabled (bool shouldBeEnabled);
//==============================================================================
protected:
/** Creates a device, setting its name and type member variables. */
AudioIODevice (const String& deviceName,
const String& typeName);
/** @internal */
String name, typeName;
};
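//==============================================================================
/* Sketch of the open/start/stop lifecycle described above (illustrative only).
   'type' is assumed to be an AudioIODeviceType that has already had
   scanForDevices() called, 'outputName'/'inputName' come from its device list,
   and 'myCallback' is an AudioIODeviceCallback implementation.

    ScopedPointer<AudioIODevice> device (type->createDevice (outputName, inputName));

    BigInteger ins, outs;
    ins.setRange (0, 2, true);       // enable the first two input channels
    outs.setRange (0, 2, true);      // enable the first two output channels

    const String error (device->open (ins, outs, 44100.0, device->getDefaultBufferSize()));

    if (error.isEmpty())
    {
        device->start (&myCallback); // callbacks now arrive on the audio thread
        // ... run for a while ...
        device->stop();
        device->close();
    }
*/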
#endif // JUCE_AUDIOIODEVICE_H_INCLUDED


@ -0,0 +1,78 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
AudioIODeviceType::AudioIODeviceType (const String& name)
: typeName (name)
{
}
AudioIODeviceType::~AudioIODeviceType()
{
}
//==============================================================================
void AudioIODeviceType::addListener (Listener* l) { listeners.add (l); }
void AudioIODeviceType::removeListener (Listener* l) { listeners.remove (l); }
void AudioIODeviceType::callDeviceChangeListeners()
{
listeners.call (&AudioIODeviceType::Listener::audioDeviceListChanged);
}
//==============================================================================
#if ! JUCE_MAC
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_CoreAudio() { return nullptr; }
#endif
#if ! JUCE_IOS
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_iOSAudio() { return nullptr; }
#endif
#if ! (JUCE_WINDOWS && JUCE_WASAPI)
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_WASAPI() { return nullptr; }
#endif
#if ! (JUCE_WINDOWS && JUCE_DIRECTSOUND)
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_DirectSound() { return nullptr; }
#endif
#if ! (JUCE_WINDOWS && JUCE_ASIO)
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_ASIO() { return nullptr; }
#endif
#if ! (JUCE_LINUX && JUCE_ALSA)
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_ALSA() { return nullptr; }
#endif
#if ! (JUCE_LINUX && JUCE_JACK)
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_JACK() { return nullptr; }
#endif
#if ! JUCE_ANDROID
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_Android() { return nullptr; }
#endif
#if ! (JUCE_ANDROID && JUCE_USE_ANDROID_OPENSLES)
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_OpenSLES() { return nullptr; }
#endif


@ -0,0 +1,182 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#ifndef JUCE_AUDIOIODEVICETYPE_H_INCLUDED
#define JUCE_AUDIOIODEVICETYPE_H_INCLUDED
//==============================================================================
/**
Represents a type of audio driver, such as DirectSound, ASIO, CoreAudio, etc.
To get a list of available audio driver types, use the AudioDeviceManager::createAudioDeviceTypes()
method. Each of the objects returned can then be used to list the available
devices of that type. E.g.
@code
OwnedArray<AudioIODeviceType> types;
myAudioDeviceManager.createAudioDeviceTypes (types);
for (int i = 0; i < types.size(); ++i)
{
String typeName (types[i]->getTypeName()); // This will be things like "DirectSound", "CoreAudio", etc.
types[i]->scanForDevices(); // This must be called before getting the list of devices
StringArray deviceNames (types[i]->getDeviceNames()); // This will now return a list of available devices of this type
for (int j = 0; j < deviceNames.size(); ++j)
{
AudioIODevice* device = types[i]->createDevice (deviceNames [j]);
...
}
}
@endcode
For an easier way of managing audio devices and their settings, have a look at the
AudioDeviceManager class.
@see AudioIODevice, AudioDeviceManager
*/
class JUCE_API AudioIODeviceType
{
public:
//==============================================================================
/** Returns the name of this type of driver that this object manages.
This will be something like "DirectSound", "ASIO", "CoreAudio", "ALSA", etc.
*/
const String& getTypeName() const noexcept { return typeName; }
//==============================================================================
/** Refreshes the object's cached list of known devices.
This must be called at least once before calling getDeviceNames() or any of
the other device creation methods.
*/
virtual void scanForDevices() = 0;
/** Returns the list of available devices of this type.
The scanForDevices() method must have been called to create this list.
@param wantInputNames only really used by DirectSound where devices are split up
into inputs and outputs, this indicates whether to use
the input or output name to refer to a pair of devices.
*/
virtual StringArray getDeviceNames (bool wantInputNames = false) const = 0;
/** Returns the index of the default device.
This will be an index into the list returned by getDeviceNames().
@param forInput if true, this means that a default input device should be
returned; if false, it should return the default output
*/
virtual int getDefaultDeviceIndex (bool forInput) const = 0;
/** Returns the index of a given device in the list of device names.
If asInput is true, it returns the index in the inputs list, otherwise it
looks for it in the outputs list.
*/
virtual int getIndexOfDevice (AudioIODevice* device, bool asInput) const = 0;
/** Returns true if two different devices can be used for the input and output.
*/
virtual bool hasSeparateInputsAndOutputs() const = 0;
/** Creates one of the devices of this type.
The deviceName must be one of the strings returned by getDeviceNames(), and
scanForDevices() must have been called before this method is used.
*/
virtual AudioIODevice* createDevice (const String& outputDeviceName,
const String& inputDeviceName) = 0;
//==============================================================================
/**
A class for receiving events when audio devices are inserted or removed.
You can register an AudioIODeviceType::Listener with an AudioIODeviceType object
using the AudioIODeviceType::addListener() method, and it will be called when
devices of that type are added or removed.
@see AudioIODeviceType::addListener, AudioIODeviceType::removeListener
*/
class Listener
{
public:
virtual ~Listener() {}
/** Called when the list of available audio devices changes. */
virtual void audioDeviceListChanged() = 0;
};
/** Adds a listener that will be called when this type of device is added or
removed from the system.
*/
void addListener (Listener* listener);
/** Removes a listener that was previously added with addListener(). */
void removeListener (Listener* listener);
//==============================================================================
/** Destructor. */
virtual ~AudioIODeviceType();
//==============================================================================
/** Creates a CoreAudio device type if it's available on this platform, or returns null. */
static AudioIODeviceType* createAudioIODeviceType_CoreAudio();
/** Creates an iOS device type if it's available on this platform, or returns null. */
static AudioIODeviceType* createAudioIODeviceType_iOSAudio();
/** Creates a WASAPI device type if it's available on this platform, or returns null. */
static AudioIODeviceType* createAudioIODeviceType_WASAPI();
/** Creates a DirectSound device type if it's available on this platform, or returns null. */
static AudioIODeviceType* createAudioIODeviceType_DirectSound();
/** Creates an ASIO device type if it's available on this platform, or returns null. */
static AudioIODeviceType* createAudioIODeviceType_ASIO();
/** Creates an ALSA device type if it's available on this platform, or returns null. */
static AudioIODeviceType* createAudioIODeviceType_ALSA();
/** Creates a JACK device type if it's available on this platform, or returns null. */
static AudioIODeviceType* createAudioIODeviceType_JACK();
/** Creates an Android device type if it's available on this platform, or returns null. */
static AudioIODeviceType* createAudioIODeviceType_Android();
/** Creates an Android OpenSLES device type if it's available on this platform, or returns null. */
static AudioIODeviceType* createAudioIODeviceType_OpenSLES();
protected:
explicit AudioIODeviceType (const String& typeName);
/** Synchronously calls all the registered device list change listeners. */
void callDeviceChangeListeners();
private:
String typeName;
ListenerList<Listener> listeners;
JUCE_DECLARE_NON_COPYABLE (AudioIODeviceType)
};
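//==============================================================================
/* Sketch (not part of the original header): reacting to devices being plugged in
   or removed. 'DeviceListWatcher' is a made-up name that simply implements the
   Listener interface declared above; 'type' is an AudioIODeviceType*.

    struct DeviceListWatcher  : public AudioIODeviceType::Listener
    {
        void audioDeviceListChanged() override
        {
            // re-run scanForDevices() and refresh any UI that shows the device list
        }
    };

    DeviceListWatcher watcher;
    type->addListener (&watcher);
    // ... later, before the watcher is destroyed ...
    type->removeListener (&watcher);
*/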
#endif // JUCE_AUDIOIODEVICETYPE_H_INCLUDED


@ -0,0 +1,61 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#ifndef JUCE_SYSTEMAUDIOVOLUME_H_INCLUDED
#define JUCE_SYSTEMAUDIOVOLUME_H_INCLUDED
//==============================================================================
/**
Contains functions to control the system's master volume.
*/
class JUCE_API SystemAudioVolume
{
public:
//==============================================================================
/** Returns the operating system's current volume level in the range 0 to 1.0 */
static float JUCE_CALLTYPE getGain();
/** Attempts to set the operating system's current volume level.
@param newGain the level, between 0 and 1.0
@returns true if the operation succeeds
*/
static bool JUCE_CALLTYPE setGain (float newGain);
/** Returns true if the system's audio output is currently muted. */
static bool JUCE_CALLTYPE isMuted();
/** Attempts to mute the operating system's audio output.
@param shouldBeMuted true if you want it to be muted
@returns true if the operation succeeds
*/
static bool JUCE_CALLTYPE setMuted (bool shouldBeMuted);
private:
SystemAudioVolume(); // Don't instantiate this class, just call its static fns.
JUCE_DECLARE_NON_COPYABLE (SystemAudioVolume)
};
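//==============================================================================
/* Sketch (illustrative only): nudging the master volume down by 10% and then
   muting it, checking the return values because these calls can fail on some
   platforms. jmax() and DBG() come from juce_core.

    if (! SystemAudioVolume::isMuted())
    {
        const float current = SystemAudioVolume::getGain();

        if (! SystemAudioVolume::setGain (jmax (0.0f, current - 0.1f)))
            DBG ("couldn't change the system volume");
    }

    SystemAudioVolume::setMuted (true);
*/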
#endif // JUCE_SYSTEMAUDIOVOLUME_H_INCLUDED


@ -0,0 +1,227 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#if defined (JUCE_AUDIO_DEVICES_H_INCLUDED) && ! JUCE_AMALGAMATED_INCLUDE
/* When you add this cpp file to your project, you mustn't include it in a file where you've
already included any other headers - just put it inside a file on its own, possibly with your config
flags preceding it, but don't include anything else. That also includes avoiding any automatic prefix
header files that the compiler may be using.
*/
#error "Incorrect use of JUCE cpp file"
#endif
// Your project must contain an AppConfig.h file with your project-specific settings in it,
// and your header search path must make it accessible to the module's files.
#include "AppConfig.h"
#include "../juce_core/native/juce_BasicNativeHeaders.h"
#include "juce_audio_devices.h"
//==============================================================================
#if JUCE_MAC
#define Point CarbonDummyPointName
#define Component CarbonDummyCompName
#import <CoreAudio/AudioHardware.h>
#import <CoreMIDI/MIDIServices.h>
#import <DiscRecording/DiscRecording.h>
#import <AudioToolbox/AudioServices.h>
#undef Point
#undef Component
#elif JUCE_IOS
#import <AudioToolbox/AudioToolbox.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreMIDI/MIDIServices.h>
//==============================================================================
#elif JUCE_WINDOWS
#if JUCE_WASAPI
#include <MMReg.h>
#endif
#if JUCE_ASIO
/* This is very frustrating - we only need to use a handful of definitions from
a couple of the header files in Steinberg's ASIO SDK, and it'd be easy to copy
about 30 lines of code into this cpp file to create a fully stand-alone ASIO
implementation...
..unfortunately that would break Steinberg's license agreement for use of
their SDK, so I'm not allowed to do this.
This means that anyone who wants to use JUCE's ASIO abilities will have to:
1) Agree to Steinberg's licensing terms and download the ASIO SDK
(see www.steinberg.net/Steinberg/Developers.asp).
2) Enable this code with a global definition #define JUCE_ASIO 1.
3) Make sure that your header search path contains the iasiodrv.h file that
comes with the SDK. (Only about a handful of the SDK header files are actually
needed - so to simplify things, you could just copy these into your JUCE directory).
*/
#include <iasiodrv.h>
#endif
#if JUCE_USE_CDBURNER
/* You'll need the Platform SDK for these headers - if you don't have it and don't
need to use CD-burning, then you might just want to set the JUCE_USE_CDBURNER flag
to 0, to avoid these includes.
*/
#include <imapi.h>
#include <imapierror.h>
#endif
//==============================================================================
#elif JUCE_LINUX
#if JUCE_ALSA
/* Got an include error here? If so, you've either not got ALSA installed, or you've
not got your paths set up correctly to find its header files.
The package you need to install to get ALSA support is "libasound2-dev".
If you don't have the ALSA library and don't want to build Juce with audio support,
just set the JUCE_ALSA flag to 0.
*/
#include <alsa/asoundlib.h>
#endif
#if JUCE_JACK
/* Got an include error here? If so, you've either not got jack-audio-connection-kit
installed, or you've not got your paths set up correctly to find its header files.
The package you need to install to get JACK support is "libjack-dev".
If you don't have the jack-audio-connection-kit library and don't want to build
Juce with low latency audio support, just set the JUCE_JACK flag to 0.
*/
#include <jack/jack.h>
#endif
#undef SIZEOF
//==============================================================================
#elif JUCE_ANDROID
#if JUCE_USE_ANDROID_OPENSLES
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
#include <SLES/OpenSLES_AndroidConfiguration.h>
#endif
#endif
namespace juce
{
#include "audio_io/juce_AudioDeviceManager.cpp"
#include "audio_io/juce_AudioIODevice.cpp"
#include "audio_io/juce_AudioIODeviceType.cpp"
#include "midi_io/juce_MidiMessageCollector.cpp"
#include "midi_io/juce_MidiOutput.cpp"
#include "audio_cd/juce_AudioCDReader.cpp"
#include "sources/juce_AudioSourcePlayer.cpp"
#include "sources/juce_AudioTransportSource.cpp"
#include "native/juce_MidiDataConcatenator.h"
//==============================================================================
#if JUCE_MAC
#include "../juce_core/native/juce_osx_ObjCHelpers.h"
#include "native/juce_mac_CoreAudio.cpp"
#include "native/juce_mac_CoreMidi.cpp"
#if JUCE_USE_CDREADER
#include "native/juce_mac_AudioCDReader.mm"
#endif
#if JUCE_USE_CDBURNER
#include "native/juce_mac_AudioCDBurner.mm"
#endif
//==============================================================================
#elif JUCE_IOS
#include "native/juce_ios_Audio.cpp"
#include "native/juce_mac_CoreMidi.cpp"
//==============================================================================
#elif JUCE_WINDOWS
#include "../juce_core/native/juce_win32_ComSmartPtr.h"
#include "../juce_events/native/juce_win32_HiddenMessageWindow.h"
#if JUCE_WASAPI
#include "native/juce_win32_WASAPI.cpp"
#endif
#if JUCE_DIRECTSOUND
#include "native/juce_win32_DirectSound.cpp"
#endif
#include "native/juce_win32_Midi.cpp"
#if JUCE_ASIO
#include "native/juce_win32_ASIO.cpp"
#endif
#if JUCE_USE_CDREADER
#include "native/juce_win32_AudioCDReader.cpp"
#endif
#if JUCE_USE_CDBURNER
#include "native/juce_win32_AudioCDBurner.cpp"
#endif
//==============================================================================
#elif JUCE_LINUX
#if JUCE_ALSA
#include "native/juce_linux_ALSA.cpp"
#endif
#include "native/juce_linux_Midi.cpp"
#if JUCE_JACK
#include "native/juce_linux_JackAudio.cpp"
#endif
#if JUCE_USE_CDREADER
#include "native/juce_linux_AudioCDReader.cpp"
#endif
//==============================================================================
#elif JUCE_ANDROID
#include "../juce_core/native/juce_android_JNIHelpers.h"
#include "native/juce_android_Audio.cpp"
#include "native/juce_android_Midi.cpp"
#if JUCE_USE_ANDROID_OPENSLES
#include "native/juce_android_OpenSL.cpp"
#endif
#endif
#if ! JUCE_SYSTEMAUDIOVOL_IMPLEMENTED
// None of these methods are available. (On Windows you might need to enable WASAPI for this)
float JUCE_CALLTYPE SystemAudioVolume::getGain() { jassertfalse; return 0.0f; }
bool JUCE_CALLTYPE SystemAudioVolume::setGain (float) { jassertfalse; return false; }
bool JUCE_CALLTYPE SystemAudioVolume::isMuted() { jassertfalse; return false; }
bool JUCE_CALLTYPE SystemAudioVolume::setMuted (bool) { jassertfalse; return false; }
#endif
}


@ -0,0 +1,117 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#ifndef JUCE_AUDIO_DEVICES_H_INCLUDED
#define JUCE_AUDIO_DEVICES_H_INCLUDED
#include "../juce_events/juce_events.h"
#include "../juce_audio_basics/juce_audio_basics.h"
#include "../juce_audio_formats/juce_audio_formats.h"
//=============================================================================
/** Config: JUCE_ASIO
Enables ASIO audio devices (MS Windows only).
Turning this on means that you'll need to have the Steinberg ASIO SDK installed
on your Windows build machine.
See the comments in the ASIOAudioIODevice class's header file for more
info about this.
*/
#ifndef JUCE_ASIO
#define JUCE_ASIO 0
#endif
/** Config: JUCE_WASAPI
Enables WASAPI audio devices (Windows Vista and above).
*/
#ifndef JUCE_WASAPI
#define JUCE_WASAPI 1
#endif
/** Config: JUCE_DIRECTSOUND
Enables DirectSound audio (MS Windows only).
*/
#ifndef JUCE_DIRECTSOUND
#define JUCE_DIRECTSOUND 1
#endif
/** Config: JUCE_ALSA
Enables ALSA audio devices (Linux only).
*/
#ifndef JUCE_ALSA
#define JUCE_ALSA 1
#endif
/** Config: JUCE_JACK
Enables JACK audio devices (Linux only).
*/
#ifndef JUCE_JACK
#define JUCE_JACK 0
#endif
/** Config: JUCE_USE_ANDROID_OPENSLES
Enables OpenSLES devices (Android only).
*/
#ifndef JUCE_USE_ANDROID_OPENSLES
#if JUCE_ANDROID_API_VERSION > 8
#define JUCE_USE_ANDROID_OPENSLES 1
#else
#define JUCE_USE_ANDROID_OPENSLES 0
#endif
#endif
//=============================================================================
/** Config: JUCE_USE_CDREADER
Enables the AudioCDReader class (on supported platforms).
*/
#ifndef JUCE_USE_CDREADER
#define JUCE_USE_CDREADER 0
#endif
/** Config: JUCE_USE_CDBURNER
Enables the AudioCDBurner class (on supported platforms).
*/
#ifndef JUCE_USE_CDBURNER
#define JUCE_USE_CDBURNER 0
#endif
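/* Illustrative note (not part of the original header): these flags are normally
   overridden from the project's AppConfig.h, which is included before this header,
   for example:

       #define JUCE_ASIO 1          // needs the Steinberg ASIO SDK on the include path
       #define JUCE_JACK 1
       #define JUCE_USE_CDBURNER 0

   Anything left undefined falls back to the defaults chosen above.
*/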
//=============================================================================
namespace juce
{
#include "audio_io/juce_AudioIODevice.h"
#include "audio_io/juce_AudioIODeviceType.h"
#include "audio_io/juce_SystemAudioVolume.h"
#include "midi_io/juce_MidiInput.h"
#include "midi_io/juce_MidiMessageCollector.h"
#include "midi_io/juce_MidiOutput.h"
#include "sources/juce_AudioSourcePlayer.h"
#include "sources/juce_AudioTransportSource.h"
#include "audio_cd/juce_AudioCDBurner.h"
#include "audio_cd/juce_AudioCDReader.h"
#include "audio_io/juce_AudioDeviceManager.h"
}
#endif // JUCE_AUDIO_DEVICES_H_INCLUDED


@ -0,0 +1,25 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#include "juce_audio_devices.cpp"


@ -0,0 +1,28 @@
{
"id": "juce_audio_devices",
"name": "JUCE audio and midi I/O device classes",
"version": "3.0.8",
"description": "Classes to play and record from audio and midi i/o devices.",
"website": "http://www.juce.com/juce",
"license": "GPL/Commercial",
"dependencies": [ { "id": "juce_audio_basics", "version": "matching" },
{ "id": "juce_audio_formats", "version": "matching" },
{ "id": "juce_events", "version": "matching" } ],
"include": "juce_audio_devices.h",
"compile": [ { "file": "juce_audio_devices.cpp", "target": "! xcode" },
{ "file": "juce_audio_devices.mm", "target": "xcode" } ],
"browse": [ "audio_io/*",
"midi_io/*",
"sources/*",
"audio_cd/*",
"native/*" ],
"OSXFrameworks": "CoreAudio CoreMIDI DiscRecording",
"iOSFrameworks": "AudioToolbox CoreMIDI",
"LinuxLibs": "asound",
"mingwLibs": "winmm"
}


@ -0,0 +1,182 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#ifndef JUCE_MIDIINPUT_H_INCLUDED
#define JUCE_MIDIINPUT_H_INCLUDED
class MidiInput;
//==============================================================================
/**
Receives incoming messages from a physical MIDI input device.
Override this class to handle incoming midi messages. See the MidiInput
class for more details.
@see MidiInput
*/
class JUCE_API MidiInputCallback
{
public:
/** Destructor. */
virtual ~MidiInputCallback() {}
/** Receives an incoming message.
A MidiInput object will call this method when a midi event arrives. It'll be
called on a high-priority system thread, so avoid doing anything time-consuming
in here, and avoid making any UI calls. You might find the MidiBuffer class helpful
for queueing incoming messages for use later.
@param source the MidiInput object that generated the message
@param message the incoming message. The message's timestamp is set to a value
equivalent to (Time::getMillisecondCounter() / 1000.0) to specify the
time when the message arrived.
*/
virtual void handleIncomingMidiMessage (MidiInput* source,
const MidiMessage& message) = 0;
/** Notification sent each time a packet of a multi-packet sysex message arrives.
If a long sysex message is broken up into multiple packets, this callback is made
for each packet that arrives until the message is finished, at which point
the normal handleIncomingMidiMessage() callback will be made with the entire
message.
The message passed in will contain the start of a sysex, but won't be finished
with the terminating 0xf7 byte.
*/
virtual void handlePartialSysexMessage (MidiInput* source,
const uint8* messageData,
int numBytesSoFar,
double timestamp)
{
// (this bit is just to avoid compiler warnings about unused variables)
(void) source; (void) messageData; (void) numBytesSoFar; (void) timestamp;
}
};
//==============================================================================
/**
Represents a midi input device.
To create one of these, use the static getDevices() method to find out what inputs are
available, and then use the openDevice() method to try to open one.
@see MidiOutput
*/
class JUCE_API MidiInput
{
public:
//==============================================================================
/** Returns a list of the available midi input devices.
You can open one of the devices by passing its index into the
openDevice() method.
@see getDefaultDeviceIndex, openDevice
*/
static StringArray getDevices();
/** Returns the index of the default midi input device to use.
This refers to the index in the list returned by getDevices().
*/
static int getDefaultDeviceIndex();
/** Tries to open one of the midi input devices.
This will return a MidiInput object if it manages to open it. You can then
call start() and stop() on this device, and delete it when no longer needed.
If the device can't be opened, this will return a null pointer.
@param deviceIndex the index of a device from the list returned by getDevices()
@param callback the object that will receive the midi messages from this device.
@see MidiInputCallback, getDevices
*/
static MidiInput* openDevice (int deviceIndex,
MidiInputCallback* callback);
#if JUCE_LINUX || JUCE_MAC || JUCE_IOS || DOXYGEN
/** This will try to create a new midi input device (Not available on Windows).
This will attempt to create a new midi input device with the specified name,
for other apps to connect to.
Returns nullptr if a device can't be created.
@param deviceName the name to use for the new device
@param callback the object that will receive the midi messages from this device.
*/
static MidiInput* createNewDevice (const String& deviceName,
MidiInputCallback* callback);
#endif
//==============================================================================
/** Destructor. */
virtual ~MidiInput();
/** Returns the name of this device. */
const String& getName() const noexcept { return name; }
/** Allows you to set a custom name for the device, in case you don't like the name
it was given when created.
*/
void setName (const String& newName) noexcept { name = newName; }
//==============================================================================
/** Starts the device running.
After calling this, the device will start sending midi messages to the
MidiInputCallback object that was specified when the openDevice() method
was called.
@see stop
*/
virtual void start();
/** Stops the device running.
@see start
*/
virtual void stop();
protected:
//==============================================================================
String name;
void* internal;
explicit MidiInput (const String& name);
private:
//==============================================================================
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (MidiInput)
};
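//==============================================================================
/* Sketch (not part of the original header): opening the default MIDI input with a
   callback. 'MyMidiHandler' is a made-up MidiInputCallback implementation; keep
   the returned device in a ScopedPointer (or delete it) when finished with it.

    struct MyMidiHandler  : public MidiInputCallback
    {
        void handleIncomingMidiMessage (MidiInput*, const MidiMessage& message) override
        {
            // called on a high-priority MIDI thread - keep this quick
        }
    };

    MyMidiHandler handler;
    ScopedPointer<MidiInput> input (MidiInput::openDevice (MidiInput::getDefaultDeviceIndex(),
                                                           &handler));
    if (input != nullptr)
        input->start();
*/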
#endif // JUCE_MIDIINPUT_H_INCLUDED


@ -0,0 +1,153 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
MidiMessageCollector::MidiMessageCollector()
: lastCallbackTime (0),
sampleRate (44100.0001)
{
}
MidiMessageCollector::~MidiMessageCollector()
{
}
//==============================================================================
void MidiMessageCollector::reset (const double sampleRate_)
{
jassert (sampleRate_ > 0);
const ScopedLock sl (midiCallbackLock);
sampleRate = sampleRate_;
incomingMessages.clear();
lastCallbackTime = Time::getMillisecondCounterHiRes();
}
void MidiMessageCollector::addMessageToQueue (const MidiMessage& message)
{
// you need to call reset() to set the correct sample rate before using this object
jassert (sampleRate != 44100.0001);
// the messages that come in here need to be time-stamped correctly - see MidiInput
// for details of what the number should be.
jassert (message.getTimeStamp() != 0);
const ScopedLock sl (midiCallbackLock);
const int sampleNumber
= (int) ((message.getTimeStamp() - 0.001 * lastCallbackTime) * sampleRate);
incomingMessages.addEvent (message, sampleNumber);
// if the messages don't get used for over a second, we'd better
// get rid of any old ones to avoid the queue getting too big
if (sampleNumber > sampleRate)
incomingMessages.clear (0, sampleNumber - (int) sampleRate);
}
void MidiMessageCollector::removeNextBlockOfMessages (MidiBuffer& destBuffer,
const int numSamples)
{
// you need to call reset() to set the correct sample rate before using this object
jassert (sampleRate != 44100.0001);
jassert (numSamples > 0);
const double timeNow = Time::getMillisecondCounterHiRes();
const double msElapsed = timeNow - lastCallbackTime;
const ScopedLock sl (midiCallbackLock);
lastCallbackTime = timeNow;
if (! incomingMessages.isEmpty())
{
int numSourceSamples = jmax (1, roundToInt (msElapsed * 0.001 * sampleRate));
int startSample = 0;
int scale = 1 << 16;
const uint8* midiData;
int numBytes, samplePosition;
MidiBuffer::Iterator iter (incomingMessages);
if (numSourceSamples > numSamples)
{
// if our list of events is longer than the buffer we're being
// asked for, scale them down to squeeze them all in..
const int maxBlockLengthToUse = numSamples << 5;
if (numSourceSamples > maxBlockLengthToUse)
{
startSample = numSourceSamples - maxBlockLengthToUse;
numSourceSamples = maxBlockLengthToUse;
iter.setNextSamplePosition (startSample);
}
scale = (numSamples << 10) / numSourceSamples;
while (iter.getNextEvent (midiData, numBytes, samplePosition))
{
samplePosition = ((samplePosition - startSample) * scale) >> 10;
destBuffer.addEvent (midiData, numBytes,
jlimit (0, numSamples - 1, samplePosition));
}
}
else
{
// if our event list is shorter than the number we need, put them
// towards the end of the buffer
startSample = numSamples - numSourceSamples;
while (iter.getNextEvent (midiData, numBytes, samplePosition))
{
destBuffer.addEvent (midiData, numBytes,
jlimit (0, numSamples - 1, samplePosition + startSample));
}
}
incomingMessages.clear();
}
}
//==============================================================================
void MidiMessageCollector::handleNoteOn (MidiKeyboardState*, int midiChannel, int midiNoteNumber, float velocity)
{
MidiMessage m (MidiMessage::noteOn (midiChannel, midiNoteNumber, velocity));
m.setTimeStamp (Time::getMillisecondCounterHiRes() * 0.001);
addMessageToQueue (m);
}
void MidiMessageCollector::handleNoteOff (MidiKeyboardState*, int midiChannel, int midiNoteNumber)
{
MidiMessage m (MidiMessage::noteOff (midiChannel, midiNoteNumber));
m.setTimeStamp (Time::getMillisecondCounterHiRes() * 0.001);
addMessageToQueue (m);
}
void MidiMessageCollector::handleIncomingMidiMessage (MidiInput*, const MidiMessage& message)
{
addMessageToQueue (message);
}


@ -0,0 +1,104 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#ifndef JUCE_MIDIMESSAGECOLLECTOR_H_INCLUDED
#define JUCE_MIDIMESSAGECOLLECTOR_H_INCLUDED
//==============================================================================
/**
Collects incoming realtime MIDI messages and turns them into blocks suitable for
processing by a block-based audio callback.
The class can also be used as either a MidiKeyboardStateListener or a MidiInputCallback
so it can easily use a midi input or keyboard component as its source.
@see MidiMessage, MidiInput
*/
class JUCE_API MidiMessageCollector : public MidiKeyboardStateListener,
public MidiInputCallback
{
public:
//==============================================================================
/** Creates a MidiMessageCollector. */
MidiMessageCollector();
/** Destructor. */
~MidiMessageCollector();
//==============================================================================
/** Clears any pending messages from the queue and sets the sample rate to use.
You need to call this method before starting to use the collector, so that
it knows the correct sample rate to use.
*/
void reset (double sampleRate);
/** Takes an incoming real-time message and adds it to the queue.
The message's timestamp is taken, and it will be ready for retrieval as part
of the block returned by the next call to removeNextBlockOfMessages().
This method is fully thread-safe when overlapping calls are made with
removeNextBlockOfMessages().
*/
void addMessageToQueue (const MidiMessage& message);
/** Removes all the pending messages from the queue as a buffer.
This will also correct the messages' timestamps to make sure they're in
the range 0 to numSamples - 1.
This call should be made regularly by something like an audio processing
callback, because the time that it happens is used in calculating the
midi event positions.
This method is fully thread-safe when overlapping calls are made with
addMessageToQueue().
Precondition: numSamples must be greater than 0.
*/
void removeNextBlockOfMessages (MidiBuffer& destBuffer, int numSamples);
//==============================================================================
/** @internal */
void handleNoteOn (MidiKeyboardState*, int midiChannel, int midiNoteNumber, float velocity) override;
/** @internal */
void handleNoteOff (MidiKeyboardState*, int midiChannel, int midiNoteNumber) override;
/** @internal */
void handleIncomingMidiMessage (MidiInput*, const MidiMessage&) override;
private:
//==============================================================================
double lastCallbackTime;
CriticalSection midiCallbackLock;
MidiBuffer incomingMessages;
double sampleRate;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (MidiMessageCollector)
};
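//==============================================================================
/* Sketch (illustrative only): typical use from a block-based audio callback.
   prepareToPlay() and processBlock() are hypothetical host callbacks; the
   collector would normally also be registered as the MidiInputCallback of a
   MidiInput device so that addMessageToQueue() gets fed automatically.

    MidiMessageCollector collector;

    void prepareToPlay (double sampleRate)
    {
        collector.reset (sampleRate);    // must be called before anything else
    }

    void processBlock (AudioSampleBuffer& audio, MidiBuffer& midi)
    {
        collector.removeNextBlockOfMessages (midi, audio.getNumSamples());
        // ... render the block using the messages in 'midi' ...
    }
*/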
#endif // JUCE_MIDIMESSAGECOLLECTOR_H_INCLUDED


@ -0,0 +1,162 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
struct MidiOutput::PendingMessage
{
PendingMessage (const void* const data, const int len, const double timeStamp)
: message (data, len, timeStamp)
{}
MidiMessage message;
PendingMessage* next;
};
MidiOutput::MidiOutput()
: Thread ("midi out"),
internal (nullptr),
firstMessage (nullptr)
{
}
void MidiOutput::sendBlockOfMessages (const MidiBuffer& buffer,
const double millisecondCounterToStartAt,
double samplesPerSecondForBuffer)
{
// You've got to call startBackgroundThread() for this to actually work..
jassert (isThreadRunning());
// this needs to be a value in the future - RTFM for this method!
jassert (millisecondCounterToStartAt > 0);
const double timeScaleFactor = 1000.0 / samplesPerSecondForBuffer;
MidiBuffer::Iterator i (buffer);
const uint8* data;
int len, time;
while (i.getNextEvent (data, len, time))
{
const double eventTime = millisecondCounterToStartAt + timeScaleFactor * time;
PendingMessage* const m = new PendingMessage (data, len, eventTime);
const ScopedLock sl (lock);
if (firstMessage == nullptr || firstMessage->message.getTimeStamp() > eventTime)
{
m->next = firstMessage;
firstMessage = m;
}
else
{
PendingMessage* mm = firstMessage;
while (mm->next != nullptr && mm->next->message.getTimeStamp() <= eventTime)
mm = mm->next;
m->next = mm->next;
mm->next = m;
}
}
notify();
}
void MidiOutput::clearAllPendingMessages()
{
const ScopedLock sl (lock);
while (firstMessage != nullptr)
{
PendingMessage* const m = firstMessage;
firstMessage = firstMessage->next;
delete m;
}
}
void MidiOutput::startBackgroundThread()
{
startThread (9);
}
void MidiOutput::stopBackgroundThread()
{
stopThread (5000);
}
void MidiOutput::run()
{
while (! threadShouldExit())
{
uint32 now = Time::getMillisecondCounter();
uint32 eventTime = 0;
uint32 timeToWait = 500;
PendingMessage* message;
{
const ScopedLock sl (lock);
message = firstMessage;
if (message != nullptr)
{
eventTime = (uint32) roundToInt (message->message.getTimeStamp());
if (eventTime > now + 20)
{
timeToWait = eventTime - (now + 20);
message = nullptr;
}
else
{
firstMessage = message->next;
}
}
}
if (message != nullptr)
{
const ScopedPointer<PendingMessage> messageDeleter (message);
if (eventTime > now)
{
Time::waitForMillisecondCounter (eventTime);
if (threadShouldExit())
break;
}
if (eventTime > now - 200)
sendMessageNow (message->message);
}
else
{
jassert (timeToWait < 1000 * 30);
wait ((int) timeToWait);
}
}
clearAllPendingMessages();
}


@ -0,0 +1,147 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#ifndef JUCE_MIDIOUTPUT_H_INCLUDED
#define JUCE_MIDIOUTPUT_H_INCLUDED
//==============================================================================
/**
Controls a physical MIDI output device.
To create one of these, use the static getDevices() method to get a list of the
available output devices, then use the openDevice() method to try to open one.
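For example, a rough sketch (openDevice() may return nullptr if the device can't be
opened, and the channel/note values here are just placeholders):
@code
ScopedPointer<MidiOutput> out (MidiOutput::openDevice (MidiOutput::getDefaultDeviceIndex()));

if (out != nullptr)
    out->sendMessageNow (MidiMessage::noteOn (1, 60, (uint8) 100));
@endcode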
@see MidiInput
*/
class JUCE_API MidiOutput : private Thread
{
public:
//==============================================================================
/** Returns a list of the available midi output devices.
You can open one of the devices by passing its index into the
openDevice() method.
@see getDefaultDeviceIndex, openDevice
*/
static StringArray getDevices();
/** Returns the index of the default midi output device to use.
This refers to the index in the list returned by getDevices().
*/
static int getDefaultDeviceIndex();
/** Tries to open one of the midi output devices.
This will return a MidiOutput object if it manages to open it. You can then
send messages to this device, and delete it when no longer needed.
If the device can't be opened, this will return a null pointer.
@param deviceIndex the index of a device from the list returned by getDevices()
@see getDevices
*/
static MidiOutput* openDevice (int deviceIndex);
#if JUCE_LINUX || JUCE_MAC || JUCE_IOS || DOXYGEN
/** This will try to create a new midi output device (Not available on Windows).
This will attempt to create a new midi output device that other apps can connect
to and use as their midi input.
Returns nullptr if a device can't be created.
@param deviceName the name to use for the new device
*/
static MidiOutput* createNewDevice (const String& deviceName);
#endif
//==============================================================================
/** Destructor. */
virtual ~MidiOutput();
/** Makes this device output a midi message.
@see MidiMessage
*/
virtual void sendMessageNow (const MidiMessage& message);
//==============================================================================
/** This lets you supply a block of messages that will be sent out at some point
in the future.
The MidiOutput class has an internal thread that can send out timestamped
messages - this appends a set of messages to its internal buffer, ready for
sending.
This will only work if you've already started the thread with startBackgroundThread().
A time is supplied, at which the block of messages should be sent. This time uses
the same time base as Time::getMillisecondCounter(), and must be in the future.
The samplesPerSecondForBuffer parameter indicates the number of samples per second
used by the MidiBuffer. Each event in a MidiBuffer has a sample position, and the
samplesPerSecondForBuffer value is needed to convert this sample position to a
real time.
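A rough sketch of typical usage (midiOut is assumed to be an already-open
MidiOutput, and the timings are placeholders):
@code
midiOut->startBackgroundThread();

MidiBuffer buffer;
buffer.addEvent (MidiMessage::noteOn  (1, 60, (uint8) 100), 0);     // sample position 0
buffer.addEvent (MidiMessage::noteOff (1, 60), 22050);              // half a second later at 44.1 kHz

midiOut->sendBlockOfMessages (buffer, Time::getMillisecondCounter() + 100.0, 44100.0);
@endcode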
*/
virtual void sendBlockOfMessages (const MidiBuffer& buffer,
double millisecondCounterToStartAt,
double samplesPerSecondForBuffer);
/** Gets rid of any midi messages that had been added by sendBlockOfMessages().
*/
virtual void clearAllPendingMessages();
/** Starts up a background thread so that the device can send blocks of data.
Call this to get the device ready, before using sendBlockOfMessages().
*/
virtual void startBackgroundThread();
/** Stops the background thread, and clears any pending midi events.
@see startBackgroundThread
*/
virtual void stopBackgroundThread();
protected:
//==============================================================================
void* internal;
CriticalSection lock;
struct PendingMessage;
PendingMessage* firstMessage;
MidiOutput();
void run() override;
private:
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (MidiOutput)
};
#endif // JUCE_MIDIOUTPUT_H_INCLUDED

@@ -0,0 +1,187 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#ifndef JUCE_MIDIDATACONCATENATOR_H_INCLUDED
#define JUCE_MIDIDATACONCATENATOR_H_INCLUDED
//==============================================================================
/**
Helper class that takes chunks of incoming midi bytes, packages them into
messages, and dispatches them to a midi callback.
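A rough usage sketch (MyCallback, rawBytes, numRawBytes and timeStamp are
hypothetical names used for illustration only):
@code
struct MyCallback
{
    void handleIncomingMidiMessage (void*, const MidiMessage&)        {}  // respond to each complete message
    void handlePartialSysexMessage (void*, const uint8*, int, double) {}  // respond to a long, partial sysex
};

MidiDataConcatenator concatenator (2048);
MyCallback callback;
concatenator.pushMidiData (rawBytes, numRawBytes, timeStamp, (void*) nullptr, callback);
@endcode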
*/
class MidiDataConcatenator
{
public:
//==============================================================================
MidiDataConcatenator (const int initialBufferSize)
: pendingData ((size_t) initialBufferSize),
pendingDataTime (0), pendingBytes (0), runningStatus (0)
{
}
void reset()
{
pendingBytes = 0;
runningStatus = 0;
pendingDataTime = 0;
}
template <typename UserDataType, typename CallbackType>
void pushMidiData (const void* inputData, int numBytes, double time,
UserDataType* input, CallbackType& callback)
{
const uint8* d = static_cast <const uint8*> (inputData);
while (numBytes > 0)
{
if (pendingBytes > 0 || d[0] == 0xf0)
{
processSysex (d, numBytes, time, input, callback);
runningStatus = 0;
}
else
{
int len = 0;
uint8 data[3];
while (numBytes > 0)
{
// If there's a realtime message embedded in the middle of
// the normal message, handle it now..
if (*d >= 0xf8 && *d <= 0xfe)
{
const MidiMessage m (*d++, time);
callback.handleIncomingMidiMessage (input, m);
--numBytes;
}
else
{
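// a data byte arriving with no status byte re-uses the last running status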
if (len == 0 && *d < 0x80 && runningStatus >= 0x80)
data[len++] = runningStatus;
data[len++] = *d++;
--numBytes;
if (len >= MidiMessage::getMessageLengthFromFirstByte (data[0]))
break;
}
}
if (len > 0)
{
int used = 0;
const MidiMessage m (data, len, used, 0, time);
if (used <= 0)
break; // malformed message..
jassert (used == len);
callback.handleIncomingMidiMessage (input, m);
runningStatus = data[0];
}
}
}
}
private:
template <typename UserDataType, typename CallbackType>
void processSysex (const uint8*& d, int& numBytes, double time,
UserDataType* input, CallbackType& callback)
{
if (*d == 0xf0)
{
pendingBytes = 0;
pendingDataTime = time;
}
pendingData.ensureSize ((size_t) (pendingBytes + numBytes), false);
uint8* totalMessage = static_cast<uint8*> (pendingData.getData());
uint8* dest = totalMessage + pendingBytes;
do
{
if (pendingBytes > 0 && *d >= 0x80)
{
if (*d == 0xf7)
{
*dest++ = *d++;
++pendingBytes;
--numBytes;
break;
}
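// a system real-time byte (clock, start, stop, etc) interleaved inside the
// sysex data gets dispatched immediately as its own message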
if (*d >= 0xfa || *d == 0xf8)
{
callback.handleIncomingMidiMessage (input, MidiMessage (*d, time));
++d;
--numBytes;
}
else
{
pendingBytes = 0;
int used = 0;
const MidiMessage m (d, numBytes, used, 0, time);
if (used > 0)
{
callback.handleIncomingMidiMessage (input, m);
numBytes -= used;
d += used;
}
break;
}
}
else
{
*dest++ = *d++;
++pendingBytes;
--numBytes;
}
}
while (numBytes > 0);
if (pendingBytes > 0)
{
if (totalMessage [pendingBytes - 1] == 0xf7)
{
callback.handleIncomingMidiMessage (input, MidiMessage (totalMessage, pendingBytes, pendingDataTime));
pendingBytes = 0;
}
else
{
callback.handlePartialSysexMessage (input, totalMessage, pendingBytes, pendingDataTime);
}
}
}
MemoryBlock pendingData;
double pendingDataTime;
int pendingBytes;
uint8 runningStatus;
JUCE_DECLARE_NON_COPYABLE (MidiDataConcatenator)
};
#endif // JUCE_MIDIDATACONCATENATOR_H_INCLUDED

@@ -0,0 +1,450 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
//==============================================================================
#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
STATICMETHOD (getMinBufferSize, "getMinBufferSize", "(III)I") \
STATICMETHOD (getNativeOutputSampleRate, "getNativeOutputSampleRate", "(I)I") \
METHOD (constructor, "<init>", "(IIIIII)V") \
METHOD (getState, "getState", "()I") \
METHOD (play, "play", "()V") \
METHOD (stop, "stop", "()V") \
METHOD (release, "release", "()V") \
METHOD (flush, "flush", "()V") \
METHOD (write, "write", "([SII)I") \
DECLARE_JNI_CLASS (AudioTrack, "android/media/AudioTrack");
#undef JNI_CLASS_MEMBERS
//==============================================================================
#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
STATICMETHOD (getMinBufferSize, "getMinBufferSize", "(III)I") \
METHOD (constructor, "<init>", "(IIIII)V") \
METHOD (getState, "getState", "()I") \
METHOD (startRecording, "startRecording", "()V") \
METHOD (stop, "stop", "()V") \
METHOD (read, "read", "([SII)I") \
METHOD (release, "release", "()V") \
DECLARE_JNI_CLASS (AudioRecord, "android/media/AudioRecord");
#undef JNI_CLASS_MEMBERS
//==============================================================================
enum
{
CHANNEL_OUT_STEREO = 12,
CHANNEL_IN_STEREO = 12,
CHANNEL_IN_MONO = 16,
ENCODING_PCM_16BIT = 2,
STREAM_MUSIC = 3,
MODE_STREAM = 1,
STATE_UNINITIALIZED = 0
};
const char* const javaAudioTypeName = "Android Audio";
//==============================================================================
class AndroidAudioIODevice : public AudioIODevice,
public Thread
{
public:
//==============================================================================
AndroidAudioIODevice (const String& deviceName)
: AudioIODevice (deviceName, javaAudioTypeName),
Thread ("audio"),
minBufferSizeOut (0), minBufferSizeIn (0), callback (0), sampleRate (0),
numClientInputChannels (0), numDeviceInputChannels (0), numDeviceInputChannelsAvailable (2),
numClientOutputChannels (0), numDeviceOutputChannels (0),
actualBufferSize (0), isRunning (false),
inputChannelBuffer (1, 1),
outputChannelBuffer (1, 1)
{
JNIEnv* env = getEnv();
sampleRate = env->CallStaticIntMethod (AudioTrack, AudioTrack.getNativeOutputSampleRate, MODE_STREAM);
minBufferSizeOut = (int) env->CallStaticIntMethod (AudioTrack, AudioTrack.getMinBufferSize, sampleRate, CHANNEL_OUT_STEREO, ENCODING_PCM_16BIT);
minBufferSizeIn = (int) env->CallStaticIntMethod (AudioRecord, AudioRecord.getMinBufferSize, sampleRate, CHANNEL_IN_STEREO, ENCODING_PCM_16BIT);
if (minBufferSizeIn <= 0)
{
minBufferSizeIn = env->CallStaticIntMethod (AudioRecord, AudioRecord.getMinBufferSize, sampleRate, CHANNEL_IN_MONO, ENCODING_PCM_16BIT);
if (minBufferSizeIn > 0)
numDeviceInputChannelsAvailable = 1;
else
numDeviceInputChannelsAvailable = 0;
}
DBG ("Audio device - min buffers: " << minBufferSizeOut << ", " << minBufferSizeIn << "; "
<< sampleRate << " Hz; input chans: " << numDeviceInputChannelsAvailable);
}
~AndroidAudioIODevice()
{
close();
}
StringArray getOutputChannelNames() override
{
StringArray s;
s.add ("Left");
s.add ("Right");
return s;
}
StringArray getInputChannelNames() override
{
StringArray s;
if (numDeviceInputChannelsAvailable == 2)
{
s.add ("Left");
s.add ("Right");
}
else if (numDeviceInputChannelsAvailable == 1)
{
s.add ("Audio Input");
}
return s;
}
Array<double> getAvailableSampleRates() override
{
Array<double> r;
r.add ((double) sampleRate);
return r;
}
Array<int> getAvailableBufferSizes() override
{
Array<int> b;
int n = 16;
for (int i = 0; i < 50; ++i)
{
b.add (n);
n += n < 64 ? 16
: (n < 512 ? 32
: (n < 1024 ? 64
: (n < 2048 ? 128 : 256)));
}
return b;
}
int getDefaultBufferSize() override { return 2048; }
String open (const BigInteger& inputChannels,
const BigInteger& outputChannels,
double requestedSampleRate,
int bufferSize) override
{
close();
if (sampleRate != (int) requestedSampleRate)
return "Sample rate not allowed";
lastError.clear();
int preferredBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : bufferSize;
numDeviceInputChannels = 0;
numDeviceOutputChannels = 0;
activeOutputChans = outputChannels;
activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
numClientOutputChannels = activeOutputChans.countNumberOfSetBits();
activeInputChans = inputChannels;
activeInputChans.setRange (2, activeInputChans.getHighestBit(), false);
numClientInputChannels = activeInputChans.countNumberOfSetBits();
actualBufferSize = preferredBufferSize;
inputChannelBuffer.setSize (2, actualBufferSize);
inputChannelBuffer.clear();
outputChannelBuffer.setSize (2, actualBufferSize);
outputChannelBuffer.clear();
JNIEnv* env = getEnv();
if (numClientOutputChannels > 0)
{
numDeviceOutputChannels = 2;
outputDevice = GlobalRef (env->NewObject (AudioTrack, AudioTrack.constructor,
STREAM_MUSIC, sampleRate, CHANNEL_OUT_STEREO, ENCODING_PCM_16BIT,
(jint) (minBufferSizeOut * numDeviceOutputChannels * sizeof (int16)), MODE_STREAM));
if (env->CallIntMethod (outputDevice, AudioTrack.getState) != STATE_UNINITIALIZED)
isRunning = true;
else
outputDevice.clear(); // failed to open the device
}
if (numClientInputChannels > 0 && numDeviceInputChannelsAvailable > 0)
{
numDeviceInputChannels = jmin (numClientInputChannels, numDeviceInputChannelsAvailable);
inputDevice = GlobalRef (env->NewObject (AudioRecord, AudioRecord.constructor,
0 /* (default audio source) */, sampleRate,
numDeviceInputChannelsAvailable > 1 ? CHANNEL_IN_STEREO : CHANNEL_IN_MONO,
ENCODING_PCM_16BIT,
(jint) (minBufferSizeIn * numDeviceInputChannels * sizeof (int16))));
if (env->CallIntMethod (inputDevice, AudioRecord.getState) != STATE_UNINITIALIZED)
isRunning = true;
else
inputDevice.clear(); // failed to open the device
}
if (isRunning)
{
if (outputDevice != nullptr)
env->CallVoidMethod (outputDevice, AudioTrack.play);
if (inputDevice != nullptr)
env->CallVoidMethod (inputDevice, AudioRecord.startRecording);
startThread (8);
}
else
{
closeDevices();
}
return lastError;
}
void close() override
{
if (isRunning)
{
stopThread (2000);
isRunning = false;
closeDevices();
}
}
int getOutputLatencyInSamples() override { return (minBufferSizeOut * 3) / 4; }
int getInputLatencyInSamples() override { return (minBufferSizeIn * 3) / 4; }
bool isOpen() override { return isRunning; }
int getCurrentBufferSizeSamples() override { return actualBufferSize; }
int getCurrentBitDepth() override { return 16; }
double getCurrentSampleRate() override { return sampleRate; }
BigInteger getActiveOutputChannels() const override { return activeOutputChans; }
BigInteger getActiveInputChannels() const override { return activeInputChans; }
String getLastError() override { return lastError; }
bool isPlaying() override { return isRunning && callback != 0; }
void start (AudioIODeviceCallback* newCallback) override
{
if (isRunning && callback != newCallback)
{
if (newCallback != nullptr)
newCallback->audioDeviceAboutToStart (this);
const ScopedLock sl (callbackLock);
callback = newCallback;
}
}
void stop() override
{
if (isRunning)
{
AudioIODeviceCallback* lastCallback;
{
const ScopedLock sl (callbackLock);
lastCallback = callback;
callback = nullptr;
}
if (lastCallback != nullptr)
lastCallback->audioDeviceStopped();
}
}
void run() override
{
JNIEnv* env = getEnv();
jshortArray audioBuffer = env->NewShortArray (actualBufferSize * jmax (numDeviceOutputChannels, numDeviceInputChannels));
while (! threadShouldExit())
{
if (inputDevice != nullptr)
{
jint numRead = env->CallIntMethod (inputDevice, AudioRecord.read, audioBuffer, 0, actualBufferSize * numDeviceInputChannels);
if (numRead < actualBufferSize * numDeviceInputChannels)
{
DBG ("Audio read under-run! " << numRead);
}
jshort* const src = env->GetShortArrayElements (audioBuffer, 0);
for (int chan = 0; chan < inputChannelBuffer.getNumChannels(); ++chan)
{
AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::NonConst> d (inputChannelBuffer.getWritePointer (chan));
if (chan < numDeviceInputChannels)
{
AudioData::Pointer <AudioData::Int16, AudioData::NativeEndian, AudioData::Interleaved, AudioData::Const> s (src + chan, numDeviceInputChannels);
d.convertSamples (s, actualBufferSize);
}
else
{
d.clearSamples (actualBufferSize);
}
}
env->ReleaseShortArrayElements (audioBuffer, src, 0);
}
if (threadShouldExit())
break;
{
const ScopedLock sl (callbackLock);
if (callback != nullptr)
{
callback->audioDeviceIOCallback (inputChannelBuffer.getArrayOfReadPointers(), numClientInputChannels,
outputChannelBuffer.getArrayOfWritePointers(), numClientOutputChannels,
actualBufferSize);
}
else
{
outputChannelBuffer.clear();
}
}
if (outputDevice != nullptr)
{
if (threadShouldExit())
break;
jshort* const dest = env->GetShortArrayElements (audioBuffer, 0);
for (int chan = 0; chan < numDeviceOutputChannels; ++chan)
{
AudioData::Pointer <AudioData::Int16, AudioData::NativeEndian, AudioData::Interleaved, AudioData::NonConst> d (dest + chan, numDeviceOutputChannels);
const float* const sourceChanData = outputChannelBuffer.getReadPointer (jmin (chan, outputChannelBuffer.getNumChannels() - 1));
AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::Const> s (sourceChanData);
d.convertSamples (s, actualBufferSize);
}
env->ReleaseShortArrayElements (audioBuffer, dest, 0);
jint numWritten = env->CallIntMethod (outputDevice, AudioTrack.write, audioBuffer, 0, actualBufferSize * numDeviceOutputChannels);
if (numWritten < actualBufferSize * numDeviceOutputChannels)
{
DBG ("Audio write underrun! " << numWritten);
}
}
}
}
int minBufferSizeOut, minBufferSizeIn;
private:
//==================================================================================================
CriticalSection callbackLock;
AudioIODeviceCallback* callback;
jint sampleRate;
int numClientInputChannels, numDeviceInputChannels, numDeviceInputChannelsAvailable;
int numClientOutputChannels, numDeviceOutputChannels;
int actualBufferSize;
bool isRunning;
String lastError;
BigInteger activeOutputChans, activeInputChans;
GlobalRef outputDevice, inputDevice;
AudioSampleBuffer inputChannelBuffer, outputChannelBuffer;
void closeDevices()
{
if (outputDevice != nullptr)
{
outputDevice.callVoidMethod (AudioTrack.stop);
outputDevice.callVoidMethod (AudioTrack.release);
outputDevice.clear();
}
if (inputDevice != nullptr)
{
inputDevice.callVoidMethod (AudioRecord.stop);
inputDevice.callVoidMethod (AudioRecord.release);
inputDevice.clear();
}
}
JUCE_DECLARE_NON_COPYABLE (AndroidAudioIODevice)
};
//==============================================================================
class AndroidAudioIODeviceType : public AudioIODeviceType
{
public:
AndroidAudioIODeviceType() : AudioIODeviceType (javaAudioTypeName) {}
//==============================================================================
void scanForDevices() {}
StringArray getDeviceNames (bool wantInputNames) const { return StringArray (javaAudioTypeName); }
int getDefaultDeviceIndex (bool forInput) const { return 0; }
int getIndexOfDevice (AudioIODevice* device, bool asInput) const { return device != nullptr ? 0 : -1; }
bool hasSeparateInputsAndOutputs() const { return false; }
AudioIODevice* createDevice (const String& outputDeviceName,
const String& inputDeviceName)
{
ScopedPointer<AndroidAudioIODevice> dev;
if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
{
dev = new AndroidAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
: inputDeviceName);
if (dev->getCurrentSampleRate() <= 0 || dev->getDefaultBufferSize() <= 0)
dev = nullptr;
}
return dev.release();
}
private:
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (AndroidAudioIODeviceType)
};
//==============================================================================
extern bool isOpenSLAvailable();
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_Android()
{
#if JUCE_USE_ANDROID_OPENSLES
if (isOpenSLAvailable())
return nullptr;
#endif
return new AndroidAudioIODeviceType();
}

@@ -0,0 +1,85 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
StringArray MidiOutput::getDevices()
{
StringArray devices;
return devices;
}
int MidiOutput::getDefaultDeviceIndex()
{
return 0;
}
MidiOutput* MidiOutput::openDevice (int index)
{
return nullptr;
}
MidiOutput::~MidiOutput()
{
stopBackgroundThread();
}
void MidiOutput::sendMessageNow (const MidiMessage&)
{
}
//==============================================================================
MidiInput::MidiInput (const String& name_)
: name (name_),
internal (0)
{
}
MidiInput::~MidiInput()
{
}
void MidiInput::start()
{
}
void MidiInput::stop()
{
}
int MidiInput::getDefaultDeviceIndex()
{
return 0;
}
StringArray MidiInput::getDevices()
{
StringArray devs;
return devs;
}
MidiInput* MidiInput::openDevice (int index, MidiInputCallback* callback)
{
return nullptr;
}

@@ -0,0 +1,632 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
const char* const openSLTypeName = "Android OpenSL";
bool isOpenSLAvailable()
{
DynamicLibrary library;
return library.open ("libOpenSLES.so");
}
//==============================================================================
class OpenSLAudioIODevice : public AudioIODevice,
public Thread
{
public:
OpenSLAudioIODevice (const String& deviceName)
: AudioIODevice (deviceName, openSLTypeName),
Thread ("OpenSL"),
callback (nullptr), sampleRate (0), deviceOpen (false),
inputBuffer (2, 2), outputBuffer (2, 2)
{
// OpenSL has very poor support for determining latency, so the only way I can find to
// get a number for this is by asking the AudioTrack/AudioRecord classes..
AndroidAudioIODevice javaDevice (String::empty);
// this is a total guess about how to calculate the latency, but seems to vaguely agree
// with the devices I've tested.. YMMV
inputLatency = ((javaDevice.minBufferSizeIn * 2) / 3);
outputLatency = ((javaDevice.minBufferSizeOut * 2) / 3);
const int longestLatency = jmax (inputLatency, outputLatency);
const int totalLatency = inputLatency + outputLatency;
inputLatency = ((longestLatency * inputLatency) / totalLatency) & ~15;
outputLatency = ((longestLatency * outputLatency) / totalLatency) & ~15;
}
~OpenSLAudioIODevice()
{
close();
}
bool openedOk() const { return engine.outputMixObject != nullptr; }
StringArray getOutputChannelNames() override
{
StringArray s;
s.add ("Left");
s.add ("Right");
return s;
}
StringArray getInputChannelNames() override
{
StringArray s;
s.add ("Audio Input");
return s;
}
Array<double> getAvailableSampleRates() override
{
static const double rates[] = { 8000.0, 16000.0, 32000.0, 44100.0, 48000.0 };
return Array<double> (rates, numElementsInArray (rates));
}
Array<int> getAvailableBufferSizes() override
{
static const int sizes[] = { 256, 512, 768, 1024, 1280, 1600 }; // must all be multiples of the block size
return Array<int> (sizes, numElementsInArray (sizes));
}
String open (const BigInteger& inputChannels,
const BigInteger& outputChannels,
double requestedSampleRate,
int bufferSize) override
{
close();
lastError.clear();
sampleRate = (int) requestedSampleRate;
int preferredBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : bufferSize;
activeOutputChans = outputChannels;
activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
numOutputChannels = activeOutputChans.countNumberOfSetBits();
activeInputChans = inputChannels;
activeInputChans.setRange (1, activeInputChans.getHighestBit(), false);
numInputChannels = activeInputChans.countNumberOfSetBits();
actualBufferSize = preferredBufferSize;
inputBuffer.setSize (jmax (1, numInputChannels), actualBufferSize);
outputBuffer.setSize (jmax (1, numOutputChannels), actualBufferSize);
outputBuffer.clear();
recorder = engine.createRecorder (numInputChannels, sampleRate);
player = engine.createPlayer (numOutputChannels, sampleRate);
startThread (8);
deviceOpen = true;
return lastError;
}
void close() override
{
stop();
stopThread (6000);
deviceOpen = false;
recorder = nullptr;
player = nullptr;
}
int getDefaultBufferSize() override { return 1024; }
int getOutputLatencyInSamples() override { return outputLatency; }
int getInputLatencyInSamples() override { return inputLatency; }
bool isOpen() override { return deviceOpen; }
int getCurrentBufferSizeSamples() override { return actualBufferSize; }
int getCurrentBitDepth() override { return 16; }
double getCurrentSampleRate() override { return sampleRate; }
BigInteger getActiveOutputChannels() const override { return activeOutputChans; }
BigInteger getActiveInputChannels() const override { return activeInputChans; }
String getLastError() override { return lastError; }
bool isPlaying() override { return callback != nullptr; }
void start (AudioIODeviceCallback* newCallback) override
{
stop();
if (deviceOpen && callback != newCallback)
{
if (newCallback != nullptr)
newCallback->audioDeviceAboutToStart (this);
setCallback (newCallback);
}
}
void stop() override
{
if (AudioIODeviceCallback* const oldCallback = setCallback (nullptr))
oldCallback->audioDeviceStopped();
}
bool setAudioPreprocessingEnabled (bool enable) override
{
return recorder != nullptr && recorder->setAudioPreprocessingEnabled (enable);
}
private:
//==================================================================================================
CriticalSection callbackLock;
AudioIODeviceCallback* callback;
int actualBufferSize, sampleRate;
int inputLatency, outputLatency;
bool deviceOpen;
String lastError;
BigInteger activeOutputChans, activeInputChans;
int numInputChannels, numOutputChannels;
AudioSampleBuffer inputBuffer, outputBuffer;
struct Player;
struct Recorder;
AudioIODeviceCallback* setCallback (AudioIODeviceCallback* const newCallback)
{
const ScopedLock sl (callbackLock);
AudioIODeviceCallback* const oldCallback = callback;
callback = newCallback;
return oldCallback;
}
void run() override
{
if (recorder != nullptr) recorder->start();
if (player != nullptr) player->start();
while (! threadShouldExit())
{
if (player != nullptr) player->writeBuffer (outputBuffer, *this);
if (recorder != nullptr) recorder->readNextBlock (inputBuffer, *this);
const ScopedLock sl (callbackLock);
if (callback != nullptr)
{
callback->audioDeviceIOCallback (numInputChannels > 0 ? inputBuffer.getArrayOfReadPointers() : nullptr, numInputChannels,
numOutputChannels > 0 ? outputBuffer.getArrayOfWritePointers() : nullptr, numOutputChannels,
actualBufferSize);
}
else
{
outputBuffer.clear();
}
}
}
//==================================================================================================
struct Engine
{
Engine()
: engineObject (nullptr), engineInterface (nullptr), outputMixObject (nullptr)
{
if (library.open ("libOpenSLES.so"))
{
typedef SLresult (*CreateEngineFunc) (SLObjectItf*, SLuint32, const SLEngineOption*, SLuint32, const SLInterfaceID*, const SLboolean*);
if (CreateEngineFunc createEngine = (CreateEngineFunc) library.getFunction ("slCreateEngine"))
{
check (createEngine (&engineObject, 0, nullptr, 0, nullptr, nullptr));
SLInterfaceID* SL_IID_ENGINE = (SLInterfaceID*) library.getFunction ("SL_IID_ENGINE");
SL_IID_ANDROIDSIMPLEBUFFERQUEUE = (SLInterfaceID*) library.getFunction ("SL_IID_ANDROIDSIMPLEBUFFERQUEUE");
SL_IID_PLAY = (SLInterfaceID*) library.getFunction ("SL_IID_PLAY");
SL_IID_RECORD = (SLInterfaceID*) library.getFunction ("SL_IID_RECORD");
SL_IID_ANDROIDCONFIGURATION = (SLInterfaceID*) library.getFunction ("SL_IID_ANDROIDCONFIGURATION");
check ((*engineObject)->Realize (engineObject, SL_BOOLEAN_FALSE));
check ((*engineObject)->GetInterface (engineObject, *SL_IID_ENGINE, &engineInterface));
check ((*engineInterface)->CreateOutputMix (engineInterface, &outputMixObject, 0, nullptr, nullptr));
check ((*outputMixObject)->Realize (outputMixObject, SL_BOOLEAN_FALSE));
}
}
}
~Engine()
{
if (outputMixObject != nullptr) (*outputMixObject)->Destroy (outputMixObject);
if (engineObject != nullptr) (*engineObject)->Destroy (engineObject);
}
Player* createPlayer (const int numChannels, const int sampleRate)
{
if (numChannels <= 0)
return nullptr;
ScopedPointer<Player> player (new Player (numChannels, sampleRate, *this));
return player->openedOk() ? player.release() : nullptr;
}
Recorder* createRecorder (const int numChannels, const int sampleRate)
{
if (numChannels <= 0)
return nullptr;
ScopedPointer<Recorder> recorder (new Recorder (numChannels, sampleRate, *this));
return recorder->openedOk() ? recorder.release() : nullptr;
}
SLObjectItf engineObject;
SLEngineItf engineInterface;
SLObjectItf outputMixObject;
SLInterfaceID* SL_IID_ANDROIDSIMPLEBUFFERQUEUE;
SLInterfaceID* SL_IID_PLAY;
SLInterfaceID* SL_IID_RECORD;
SLInterfaceID* SL_IID_ANDROIDCONFIGURATION;
private:
DynamicLibrary library;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (Engine)
};
//==================================================================================================
struct BufferList
{
BufferList (const int numChannels_)
: numChannels (numChannels_), bufferSpace (numChannels_ * numSamples * numBuffers), nextBlock (0)
{
}
int16* waitForFreeBuffer (Thread& threadToCheck)
{
while (numBlocksOut.get() == numBuffers)
{
dataArrived.wait (1);
if (threadToCheck.threadShouldExit())
return nullptr;
}
return getNextBuffer();
}
int16* getNextBuffer()
{
if (++nextBlock == numBuffers)
nextBlock = 0;
return bufferSpace + nextBlock * numChannels * numSamples;
}
void bufferReturned() { --numBlocksOut; dataArrived.signal(); }
void bufferSent() { ++numBlocksOut; dataArrived.signal(); }
int getBufferSizeBytes() const { return numChannels * numSamples * sizeof (int16); }
const int numChannels;
enum { numSamples = 256, numBuffers = 16 };
private:
HeapBlock<int16> bufferSpace;
int nextBlock;
Atomic<int> numBlocksOut;
WaitableEvent dataArrived;
};
//==================================================================================================
struct Player
{
Player (int numChannels, int sampleRate, Engine& engine)
: playerObject (nullptr), playerPlay (nullptr), playerBufferQueue (nullptr),
bufferList (numChannels)
{
jassert (numChannels == 2);
SLDataFormat_PCM pcmFormat =
{
SL_DATAFORMAT_PCM,
(SLuint32) numChannels,
(SLuint32) (sampleRate * 1000), // (sample rate units are millihertz)
SL_PCMSAMPLEFORMAT_FIXED_16,
SL_PCMSAMPLEFORMAT_FIXED_16,
SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT,
SL_BYTEORDER_LITTLEENDIAN
};
SLDataLocator_AndroidSimpleBufferQueue bufferQueue = { SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, bufferList.numBuffers };
SLDataSource audioSrc = { &bufferQueue, &pcmFormat };
SLDataLocator_OutputMix outputMix = { SL_DATALOCATOR_OUTPUTMIX, engine.outputMixObject };
SLDataSink audioSink = { &outputMix, nullptr };
// (SL_IID_BUFFERQUEUE is not guaranteed to remain future-proof, so use SL_IID_ANDROIDSIMPLEBUFFERQUEUE)
const SLInterfaceID interfaceIDs[] = { *engine.SL_IID_ANDROIDSIMPLEBUFFERQUEUE };
const SLboolean flags[] = { SL_BOOLEAN_TRUE };
check ((*engine.engineInterface)->CreateAudioPlayer (engine.engineInterface, &playerObject, &audioSrc, &audioSink,
1, interfaceIDs, flags));
check ((*playerObject)->Realize (playerObject, SL_BOOLEAN_FALSE));
check ((*playerObject)->GetInterface (playerObject, *engine.SL_IID_PLAY, &playerPlay));
check ((*playerObject)->GetInterface (playerObject, *engine.SL_IID_ANDROIDSIMPLEBUFFERQUEUE, &playerBufferQueue));
check ((*playerBufferQueue)->RegisterCallback (playerBufferQueue, staticCallback, this));
}
~Player()
{
if (playerPlay != nullptr)
check ((*playerPlay)->SetPlayState (playerPlay, SL_PLAYSTATE_STOPPED));
if (playerBufferQueue != nullptr)
check ((*playerBufferQueue)->Clear (playerBufferQueue));
if (playerObject != nullptr)
(*playerObject)->Destroy (playerObject);
}
bool openedOk() const noexcept { return playerBufferQueue != nullptr; }
void start()
{
jassert (openedOk());
check ((*playerPlay)->SetPlayState (playerPlay, SL_PLAYSTATE_PLAYING));
}
void writeBuffer (const AudioSampleBuffer& buffer, Thread& thread)
{
jassert (buffer.getNumChannels() == bufferList.numChannels);
jassert (buffer.getNumSamples() < bufferList.numSamples * bufferList.numBuffers);
int offset = 0;
int numSamples = buffer.getNumSamples();
while (numSamples > 0)
{
int16* const destBuffer = bufferList.waitForFreeBuffer (thread);
if (destBuffer == nullptr)
break;
for (int i = 0; i < bufferList.numChannels; ++i)
{
typedef AudioData::Pointer <AudioData::Int16, AudioData::LittleEndian, AudioData::Interleaved, AudioData::NonConst> DstSampleType;
typedef AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::Const> SrcSampleType;
DstSampleType dstData (destBuffer + i, bufferList.numChannels);
SrcSampleType srcData (buffer.getReadPointer (i, offset));
dstData.convertSamples (srcData, bufferList.numSamples);
}
check ((*playerBufferQueue)->Enqueue (playerBufferQueue, destBuffer, bufferList.getBufferSizeBytes()));
bufferList.bufferSent();
numSamples -= bufferList.numSamples;
offset += bufferList.numSamples;
}
}
private:
SLObjectItf playerObject;
SLPlayItf playerPlay;
SLAndroidSimpleBufferQueueItf playerBufferQueue;
BufferList bufferList;
static void staticCallback (SLAndroidSimpleBufferQueueItf queue, void* context)
{
jassert (queue == static_cast <Player*> (context)->playerBufferQueue); (void) queue;
static_cast <Player*> (context)->bufferList.bufferReturned();
}
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (Player)
};
//==================================================================================================
struct Recorder
{
Recorder (int numChannels, int sampleRate, Engine& engine)
: recorderObject (nullptr), recorderRecord (nullptr),
recorderBufferQueue (nullptr), configObject (nullptr),
bufferList (numChannels)
{
jassert (numChannels == 1); // STEREO doesn't always work!!
SLDataFormat_PCM pcmFormat =
{
SL_DATAFORMAT_PCM,
(SLuint32) numChannels,
(SLuint32) (sampleRate * 1000), // (sample rate units are millihertz)
SL_PCMSAMPLEFORMAT_FIXED_16,
SL_PCMSAMPLEFORMAT_FIXED_16,
(numChannels == 1) ? SL_SPEAKER_FRONT_CENTER : (SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT),
SL_BYTEORDER_LITTLEENDIAN
};
SLDataLocator_IODevice ioDevice = { SL_DATALOCATOR_IODEVICE, SL_IODEVICE_AUDIOINPUT, SL_DEFAULTDEVICEID_AUDIOINPUT, nullptr };
SLDataSource audioSrc = { &ioDevice, nullptr };
SLDataLocator_AndroidSimpleBufferQueue bufferQueue = { SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, bufferList.numBuffers };
SLDataSink audioSink = { &bufferQueue, &pcmFormat };
const SLInterfaceID interfaceIDs[] = { *engine.SL_IID_ANDROIDSIMPLEBUFFERQUEUE };
const SLboolean flags[] = { SL_BOOLEAN_TRUE };
if (check ((*engine.engineInterface)->CreateAudioRecorder (engine.engineInterface, &recorderObject, &audioSrc,
&audioSink, 1, interfaceIDs, flags)))
{
if (check ((*recorderObject)->Realize (recorderObject, SL_BOOLEAN_FALSE)))
{
check ((*recorderObject)->GetInterface (recorderObject, *engine.SL_IID_RECORD, &recorderRecord));
check ((*recorderObject)->GetInterface (recorderObject, *engine.SL_IID_ANDROIDSIMPLEBUFFERQUEUE, &recorderBufferQueue));
check ((*recorderObject)->GetInterface (recorderObject, *engine.SL_IID_ANDROIDCONFIGURATION, &configObject));
check ((*recorderBufferQueue)->RegisterCallback (recorderBufferQueue, staticCallback, this));
check ((*recorderRecord)->SetRecordState (recorderRecord, SL_RECORDSTATE_STOPPED));
for (int i = bufferList.numBuffers; --i >= 0;)
{
int16* const buffer = bufferList.getNextBuffer();
jassert (buffer != nullptr);
enqueueBuffer (buffer);
}
}
}
}
~Recorder()
{
if (recorderRecord != nullptr)
check ((*recorderRecord)->SetRecordState (recorderRecord, SL_RECORDSTATE_STOPPED));
if (recorderBufferQueue != nullptr)
check ((*recorderBufferQueue)->Clear (recorderBufferQueue));
if (recorderObject != nullptr)
(*recorderObject)->Destroy (recorderObject);
}
bool openedOk() const noexcept { return recorderBufferQueue != nullptr; }
void start()
{
jassert (openedOk());
check ((*recorderRecord)->SetRecordState (recorderRecord, SL_RECORDSTATE_RECORDING));
}
void readNextBlock (AudioSampleBuffer& buffer, Thread& thread)
{
jassert (buffer.getNumChannels() == bufferList.numChannels);
jassert (buffer.getNumSamples() < bufferList.numSamples * bufferList.numBuffers);
jassert ((buffer.getNumSamples() % bufferList.numSamples) == 0);
int offset = 0;
int numSamples = buffer.getNumSamples();
while (numSamples > 0)
{
int16* const srcBuffer = bufferList.waitForFreeBuffer (thread);
if (srcBuffer == nullptr)
break;
for (int i = 0; i < bufferList.numChannels; ++i)
{
typedef AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::NonConst> DstSampleType;
typedef AudioData::Pointer <AudioData::Int16, AudioData::LittleEndian, AudioData::Interleaved, AudioData::Const> SrcSampleType;
DstSampleType dstData (buffer.getWritePointer (i, offset));
SrcSampleType srcData (srcBuffer + i, bufferList.numChannels);
dstData.convertSamples (srcData, bufferList.numSamples);
}
enqueueBuffer (srcBuffer);
numSamples -= bufferList.numSamples;
offset += bufferList.numSamples;
}
}
bool setAudioPreprocessingEnabled (bool enable)
{
SLuint32 mode = enable ? SL_ANDROID_RECORDING_PRESET_GENERIC
: SL_ANDROID_RECORDING_PRESET_VOICE_RECOGNITION;
return configObject != nullptr
&& check ((*configObject)->SetConfiguration (configObject, SL_ANDROID_KEY_RECORDING_PRESET, &mode, sizeof (mode)));
}
private:
SLObjectItf recorderObject;
SLRecordItf recorderRecord;
SLAndroidSimpleBufferQueueItf recorderBufferQueue;
SLAndroidConfigurationItf configObject;
BufferList bufferList;
void enqueueBuffer (int16* buffer)
{
check ((*recorderBufferQueue)->Enqueue (recorderBufferQueue, buffer, bufferList.getBufferSizeBytes()));
bufferList.bufferSent();
}
static void staticCallback (SLAndroidSimpleBufferQueueItf queue, void* context)
{
jassert (queue == static_cast <Recorder*> (context)->recorderBufferQueue); (void) queue;
static_cast <Recorder*> (context)->bufferList.bufferReturned();
}
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (Recorder)
};
//==============================================================================
Engine engine;
ScopedPointer<Player> player;
ScopedPointer<Recorder> recorder;
//==============================================================================
static bool check (const SLresult result)
{
jassert (result == SL_RESULT_SUCCESS);
return result == SL_RESULT_SUCCESS;
}
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (OpenSLAudioIODevice)
};
//==============================================================================
class OpenSLAudioDeviceType : public AudioIODeviceType
{
public:
OpenSLAudioDeviceType() : AudioIODeviceType (openSLTypeName) {}
//==============================================================================
void scanForDevices() {}
StringArray getDeviceNames (bool wantInputNames) const { return StringArray (openSLTypeName); }
int getDefaultDeviceIndex (bool forInput) const { return 0; }
int getIndexOfDevice (AudioIODevice* device, bool asInput) const { return device != nullptr ? 0 : -1; }
bool hasSeparateInputsAndOutputs() const { return false; }
AudioIODevice* createDevice (const String& outputDeviceName,
const String& inputDeviceName)
{
ScopedPointer<OpenSLAudioIODevice> dev;
if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
{
dev = new OpenSLAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
: inputDeviceName);
if (! dev->openedOk())
dev = nullptr;
}
return dev.release();
}
private:
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (OpenSLAudioDeviceType)
};
//==============================================================================
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_OpenSLES()
{
return isOpenSLAvailable() ? new OpenSLAudioDeviceType() : nullptr;
}

@@ -0,0 +1,576 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
class iOSAudioIODevice : public AudioIODevice
{
public:
iOSAudioIODevice (const String& deviceName)
: AudioIODevice (deviceName, "Audio"),
actualBufferSize (0),
isRunning (false),
audioUnit (0),
callback (nullptr),
floatData (1, 2)
{
getSessionHolder().activeDevices.add (this);
numInputChannels = 2;
numOutputChannels = 2;
preferredBufferSize = 0;
updateDeviceInfo();
}
~iOSAudioIODevice()
{
getSessionHolder().activeDevices.removeFirstMatchingValue (this);
close();
}
StringArray getOutputChannelNames() override
{
StringArray s;
s.add ("Left");
s.add ("Right");
return s;
}
StringArray getInputChannelNames() override
{
StringArray s;
if (audioInputIsAvailable)
{
s.add ("Left");
s.add ("Right");
}
return s;
}
Array<double> getAvailableSampleRates() override
{
// can't find a good way to actually ask the device for which of these it supports..
static const double rates[] = { 8000.0, 16000.0, 22050.0, 32000.0, 44100.0, 48000.0 };
return Array<double> (rates, numElementsInArray (rates));
}
Array<int> getAvailableBufferSizes() override
{
Array<int> r;
for (int i = 6; i < 12; ++i)
r.add (1 << i);
return r;
}
int getDefaultBufferSize() override { return 1024; }
String open (const BigInteger& inputChannelsWanted,
const BigInteger& outputChannelsWanted,
double targetSampleRate, int bufferSize) override
{
close();
lastError.clear();
preferredBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : bufferSize;
// xxx set up channel mapping
activeOutputChans = outputChannelsWanted;
activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
numOutputChannels = activeOutputChans.countNumberOfSetBits();
monoOutputChannelNumber = activeOutputChans.findNextSetBit (0);
activeInputChans = inputChannelsWanted;
activeInputChans.setRange (2, activeInputChans.getHighestBit(), false);
numInputChannels = activeInputChans.countNumberOfSetBits();
monoInputChannelNumber = activeInputChans.findNextSetBit (0);
AudioSessionSetActive (true);
if (numInputChannels > 0 && audioInputIsAvailable)
{
setSessionUInt32Property (kAudioSessionProperty_AudioCategory, kAudioSessionCategory_PlayAndRecord);
setSessionUInt32Property (kAudioSessionProperty_OverrideCategoryEnableBluetoothInput, 1);
}
else
{
setSessionUInt32Property (kAudioSessionProperty_AudioCategory, kAudioSessionCategory_MediaPlayback);
}
AudioSessionAddPropertyListener (kAudioSessionProperty_AudioRouteChange, routingChangedStatic, this);
fixAudioRouteIfSetToReceiver();
setSessionFloat64Property (kAudioSessionProperty_PreferredHardwareSampleRate, targetSampleRate);
updateDeviceInfo();
setSessionFloat32Property (kAudioSessionProperty_PreferredHardwareIOBufferDuration, preferredBufferSize / sampleRate);
updateCurrentBufferSize();
prepareFloatBuffers (actualBufferSize);
isRunning = true;
routingChanged (nullptr); // creates and starts the AU
lastError = audioUnit != 0 ? "" : "Couldn't open the device";
return lastError;
}
void close() override
{
if (isRunning)
{
isRunning = false;
setSessionUInt32Property (kAudioSessionProperty_AudioCategory, kAudioSessionCategory_MediaPlayback);
AudioSessionRemovePropertyListenerWithUserData (kAudioSessionProperty_AudioRouteChange, routingChangedStatic, this);
AudioSessionSetActive (false);
if (audioUnit != 0)
{
AudioComponentInstanceDispose (audioUnit);
audioUnit = 0;
}
}
}
bool isOpen() override { return isRunning; }
int getCurrentBufferSizeSamples() override { return actualBufferSize; }
double getCurrentSampleRate() override { return sampleRate; }
int getCurrentBitDepth() override { return 16; }
BigInteger getActiveOutputChannels() const override { return activeOutputChans; }
BigInteger getActiveInputChannels() const override { return activeInputChans; }
int getOutputLatencyInSamples() override { return getLatency (kAudioSessionProperty_CurrentHardwareOutputLatency); }
int getInputLatencyInSamples() override { return getLatency (kAudioSessionProperty_CurrentHardwareInputLatency); }
int getLatency (AudioSessionPropertyID propID)
{
Float32 latency = 0;
getSessionProperty (propID, latency);
return roundToInt (latency * getCurrentSampleRate());
}
void start (AudioIODeviceCallback* newCallback) override
{
if (isRunning && callback != newCallback)
{
if (newCallback != nullptr)
newCallback->audioDeviceAboutToStart (this);
const ScopedLock sl (callbackLock);
callback = newCallback;
}
}
void stop() override
{
if (isRunning)
{
AudioIODeviceCallback* lastCallback;
{
const ScopedLock sl (callbackLock);
lastCallback = callback;
callback = nullptr;
}
if (lastCallback != nullptr)
lastCallback->audioDeviceStopped();
}
}
bool isPlaying() override { return isRunning && callback != nullptr; }
String getLastError() override { return lastError; }
bool setAudioPreprocessingEnabled (bool enable) override
{
return setSessionUInt32Property (kAudioSessionProperty_Mode, enable ? kAudioSessionMode_Default
: kAudioSessionMode_Measurement);
}
private:
//==================================================================================================
CriticalSection callbackLock;
Float64 sampleRate;
int numInputChannels, numOutputChannels;
int preferredBufferSize, actualBufferSize;
bool isRunning;
String lastError;
AudioStreamBasicDescription format;
AudioUnit audioUnit;
UInt32 audioInputIsAvailable;
AudioIODeviceCallback* callback;
BigInteger activeOutputChans, activeInputChans;
AudioSampleBuffer floatData;
float* inputChannels[3];
float* outputChannels[3];
int monoInputChannelNumber, monoOutputChannelNumber;
void prepareFloatBuffers (int bufferSize)
{
if (numInputChannels + numOutputChannels > 0)
{
floatData.setSize (numInputChannels + numOutputChannels, bufferSize);
zeromem (inputChannels, sizeof (inputChannels));
zeromem (outputChannels, sizeof (outputChannels));
for (int i = 0; i < numInputChannels; ++i)
inputChannels[i] = floatData.getWritePointer (i);
for (int i = 0; i < numOutputChannels; ++i)
outputChannels[i] = floatData.getWritePointer (i + numInputChannels);
}
}
//==================================================================================================
OSStatus process (AudioUnitRenderActionFlags* flags, const AudioTimeStamp* time,
const UInt32 numFrames, AudioBufferList* data)
{
OSStatus err = noErr;
if (audioInputIsAvailable && numInputChannels > 0)
err = AudioUnitRender (audioUnit, flags, time, 1, numFrames, data);
const ScopedLock sl (callbackLock);
if (callback != nullptr)
{
if ((int) numFrames > floatData.getNumSamples())
prepareFloatBuffers ((int) numFrames);
if (audioInputIsAvailable && numInputChannels > 0)
{
short* shortData = (short*) data->mBuffers[0].mData;
if (numInputChannels >= 2)
{
for (UInt32 i = 0; i < numFrames; ++i)
{
inputChannels[0][i] = *shortData++ * (1.0f / 32768.0f);
inputChannels[1][i] = *shortData++ * (1.0f / 32768.0f);
}
}
else
{
if (monoInputChannelNumber > 0)
++shortData;
for (UInt32 i = 0; i < numFrames; ++i)
{
inputChannels[0][i] = *shortData++ * (1.0f / 32768.0f);
++shortData;
}
}
}
else
{
for (int i = numInputChannels; --i >= 0;)
zeromem (inputChannels[i], sizeof (float) * numFrames);
}
callback->audioDeviceIOCallback ((const float**) inputChannels, numInputChannels,
outputChannels, numOutputChannels, (int) numFrames);
short* shortData = (short*) data->mBuffers[0].mData;
int n = 0;
if (numOutputChannels >= 2)
{
for (UInt32 i = 0; i < numFrames; ++i)
{
shortData [n++] = (short) (outputChannels[0][i] * 32767.0f);
shortData [n++] = (short) (outputChannels[1][i] * 32767.0f);
}
}
else if (numOutputChannels == 1)
{
for (UInt32 i = 0; i < numFrames; ++i)
{
const short s = (short) (outputChannels[monoOutputChannelNumber][i] * 32767.0f);
shortData [n++] = s;
shortData [n++] = s;
}
}
else
{
zeromem (data->mBuffers[0].mData, 2 * sizeof (short) * numFrames);
}
}
else
{
zeromem (data->mBuffers[0].mData, 2 * sizeof (short) * numFrames);
}
return err;
}
void updateDeviceInfo()
{
getSessionProperty (kAudioSessionProperty_CurrentHardwareSampleRate, sampleRate);
getSessionProperty (kAudioSessionProperty_AudioInputAvailable, audioInputIsAvailable);
}
void updateCurrentBufferSize()
{
Float32 bufferDuration = sampleRate > 0 ? (Float32) (preferredBufferSize / sampleRate) : 0.0f;
getSessionProperty (kAudioSessionProperty_CurrentHardwareIOBufferDuration, bufferDuration);
actualBufferSize = (int) (sampleRate * bufferDuration + 0.5);
}
void routingChanged (const void* propertyValue)
{
if (! isRunning)
return;
if (propertyValue != nullptr)
{
CFDictionaryRef routeChangeDictionary = (CFDictionaryRef) propertyValue;
CFNumberRef routeChangeReasonRef = (CFNumberRef) CFDictionaryGetValue (routeChangeDictionary,
CFSTR (kAudioSession_AudioRouteChangeKey_Reason));
SInt32 routeChangeReason;
CFNumberGetValue (routeChangeReasonRef, kCFNumberSInt32Type, &routeChangeReason);
if (routeChangeReason == kAudioSessionRouteChangeReason_OldDeviceUnavailable)
{
const ScopedLock sl (callbackLock);
if (callback != nullptr)
callback->audioDeviceError ("Old device unavailable");
}
}
updateDeviceInfo();
createAudioUnit();
AudioSessionSetActive (true);
if (audioUnit != 0)
{
UInt32 formatSize = sizeof (format);
AudioUnitGetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, &formatSize);
updateCurrentBufferSize();
AudioOutputUnitStart (audioUnit);
}
}
//==================================================================================================
struct AudioSessionHolder
{
AudioSessionHolder()
{
AudioSessionInitialize (0, 0, interruptionListenerCallback, this);
}
static void interruptionListenerCallback (void* client, UInt32 interruptionType)
{
const Array <iOSAudioIODevice*>& activeDevices = static_cast <AudioSessionHolder*> (client)->activeDevices;
for (int i = activeDevices.size(); --i >= 0;)
activeDevices.getUnchecked(i)->interruptionListener (interruptionType);
}
Array <iOSAudioIODevice*> activeDevices;
};
static AudioSessionHolder& getSessionHolder()
{
static AudioSessionHolder audioSessionHolder;
return audioSessionHolder;
}
void interruptionListener (const UInt32 interruptionType)
{
if (interruptionType == kAudioSessionBeginInterruption)
{
isRunning = false;
AudioOutputUnitStop (audioUnit);
AudioSessionSetActive (false);
const ScopedLock sl (callbackLock);
if (callback != nullptr)
callback->audioDeviceError ("iOS audio session interruption");
}
if (interruptionType == kAudioSessionEndInterruption)
{
isRunning = true;
AudioSessionSetActive (true);
AudioOutputUnitStart (audioUnit);
const ScopedLock sl (callbackLock);
if (callback != nullptr)
callback->audioDeviceError ("iOS audio session resumed");
}
}
//==================================================================================================
static OSStatus processStatic (void* client, AudioUnitRenderActionFlags* flags, const AudioTimeStamp* time,
UInt32 /*busNumber*/, UInt32 numFrames, AudioBufferList* data)
{
return static_cast<iOSAudioIODevice*> (client)->process (flags, time, numFrames, data);
}
static void routingChangedStatic (void* client, AudioSessionPropertyID, UInt32 /*inDataSize*/, const void* propertyValue)
{
static_cast<iOSAudioIODevice*> (client)->routingChanged (propertyValue);
}
//==================================================================================================
void resetFormat (const int numChannels) noexcept
{
zerostruct (format);
format.mFormatID = kAudioFormatLinearPCM;
format.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked | kAudioFormatFlagsNativeEndian;
format.mBitsPerChannel = 8 * sizeof (short);
format.mChannelsPerFrame = (UInt32) numChannels;
format.mFramesPerPacket = 1;
format.mBytesPerFrame = format.mBytesPerPacket = (UInt32) numChannels * sizeof (short);
}
bool createAudioUnit()
{
if (audioUnit != 0)
{
AudioComponentInstanceDispose (audioUnit);
audioUnit = 0;
}
resetFormat (2);
AudioComponentDescription desc;
desc.componentType = kAudioUnitType_Output;
desc.componentSubType = kAudioUnitSubType_RemoteIO;
desc.componentManufacturer = kAudioUnitManufacturer_Apple;
desc.componentFlags = 0;
desc.componentFlagsMask = 0;
AudioComponent comp = AudioComponentFindNext (0, &desc);
AudioComponentInstanceNew (comp, &audioUnit);
if (audioUnit == 0)
return false;
if (numInputChannels > 0)
{
const UInt32 one = 1;
AudioUnitSetProperty (audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &one, sizeof (one));
}
{
AudioChannelLayout layout;
layout.mChannelBitmap = 0;
layout.mNumberChannelDescriptions = 0;
layout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Input, 0, &layout, sizeof (layout));
AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Output, 0, &layout, sizeof (layout));
}
{
AURenderCallbackStruct inputProc;
inputProc.inputProc = processStatic;
inputProc.inputProcRefCon = this;
AudioUnitSetProperty (audioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &inputProc, sizeof (inputProc));
}
AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &format, sizeof (format));
AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, sizeof (format));
AudioUnitInitialize (audioUnit);
return true;
}
// If the routing is set to go through the receiver (i.e. the speaker, but quiet), this re-routes it
// to make it loud. Needed because by default when using an input + output, the output is kept quiet.
static void fixAudioRouteIfSetToReceiver()
{
CFStringRef audioRoute = 0;
if (getSessionProperty (kAudioSessionProperty_AudioRoute, audioRoute) == noErr)
{
NSString* route = (NSString*) audioRoute;
//DBG ("audio route: " + nsStringToJuce (route));
if ([route hasPrefix: @"Receiver"])
setSessionUInt32Property (kAudioSessionProperty_OverrideAudioRoute, kAudioSessionOverrideAudioRoute_Speaker);
CFRelease (audioRoute);
}
}
template <typename Type>
static OSStatus getSessionProperty (AudioSessionPropertyID propID, Type& result) noexcept
{
UInt32 valueSize = sizeof (result);
return AudioSessionGetProperty (propID, &valueSize, &result);
}
static bool setSessionUInt32Property (AudioSessionPropertyID propID, UInt32 v) noexcept { return AudioSessionSetProperty (propID, sizeof (v), &v) == kAudioSessionNoError; }
static bool setSessionFloat32Property (AudioSessionPropertyID propID, Float32 v) noexcept { return AudioSessionSetProperty (propID, sizeof (v), &v) == kAudioSessionNoError; }
static bool setSessionFloat64Property (AudioSessionPropertyID propID, Float64 v) noexcept { return AudioSessionSetProperty (propID, sizeof (v), &v) == kAudioSessionNoError; }
JUCE_DECLARE_NON_COPYABLE (iOSAudioIODevice)
};
//==============================================================================
class iOSAudioIODeviceType : public AudioIODeviceType
{
public:
iOSAudioIODeviceType() : AudioIODeviceType ("iOS Audio") {}
void scanForDevices() {}
StringArray getDeviceNames (bool /*wantInputNames*/) const { return StringArray ("iOS Audio"); }
int getDefaultDeviceIndex (bool /*forInput*/) const { return 0; }
int getIndexOfDevice (AudioIODevice* d, bool /*asInput*/) const { return d != nullptr ? 0 : -1; }
bool hasSeparateInputsAndOutputs() const { return false; }
AudioIODevice* createDevice (const String& outputDeviceName, const String& inputDeviceName)
{
if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
return new iOSAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
: inputDeviceName);
return nullptr;
}
private:
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (iOSAudioIODeviceType)
};
//==============================================================================
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_iOSAudio()
{
return new iOSAudioIODeviceType();
}
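// A minimal usage sketch (not part of the original commit; assumes the usual JUCE audio-device
// headers are in scope). Every call used here is implemented by the classes above; a real app
// would normally keep the device alive, e.g. via an AudioDeviceManager, rather than a local.
static void startIOSAudioExample (AudioIODeviceCallback& callback)
{
    ScopedPointer<AudioIODeviceType> type (AudioIODeviceType::createAudioIODeviceType_iOSAudio());
    type->scanForDevices();

    const StringArray names (type->getDeviceNames (false));    // always a single "iOS Audio" entry
    ScopedPointer<AudioIODevice> device (type->createDevice (names[0], names[0]));

    if (device != nullptr)
    {
        BigInteger ins, outs;
        ins.setRange (0, 2, true);
        outs.setRange (0, 2, true);

        if (device->open (ins, outs, 44100.0, 512).isEmpty())
            device->start (&callback);     // runs until stop() / close() is called
    }
}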

View file

@ -0,0 +1,77 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
AudioCDReader::AudioCDReader()
: AudioFormatReader (0, "CD Audio")
{
}
StringArray AudioCDReader::getAvailableCDNames()
{
StringArray names;
return names;
}
AudioCDReader* AudioCDReader::createReaderForCD (const int index)
{
return nullptr;
}
AudioCDReader::~AudioCDReader()
{
}
void AudioCDReader::refreshTrackLengths()
{
}
bool AudioCDReader::readSamples (int** destSamples, int numDestChannels, int startOffsetInDestBuffer,
int64 startSampleInFile, int numSamples)
{
return false;
}
bool AudioCDReader::isCDStillPresent() const
{
return false;
}
bool AudioCDReader::isTrackAudio (int trackNum) const
{
return false;
}
void AudioCDReader::enableIndexScanning (bool b)
{
}
int AudioCDReader::getLastIndex() const
{
return 0;
}
Array<int> AudioCDReader::findIndexesInTrack (const int trackNumber)
{
return Array<int>();
}

View file

@ -0,0 +1,604 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
//==============================================================================
static void* juce_libjackHandle = nullptr;
static void* juce_loadJackFunction (const char* const name)
{
if (juce_libjackHandle == nullptr)
return nullptr;
return dlsym (juce_libjackHandle, name);
}
#define JUCE_DECL_JACK_FUNCTION(return_type, fn_name, argument_types, arguments) \
return_type fn_name argument_types \
{ \
typedef return_type (*fn_type) argument_types; \
static fn_type fn = (fn_type) juce_loadJackFunction (#fn_name); \
return (fn != nullptr) ? ((*fn) arguments) : (return_type) 0; \
}
#define JUCE_DECL_VOID_JACK_FUNCTION(fn_name, argument_types, arguments) \
void fn_name argument_types \
{ \
typedef void (*fn_type) argument_types; \
static fn_type fn = (fn_type) juce_loadJackFunction (#fn_name); \
if (fn != nullptr) (*fn) arguments; \
}
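// For reference (not in the original source): a declaration such as
//     JUCE_DECL_JACK_FUNCTION (int, jack_activate, (jack_client_t* client), (client))
// expands to a forwarding stub that resolves the real symbol from libjack on first use:
//
//     int jack_activate (jack_client_t* client)
//     {
//         typedef int (*fn_type) (jack_client_t*);
//         static fn_type fn = (fn_type) juce_loadJackFunction ("jack_activate");
//         return (fn != nullptr) ? (*fn) (client) : (int) 0;
//     }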
//==============================================================================
JUCE_DECL_JACK_FUNCTION (jack_client_t*, jack_client_open, (const char* client_name, jack_options_t options, jack_status_t* status, ...), (client_name, options, status));
JUCE_DECL_JACK_FUNCTION (int, jack_client_close, (jack_client_t *client), (client));
JUCE_DECL_JACK_FUNCTION (int, jack_activate, (jack_client_t* client), (client));
JUCE_DECL_JACK_FUNCTION (int, jack_deactivate, (jack_client_t* client), (client));
JUCE_DECL_JACK_FUNCTION (jack_nframes_t, jack_get_buffer_size, (jack_client_t* client), (client));
JUCE_DECL_JACK_FUNCTION (jack_nframes_t, jack_get_sample_rate, (jack_client_t* client), (client));
JUCE_DECL_VOID_JACK_FUNCTION (jack_on_shutdown, (jack_client_t* client, void (*function)(void* arg), void* arg), (client, function, arg));
JUCE_DECL_JACK_FUNCTION (void* , jack_port_get_buffer, (jack_port_t* port, jack_nframes_t nframes), (port, nframes));
JUCE_DECL_JACK_FUNCTION (jack_nframes_t, jack_port_get_total_latency, (jack_client_t* client, jack_port_t* port), (client, port));
JUCE_DECL_JACK_FUNCTION (jack_port_t* , jack_port_register, (jack_client_t* client, const char* port_name, const char* port_type, unsigned long flags, unsigned long buffer_size), (client, port_name, port_type, flags, buffer_size));
JUCE_DECL_VOID_JACK_FUNCTION (jack_set_error_function, (void (*func)(const char*)), (func));
JUCE_DECL_JACK_FUNCTION (int, jack_set_process_callback, (jack_client_t* client, JackProcessCallback process_callback, void* arg), (client, process_callback, arg));
JUCE_DECL_JACK_FUNCTION (const char**, jack_get_ports, (jack_client_t* client, const char* port_name_pattern, const char* type_name_pattern, unsigned long flags), (client, port_name_pattern, type_name_pattern, flags));
JUCE_DECL_JACK_FUNCTION (int, jack_connect, (jack_client_t* client, const char* source_port, const char* destination_port), (client, source_port, destination_port));
JUCE_DECL_JACK_FUNCTION (const char*, jack_port_name, (const jack_port_t* port), (port));
JUCE_DECL_JACK_FUNCTION (void*, jack_set_port_connect_callback, (jack_client_t* client, JackPortConnectCallback connect_callback, void* arg), (client, connect_callback, arg));
JUCE_DECL_JACK_FUNCTION (jack_port_t* , jack_port_by_id, (jack_client_t* client, jack_port_id_t port_id), (client, port_id));
JUCE_DECL_JACK_FUNCTION (int, jack_port_connected, (const jack_port_t* port), (port));
JUCE_DECL_JACK_FUNCTION (int, jack_port_connected_to, (const jack_port_t* port, const char* port_name), (port, port_name));
#if JUCE_DEBUG
#define JACK_LOGGING_ENABLED 1
#endif
#if JACK_LOGGING_ENABLED
namespace
{
void jack_Log (const String& s)
{
std::cerr << s << std::endl;
}
const char* getJackErrorMessage (const jack_status_t status)
{
if (status & JackServerFailed
|| status & JackServerError) return "Unable to connect to JACK server";
if (status & JackVersionError) return "Client's protocol version does not match";
if (status & JackInvalidOption) return "The operation contained an invalid or unsupported option";
if (status & JackNameNotUnique) return "The desired client name was not unique";
if (status & JackNoSuchClient) return "Requested client does not exist";
if (status & JackInitFailure) return "Unable to initialize client";
return nullptr;
}
}
#define JUCE_JACK_LOG_STATUS(x) { if (const char* m = getJackErrorMessage (x)) jack_Log (m); }
#define JUCE_JACK_LOG(x) jack_Log(x)
#else
#define JUCE_JACK_LOG_STATUS(x) {}
#define JUCE_JACK_LOG(x) {}
#endif
//==============================================================================
#ifndef JUCE_JACK_CLIENT_NAME
#define JUCE_JACK_CLIENT_NAME "JUCEJack"
#endif
struct JackPortIterator
{
JackPortIterator (jack_client_t* const client, const bool forInput)
: ports (nullptr), index (-1)
{
if (client != nullptr)
ports = juce::jack_get_ports (client, nullptr, nullptr,
forInput ? JackPortIsOutput : JackPortIsInput);
// (NB: This looks like it's the wrong way round, but it is correct!)
}
~JackPortIterator()
{
::free (ports);
}
bool next()
{
if (ports == nullptr || ports [index + 1] == nullptr)
return false;
name = CharPointer_UTF8 (ports[++index]);
clientName = name.upToFirstOccurrenceOf (":", false, false);
return true;
}
const char** ports;
int index;
String name;
String clientName;
};
class JackAudioIODeviceType;
static Array<JackAudioIODeviceType*> activeDeviceTypes;
//==============================================================================
class JackAudioIODevice : public AudioIODevice
{
public:
JackAudioIODevice (const String& deviceName,
const String& inId,
const String& outId)
: AudioIODevice (deviceName, "JACK"),
inputId (inId),
outputId (outId),
deviceIsOpen (false),
callback (nullptr),
totalNumberOfInputChannels (0),
totalNumberOfOutputChannels (0)
{
jassert (deviceName.isNotEmpty());
jack_status_t status;
client = juce::jack_client_open (JUCE_JACK_CLIENT_NAME, JackNoStartServer, &status);
if (client == nullptr)
{
JUCE_JACK_LOG_STATUS (status);
}
else
{
juce::jack_set_error_function (errorCallback);
// open input ports
const StringArray inputChannels (getInputChannelNames());
for (int i = 0; i < inputChannels.size(); ++i)
{
String inputName;
inputName << "in_" << ++totalNumberOfInputChannels;
inputPorts.add (juce::jack_port_register (client, inputName.toUTF8(),
JACK_DEFAULT_AUDIO_TYPE, JackPortIsInput, 0));
}
// open output ports
const StringArray outputChannels (getOutputChannelNames());
for (int i = 0; i < outputChannels.size (); ++i)
{
String outputName;
outputName << "out_" << ++totalNumberOfOutputChannels;
outputPorts.add (juce::jack_port_register (client, outputName.toUTF8(),
JACK_DEFAULT_AUDIO_TYPE, JackPortIsOutput, 0));
}
inChans.calloc (totalNumberOfInputChannels + 2);
outChans.calloc (totalNumberOfOutputChannels + 2);
}
}
~JackAudioIODevice()
{
close();
if (client != nullptr)
{
juce::jack_client_close (client);
client = nullptr;
}
}
StringArray getChannelNames (bool forInput) const
{
StringArray names;
for (JackPortIterator i (client, forInput); i.next();)
if (i.clientName == getName())
names.add (i.name.fromFirstOccurrenceOf (":", false, false));
return names;
}
StringArray getOutputChannelNames() override { return getChannelNames (false); }
StringArray getInputChannelNames() override { return getChannelNames (true); }
Array<double> getAvailableSampleRates() override
{
Array<double> rates;
if (client != nullptr)
rates.add (juce::jack_get_sample_rate (client));
return rates;
}
Array<int> getAvailableBufferSizes() override
{
Array<int> sizes;
if (client != nullptr)
sizes.add (juce::jack_get_buffer_size (client));
return sizes;
}
int getDefaultBufferSize() override { return getCurrentBufferSizeSamples(); }
int getCurrentBufferSizeSamples() override { return client != nullptr ? juce::jack_get_buffer_size (client) : 0; }
double getCurrentSampleRate() override { return client != nullptr ? juce::jack_get_sample_rate (client) : 0; }
String open (const BigInteger& inputChannels, const BigInteger& outputChannels,
double /* sampleRate */, int /* bufferSizeSamples */) override
{
if (client == nullptr)
{
lastError = "No JACK client running";
return lastError;
}
lastError.clear();
close();
juce::jack_set_process_callback (client, processCallback, this);
juce::jack_set_port_connect_callback (client, portConnectCallback, this);
juce::jack_on_shutdown (client, shutdownCallback, this);
juce::jack_activate (client);
deviceIsOpen = true;
if (! inputChannels.isZero())
{
for (JackPortIterator i (client, true); i.next();)
{
if (inputChannels [i.index] && i.clientName == getName())
{
int error = juce::jack_connect (client, i.ports[i.index], juce::jack_port_name ((jack_port_t*) inputPorts[i.index]));
if (error != 0)
JUCE_JACK_LOG ("Cannot connect input port " + String (i.index) + " (" + i.name + "), error " + String (error));
}
}
}
if (! outputChannels.isZero())
{
for (JackPortIterator i (client, false); i.next();)
{
if (outputChannels [i.index] && i.clientName == getName())
{
int error = juce::jack_connect (client, juce::jack_port_name ((jack_port_t*) outputPorts[i.index]), i.ports[i.index]);
if (error != 0)
JUCE_JACK_LOG ("Cannot connect output port " + String (i.index) + " (" + i.name + "), error " + String (error));
}
}
}
return lastError;
}
void close() override
{
stop();
if (client != nullptr)
{
juce::jack_deactivate (client);
juce::jack_set_process_callback (client, processCallback, nullptr);
juce::jack_set_port_connect_callback (client, portConnectCallback, nullptr);
juce::jack_on_shutdown (client, shutdownCallback, nullptr);
}
deviceIsOpen = false;
}
void start (AudioIODeviceCallback* newCallback) override
{
if (deviceIsOpen && newCallback != callback)
{
if (newCallback != nullptr)
newCallback->audioDeviceAboutToStart (this);
AudioIODeviceCallback* const oldCallback = callback;
{
const ScopedLock sl (callbackLock);
callback = newCallback;
}
if (oldCallback != nullptr)
oldCallback->audioDeviceStopped();
}
}
void stop() override
{
start (nullptr);
}
bool isOpen() override { return deviceIsOpen; }
bool isPlaying() override { return callback != nullptr; }
int getCurrentBitDepth() override { return 32; }
String getLastError() override { return lastError; }
BigInteger getActiveOutputChannels() const override { return activeOutputChannels; }
BigInteger getActiveInputChannels() const override { return activeInputChannels; }
int getOutputLatencyInSamples() override
{
int latency = 0;
for (int i = 0; i < outputPorts.size(); i++)
latency = jmax (latency, (int) juce::jack_port_get_total_latency (client, (jack_port_t*) outputPorts [i]));
return latency;
}
int getInputLatencyInSamples() override
{
int latency = 0;
for (int i = 0; i < inputPorts.size(); i++)
latency = jmax (latency, (int) juce::jack_port_get_total_latency (client, (jack_port_t*) inputPorts [i]));
return latency;
}
String inputId, outputId;
private:
void process (const int numSamples)
{
int numActiveInChans = 0, numActiveOutChans = 0;
for (int i = 0; i < totalNumberOfInputChannels; ++i)
{
if (activeInputChannels[i])
if (jack_default_audio_sample_t* in
= (jack_default_audio_sample_t*) juce::jack_port_get_buffer ((jack_port_t*) inputPorts.getUnchecked(i), numSamples))
inChans [numActiveInChans++] = (float*) in;
}
for (int i = 0; i < totalNumberOfOutputChannels; ++i)
{
if (activeOutputChannels[i])
if (jack_default_audio_sample_t* out
= (jack_default_audio_sample_t*) juce::jack_port_get_buffer ((jack_port_t*) outputPorts.getUnchecked(i), numSamples))
outChans [numActiveOutChans++] = (float*) out;
}
const ScopedLock sl (callbackLock);
if (callback != nullptr)
{
if ((numActiveInChans + numActiveOutChans) > 0)
callback->audioDeviceIOCallback (const_cast <const float**> (inChans.getData()), numActiveInChans,
outChans, numActiveOutChans, numSamples);
}
else
{
for (int i = 0; i < numActiveOutChans; ++i)
zeromem (outChans[i], sizeof (float) * numSamples);
}
}
static int processCallback (jack_nframes_t nframes, void* callbackArgument)
{
if (callbackArgument != nullptr)
((JackAudioIODevice*) callbackArgument)->process (nframes);
return 0;
}
void updateActivePorts()
{
BigInteger newOutputChannels, newInputChannels;
for (int i = 0; i < outputPorts.size(); ++i)
if (juce::jack_port_connected ((jack_port_t*) outputPorts.getUnchecked(i)))
newOutputChannels.setBit (i);
for (int i = 0; i < inputPorts.size(); ++i)
if (juce::jack_port_connected ((jack_port_t*) inputPorts.getUnchecked(i)))
newInputChannels.setBit (i);
if (newOutputChannels != activeOutputChannels
|| newInputChannels != activeInputChannels)
{
AudioIODeviceCallback* const oldCallback = callback;
stop();
activeOutputChannels = newOutputChannels;
activeInputChannels = newInputChannels;
if (oldCallback != nullptr)
start (oldCallback);
sendDeviceChangedCallback();
}
}
static void portConnectCallback (jack_port_id_t, jack_port_id_t, int, void* arg)
{
if (JackAudioIODevice* device = static_cast <JackAudioIODevice*> (arg))
device->updateActivePorts();
}
static void threadInitCallback (void* /* callbackArgument */)
{
JUCE_JACK_LOG ("JackAudioIODevice::initialise");
}
static void shutdownCallback (void* callbackArgument)
{
JUCE_JACK_LOG ("JackAudioIODevice::shutdown");
if (JackAudioIODevice* device = (JackAudioIODevice*) callbackArgument)
{
device->client = nullptr;
device->close();
}
}
static void errorCallback (const char* msg)
{
JUCE_JACK_LOG ("JackAudioIODevice::errorCallback " + String (msg));
}
static void sendDeviceChangedCallback();
bool deviceIsOpen;
jack_client_t* client;
String lastError;
AudioIODeviceCallback* callback;
CriticalSection callbackLock;
HeapBlock <float*> inChans, outChans;
int totalNumberOfInputChannels;
int totalNumberOfOutputChannels;
Array<void*> inputPorts, outputPorts;
BigInteger activeInputChannels, activeOutputChannels;
};
//==============================================================================
class JackAudioIODeviceType : public AudioIODeviceType
{
public:
JackAudioIODeviceType()
: AudioIODeviceType ("JACK"),
hasScanned (false)
{
activeDeviceTypes.add (this);
}
~JackAudioIODeviceType()
{
activeDeviceTypes.removeFirstMatchingValue (this);
}
void scanForDevices()
{
hasScanned = true;
inputNames.clear();
inputIds.clear();
outputNames.clear();
outputIds.clear();
if (juce_libjackHandle == nullptr) juce_libjackHandle = dlopen ("libjack.so.0", RTLD_LAZY);
if (juce_libjackHandle == nullptr) juce_libjackHandle = dlopen ("libjack.so", RTLD_LAZY);
if (juce_libjackHandle == nullptr) return;
jack_status_t status;
// open a dummy client
if (jack_client_t* const client = juce::jack_client_open ("JuceJackDummy", JackNoStartServer, &status))
{
// scan for output devices
for (JackPortIterator i (client, false); i.next();)
{
if (i.clientName != (JUCE_JACK_CLIENT_NAME) && ! inputNames.contains (i.clientName))
{
inputNames.add (i.clientName);
inputIds.add (i.ports [i.index]);
}
}
// scan for input devices
for (JackPortIterator i (client, true); i.next();)
{
if (i.clientName != (JUCE_JACK_CLIENT_NAME) && ! outputNames.contains (i.clientName))
{
outputNames.add (i.clientName);
outputIds.add (i.ports [i.index]);
}
}
juce::jack_client_close (client);
}
else
{
JUCE_JACK_LOG_STATUS (status);
}
}
StringArray getDeviceNames (bool wantInputNames) const
{
jassert (hasScanned); // need to call scanForDevices() before doing this
return wantInputNames ? inputNames : outputNames;
}
int getDefaultDeviceIndex (bool /* forInput */) const
{
jassert (hasScanned); // need to call scanForDevices() before doing this
return 0;
}
bool hasSeparateInputsAndOutputs() const { return true; }
int getIndexOfDevice (AudioIODevice* device, bool asInput) const
{
jassert (hasScanned); // need to call scanForDevices() before doing this
if (JackAudioIODevice* d = dynamic_cast <JackAudioIODevice*> (device))
return asInput ? inputIds.indexOf (d->inputId)
: outputIds.indexOf (d->outputId);
return -1;
}
AudioIODevice* createDevice (const String& outputDeviceName,
const String& inputDeviceName)
{
jassert (hasScanned); // need to call scanForDevices() before doing this
const int inputIndex = inputNames.indexOf (inputDeviceName);
const int outputIndex = outputNames.indexOf (outputDeviceName);
if (inputIndex >= 0 || outputIndex >= 0)
return new JackAudioIODevice (outputIndex >= 0 ? outputDeviceName
: inputDeviceName,
inputIds [inputIndex],
outputIds [outputIndex]);
return nullptr;
}
void portConnectionChange() { callDeviceChangeListeners(); }
private:
StringArray inputNames, outputNames, inputIds, outputIds;
bool hasScanned;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (JackAudioIODeviceType)
};
void JackAudioIODevice::sendDeviceChangedCallback()
{
for (int i = activeDeviceTypes.size(); --i >= 0;)
if (JackAudioIODeviceType* d = activeDeviceTypes[i])
d->portConnectionChange();
}
//==============================================================================
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_JACK()
{
return new JackAudioIODeviceType();
}
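// A minimal usage sketch (not part of the original commit). It assumes libjack and a running
// JACK server; every call used here is defined by the classes above.
static AudioIODevice* openFirstJackDeviceExample (AudioIODeviceCallback& callback)
{
    ScopedPointer<AudioIODeviceType> type (AudioIODeviceType::createAudioIODeviceType_JACK());
    type->scanForDevices();                                  // loads libjack.so and queries the ports

    const StringArray outs (type->getDeviceNames (false));
    const StringArray ins  (type->getDeviceNames (true));

    if (AudioIODevice* device = type->createDevice (outs[0], ins[0]))
    {
        BigInteger inChans, outChans;
        inChans.setRange (0, 2, true);
        outChans.setRange (0, 2, true);

        // the sample rate and buffer size are dictated by the JACK server, so the values passed here are ignored
        if (device->open (inChans, outChans, 0.0, 0).isEmpty())
            device->start (&callback);

        return device;
    }

    return nullptr;
}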

View file

@ -0,0 +1,612 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#if JUCE_ALSA
// You can define these strings in your app if you want to override the default names:
#ifndef JUCE_ALSA_MIDI_INPUT_NAME
#define JUCE_ALSA_MIDI_INPUT_NAME "Juce Midi Input"
#endif
#ifndef JUCE_ALSA_MIDI_OUTPUT_NAME
#define JUCE_ALSA_MIDI_OUTPUT_NAME "Juce Midi Output"
#endif
//==============================================================================
namespace
{
class AlsaPortAndCallback;
//==============================================================================
class AlsaClient : public ReferenceCountedObject
{
public:
typedef ReferenceCountedObjectPtr<AlsaClient> Ptr;
AlsaClient (bool forInput)
: input (forInput), handle (nullptr)
{
snd_seq_open (&handle, "default", forInput ? SND_SEQ_OPEN_INPUT
: SND_SEQ_OPEN_OUTPUT, 0);
}
~AlsaClient()
{
if (handle != nullptr)
{
snd_seq_close (handle);
handle = nullptr;
}
jassert (activeCallbacks.size() == 0);
if (inputThread)
{
inputThread->stopThread (3000);
inputThread = nullptr;
}
}
bool isInput() const noexcept { return input; }
void setName (const String& name)
{
snd_seq_set_client_name (handle, name.toUTF8());
}
void registerCallback (AlsaPortAndCallback* cb)
{
if (cb != nullptr)
{
{
const ScopedLock sl (callbackLock);
activeCallbacks.add (cb);
if (inputThread == nullptr)
inputThread = new MidiInputThread (*this);
}
inputThread->startThread();
}
}
void unregisterCallback (AlsaPortAndCallback* cb)
{
const ScopedLock sl (callbackLock);
jassert (activeCallbacks.contains (cb));
activeCallbacks.removeAllInstancesOf (cb);
if (activeCallbacks.size() == 0 && inputThread->isThreadRunning())
inputThread->signalThreadShouldExit();
}
void handleIncomingMidiMessage (const MidiMessage& message, int port);
snd_seq_t* get() const noexcept { return handle; }
private:
bool input;
snd_seq_t* handle;
Array<AlsaPortAndCallback*> activeCallbacks;
CriticalSection callbackLock;
//==============================================================================
class MidiInputThread : public Thread
{
public:
MidiInputThread (AlsaClient& c)
: Thread ("Juce MIDI Input"), client (c)
{
jassert (client.input && client.get() != nullptr);
}
void run() override
{
const int maxEventSize = 16 * 1024;
snd_midi_event_t* midiParser;
snd_seq_t* seqHandle = client.get();
if (snd_midi_event_new (maxEventSize, &midiParser) >= 0)
{
const int numPfds = snd_seq_poll_descriptors_count (seqHandle, POLLIN);
HeapBlock<pollfd> pfd (numPfds);
snd_seq_poll_descriptors (seqHandle, pfd, numPfds, POLLIN);
HeapBlock <uint8> buffer (maxEventSize);
while (! threadShouldExit())
{
if (poll (pfd, numPfds, 100) > 0) // there was a "500" here which is a bit long when we exit the program and have to wait for a timeout on this poll call
{
if (threadShouldExit())
break;
snd_seq_nonblock (seqHandle, 1);
do
{
snd_seq_event_t* inputEvent = nullptr;
if (snd_seq_event_input (seqHandle, &inputEvent) >= 0)
{
// xxx what about SYSEXes that are too big for the buffer?
const int numBytes = snd_midi_event_decode (midiParser, buffer,
maxEventSize, inputEvent);
snd_midi_event_reset_decode (midiParser);
if (numBytes > 0)
{
const MidiMessage message ((const uint8*) buffer, numBytes,
Time::getMillisecondCounter() * 0.001);
client.handleIncomingMidiMessage (message, inputEvent->dest.port);
}
snd_seq_free_event (inputEvent);
}
}
while (snd_seq_event_input_pending (seqHandle, 0) > 0);
}
}
snd_midi_event_free (midiParser);
}
}
private:
AlsaClient& client;
};
ScopedPointer<MidiInputThread> inputThread;
};
static AlsaClient::Ptr globalAlsaSequencerIn()
{
static AlsaClient::Ptr global (new AlsaClient (true));
return global;
}
static AlsaClient::Ptr globalAlsaSequencerOut()
{
static AlsaClient::Ptr global (new AlsaClient (false));
return global;
}
static AlsaClient::Ptr globalAlsaSequencer (bool input)
{
return input ? globalAlsaSequencerIn()
: globalAlsaSequencerOut();
}
//==============================================================================
// represents an input or output port of the supplied AlsaClient
class AlsaPort
{
public:
AlsaPort() noexcept : portId (-1) {}
AlsaPort (const AlsaClient::Ptr& c, int port) noexcept : client (c), portId (port) {}
void createPort (const AlsaClient::Ptr& c, const String& name, bool forInput)
{
client = c;
if (snd_seq_t* handle = client->get())
portId = snd_seq_create_simple_port (handle, name.toUTF8(),
forInput ? (SND_SEQ_PORT_CAP_WRITE | SND_SEQ_PORT_CAP_SUBS_WRITE)
: (SND_SEQ_PORT_CAP_READ | SND_SEQ_PORT_CAP_SUBS_READ),
SND_SEQ_PORT_TYPE_MIDI_GENERIC);
}
void deletePort()
{
if (isValid())
{
snd_seq_delete_simple_port (client->get(), portId);
portId = -1;
}
}
void connectWith (int sourceClient, int sourcePort)
{
if (client->isInput())
snd_seq_connect_from (client->get(), portId, sourceClient, sourcePort);
else
snd_seq_connect_to (client->get(), portId, sourceClient, sourcePort);
}
bool isValid() const noexcept
{
return client != nullptr && client->get() != nullptr && portId >= 0;
}
AlsaClient::Ptr client;
int portId;
};
//==============================================================================
class AlsaPortAndCallback
{
public:
AlsaPortAndCallback (AlsaPort p, MidiInput* in, MidiInputCallback* cb)
: port (p), midiInput (in), callback (cb), callbackEnabled (false)
{
}
~AlsaPortAndCallback()
{
enableCallback (false);
port.deletePort();
}
void enableCallback (bool enable)
{
if (callbackEnabled != enable)
{
callbackEnabled = enable;
if (enable)
port.client->registerCallback (this);
else
port.client->unregisterCallback (this);
}
}
void handleIncomingMidiMessage (const MidiMessage& message) const
{
callback->handleIncomingMidiMessage (midiInput, message);
}
private:
AlsaPort port;
MidiInput* midiInput;
MidiInputCallback* callback;
bool callbackEnabled;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (AlsaPortAndCallback)
};
void AlsaClient::handleIncomingMidiMessage (const MidiMessage& message, int port)
{
const ScopedLock sl (callbackLock);
if (AlsaPortAndCallback* const cb = activeCallbacks[port])
cb->handleIncomingMidiMessage (message);
}
//==============================================================================
static AlsaPort iterateMidiClient (const AlsaClient::Ptr& seq,
snd_seq_client_info_t* clientInfo,
const bool forInput,
StringArray& deviceNamesFound,
const int deviceIndexToOpen)
{
AlsaPort port;
snd_seq_t* seqHandle = seq->get();
snd_seq_port_info_t* portInfo = nullptr;
if (snd_seq_port_info_malloc (&portInfo) == 0)
{
int numPorts = snd_seq_client_info_get_num_ports (clientInfo);
const int client = snd_seq_client_info_get_client (clientInfo);
snd_seq_port_info_set_client (portInfo, client);
snd_seq_port_info_set_port (portInfo, -1);
while (--numPorts >= 0)
{
if (snd_seq_query_next_port (seqHandle, portInfo) == 0
&& (snd_seq_port_info_get_capability (portInfo) & (forInput ? SND_SEQ_PORT_CAP_READ
: SND_SEQ_PORT_CAP_WRITE)) != 0)
{
deviceNamesFound.add (snd_seq_client_info_get_name (clientInfo));
if (deviceNamesFound.size() == deviceIndexToOpen + 1)
{
const int sourcePort = snd_seq_port_info_get_port (portInfo);
const int sourceClient = snd_seq_client_info_get_client (clientInfo);
if (sourcePort != -1)
{
const String name (forInput ? JUCE_ALSA_MIDI_INPUT_NAME
: JUCE_ALSA_MIDI_OUTPUT_NAME);
seq->setName (name);
port.createPort (seq, name, forInput);
port.connectWith (sourceClient, sourcePort);
}
}
}
}
snd_seq_port_info_free (portInfo);
}
return port;
}
static AlsaPort iterateMidiDevices (const bool forInput,
StringArray& deviceNamesFound,
const int deviceIndexToOpen)
{
AlsaPort port;
const AlsaClient::Ptr client (globalAlsaSequencer (forInput));
if (snd_seq_t* const seqHandle = client->get())
{
snd_seq_system_info_t* systemInfo = nullptr;
snd_seq_client_info_t* clientInfo = nullptr;
if (snd_seq_system_info_malloc (&systemInfo) == 0)
{
if (snd_seq_system_info (seqHandle, systemInfo) == 0
&& snd_seq_client_info_malloc (&clientInfo) == 0)
{
int numClients = snd_seq_system_info_get_cur_clients (systemInfo);
while (--numClients >= 0 && ! port.isValid())
if (snd_seq_query_next_client (seqHandle, clientInfo) == 0)
port = iterateMidiClient (client, clientInfo, forInput,
deviceNamesFound, deviceIndexToOpen);
snd_seq_client_info_free (clientInfo);
}
snd_seq_system_info_free (systemInfo);
}
}
deviceNamesFound.appendNumbersToDuplicates (true, true);
return port;
}
AlsaPort createMidiDevice (const bool forInput, const String& deviceNameToOpen)
{
AlsaPort port;
AlsaClient::Ptr client (new AlsaClient (forInput));
if (client->get())
{
client->setName (deviceNameToOpen + (forInput ? " Input" : " Output"));
port.createPort (client, forInput ? "in" : "out", forInput);
}
return port;
}
//==============================================================================
class MidiOutputDevice
{
public:
MidiOutputDevice (MidiOutput* const output, const AlsaPort& p)
: midiOutput (output), port (p),
maxEventSize (16 * 1024)
{
jassert (port.isValid() && midiOutput != nullptr);
snd_midi_event_new (maxEventSize, &midiParser);
}
~MidiOutputDevice()
{
snd_midi_event_free (midiParser);
port.deletePort();
}
void sendMessageNow (const MidiMessage& message)
{
if (message.getRawDataSize() > maxEventSize)
{
maxEventSize = message.getRawDataSize();
snd_midi_event_free (midiParser);
snd_midi_event_new (maxEventSize, &midiParser);
}
snd_seq_event_t event;
snd_seq_ev_clear (&event);
long numBytes = (long) message.getRawDataSize();
const uint8* data = message.getRawData();
snd_seq_t* seqHandle = port.client->get();
while (numBytes > 0)
{
const long numSent = snd_midi_event_encode (midiParser, data, numBytes, &event);
if (numSent <= 0)
break;
numBytes -= numSent;
data += numSent;
snd_seq_ev_set_source (&event, 0);
snd_seq_ev_set_subs (&event);
snd_seq_ev_set_direct (&event);
snd_seq_event_output (seqHandle, &event);
}
snd_seq_drain_output (seqHandle);
snd_midi_event_reset_encode (midiParser);
}
private:
MidiOutput* const midiOutput;
AlsaPort port;
snd_midi_event_t* midiParser;
int maxEventSize;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (MidiOutputDevice);
};
} // namespace
StringArray MidiOutput::getDevices()
{
StringArray devices;
iterateMidiDevices (false, devices, -1);
return devices;
}
int MidiOutput::getDefaultDeviceIndex()
{
return 0;
}
MidiOutput* MidiOutput::openDevice (int deviceIndex)
{
MidiOutput* newDevice = nullptr;
StringArray devices;
AlsaPort port (iterateMidiDevices (false, devices, deviceIndex));
if (port.isValid())
{
newDevice = new MidiOutput();
newDevice->internal = new MidiOutputDevice (newDevice, port);
}
return newDevice;
}
MidiOutput* MidiOutput::createNewDevice (const String& deviceName)
{
MidiOutput* newDevice = nullptr;
AlsaPort port (createMidiDevice (false, deviceName));
if (port.isValid())
{
newDevice = new MidiOutput();
newDevice->internal = new MidiOutputDevice (newDevice, port);
}
return newDevice;
}
MidiOutput::~MidiOutput()
{
stopBackgroundThread();
delete static_cast<MidiOutputDevice*> (internal);
}
void MidiOutput::sendMessageNow (const MidiMessage& message)
{
static_cast<MidiOutputDevice*> (internal)->sendMessageNow (message);
}
//==============================================================================
MidiInput::MidiInput (const String& nm)
: name (nm), internal (nullptr)
{
}
MidiInput::~MidiInput()
{
stop();
delete static_cast<AlsaPortAndCallback*> (internal);
}
void MidiInput::start()
{
static_cast<AlsaPortAndCallback*> (internal)->enableCallback (true);
}
void MidiInput::stop()
{
static_cast<AlsaPortAndCallback*> (internal)->enableCallback (false);
}
int MidiInput::getDefaultDeviceIndex()
{
return 0;
}
StringArray MidiInput::getDevices()
{
StringArray devices;
iterateMidiDevices (true, devices, -1);
return devices;
}
MidiInput* MidiInput::openDevice (int deviceIndex, MidiInputCallback* callback)
{
MidiInput* newDevice = nullptr;
StringArray devices;
AlsaPort port (iterateMidiDevices (true, devices, deviceIndex));
if (port.isValid())
{
newDevice = new MidiInput (devices [deviceIndex]);
newDevice->internal = new AlsaPortAndCallback (port, newDevice, callback);
}
return newDevice;
}
MidiInput* MidiInput::createNewDevice (const String& deviceName, MidiInputCallback* callback)
{
MidiInput* newDevice = nullptr;
AlsaPort port (createMidiDevice (true, deviceName));
if (port.isValid())
{
newDevice = new MidiInput (deviceName);
newDevice->internal = new AlsaPortAndCallback (port, newDevice, callback);
}
return newDevice;
}
//==============================================================================
#else
// (These are just stub functions if ALSA is unavailable...)
StringArray MidiOutput::getDevices() { return StringArray(); }
int MidiOutput::getDefaultDeviceIndex() { return 0; }
MidiOutput* MidiOutput::openDevice (int) { return nullptr; }
MidiOutput* MidiOutput::createNewDevice (const String&) { return nullptr; }
MidiOutput::~MidiOutput() {}
void MidiOutput::sendMessageNow (const MidiMessage&) {}
MidiInput::MidiInput (const String& nm) : name (nm), internal (nullptr) {}
MidiInput::~MidiInput() {}
void MidiInput::start() {}
void MidiInput::stop() {}
int MidiInput::getDefaultDeviceIndex() { return 0; }
StringArray MidiInput::getDevices() { return StringArray(); }
MidiInput* MidiInput::openDevice (int, MidiInputCallback*) { return nullptr; }
MidiInput* MidiInput::createNewDevice (const String&, MidiInputCallback*) { return nullptr; }
#endif
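// A minimal usage sketch (not part of the original commit): opens the first ALSA MIDI input and
// creates a virtual output, using only the MidiInput/MidiOutput calls implemented above. The
// callback type and port name are purely illustrative.
struct DebugMidiCallback  : public MidiInputCallback
{
    void handleIncomingMidiMessage (MidiInput*, const MidiMessage& m) override
    {
        DBG (String (m.getRawDataSize()) + " byte MIDI message received");
    }
};

static void alsaMidiExample (DebugMidiCallback& callback)
{
    ScopedPointer<MidiInput> in (MidiInput::openDevice (0, &callback));
    if (in != nullptr)
        in->start();                                  // starts (or reuses) the shared ALSA input thread

    // createNewDevice registers a fresh ALSA client that other apps can connect to
    ScopedPointer<MidiOutput> out (MidiOutput::createNewDevice ("My Virtual Port"));
    if (out != nullptr)
        out->sendMessageNow (MidiMessage::noteOn (1, 60, (uint8) 100));
}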

View file

@ -0,0 +1,455 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
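// 1x CD audio speed is 44100 samples/sec * 2 channels * 2 bytes = ~176 kB per second; the burn
// code below uses this constant to convert between the "x" speeds exposed to callers and the
// kB/s values that the DiscRecording keys work in.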
const int kilobytesPerSecond1x = 176;
struct AudioTrackProducerClass : public ObjCClass <NSObject>
{
AudioTrackProducerClass() : ObjCClass <NSObject> ("JUCEAudioTrackProducer_")
{
addIvar<AudioSourceHolder*> ("source");
addMethod (@selector (initWithAudioSourceHolder:), initWithAudioSourceHolder, "@@:^v");
addMethod (@selector (cleanupTrackAfterBurn:), cleanupTrackAfterBurn, "v@:@");
addMethod (@selector (cleanupTrackAfterVerification:), cleanupTrackAfterVerification, "c@:@");
addMethod (@selector (estimateLengthOfTrack:), estimateLengthOfTrack, "Q@:@");
addMethod (@selector (prepareTrack:forBurn:toMedia:), prepareTrack, "c@:@@@");
addMethod (@selector (prepareTrackForVerification:), prepareTrackForVerification, "c@:@");
addMethod (@selector (produceDataForTrack:intoBuffer:length:atAddress:blockSize:ioFlags:),
produceDataForTrack, "I@:@^cIQI^I");
addMethod (@selector (producePreGapForTrack:intoBuffer:length:atAddress:blockSize:ioFlags:),
producePreGapForTrack, "I@:@^cIQI^I");
addMethod (@selector (verifyDataForTrack:intoBuffer:length:atAddress:blockSize:ioFlags:),
verifyDataForTrack, "c@:@^cIQI^I");
registerClass();
}
struct AudioSourceHolder
{
AudioSourceHolder (AudioSource* s, int numFrames)
: source (s), readPosition (0), lengthInFrames (numFrames)
{
}
~AudioSourceHolder()
{
if (source != nullptr)
source->releaseResources();
}
ScopedPointer<AudioSource> source;
int readPosition, lengthInFrames;
};
private:
static id initWithAudioSourceHolder (id self, SEL, AudioSourceHolder* source)
{
self = sendSuperclassMessage (self, @selector (init));
object_setInstanceVariable (self, "source", source);
return self;
}
static AudioSourceHolder* getSource (id self)
{
return getIvar<AudioSourceHolder*> (self, "source");
}
static void dealloc (id self, SEL)
{
delete getSource (self);
sendSuperclassMessage (self, @selector (dealloc));
}
static void cleanupTrackAfterBurn (id self, SEL, DRTrack*) {}
static BOOL cleanupTrackAfterVerification (id self, SEL, DRTrack*) { return true; }
static uint64_t estimateLengthOfTrack (id self, SEL, DRTrack*)
{
return getSource (self)->lengthInFrames;
}
static BOOL prepareTrack (id self, SEL, DRTrack*, DRBurn*, NSDictionary*)
{
if (AudioSourceHolder* const source = getSource (self))
{
source->source->prepareToPlay (44100 / 75, 44100);
source->readPosition = 0;
}
return true;
}
static BOOL prepareTrackForVerification (id self, SEL, DRTrack*)
{
if (AudioSourceHolder* const source = getSource (self))
source->source->prepareToPlay (44100 / 75, 44100);
return true;
}
static uint32_t produceDataForTrack (id self, SEL, DRTrack*, char* buffer,
uint32_t bufferLength, uint64_t /*address*/,
uint32_t /*blockSize*/, uint32_t* /*flags*/)
{
if (AudioSourceHolder* const source = getSource (self))
{
const int numSamples = jmin ((int) bufferLength / 4,
(source->lengthInFrames * (44100 / 75)) - source->readPosition);
if (numSamples > 0)
{
AudioSampleBuffer tempBuffer (2, numSamples);
AudioSourceChannelInfo info (tempBuffer);
source->source->getNextAudioBlock (info);
typedef AudioData::Pointer <AudioData::Int16, AudioData::LittleEndian, AudioData::Interleaved, AudioData::NonConst> CDSampleFormat;
typedef AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::Const> SourceSampleFormat;
CDSampleFormat left (buffer, 2);
left.convertSamples (SourceSampleFormat (tempBuffer.getReadPointer (0)), numSamples);
CDSampleFormat right (buffer + 2, 2);
right.convertSamples (SourceSampleFormat (tempBuffer.getReadPointer (1)), numSamples);
source->readPosition += numSamples;
}
return numSamples * 4;
}
return 0;
}
static uint32_t producePreGapForTrack (id self, SEL, DRTrack*, char* buffer,
uint32_t bufferLength, uint64_t /*address*/,
uint32_t /*blockSize*/, uint32_t* /*flags*/)
{
zeromem (buffer, bufferLength);
return bufferLength;
}
static BOOL verifyDataForTrack (id self, SEL, DRTrack*, const char*,
uint32_t /*bufferLength*/, uint64_t /*address*/,
uint32_t /*blockSize*/, uint32_t* /*flags*/)
{
return true;
}
};
struct OpenDiskDevice
{
OpenDiskDevice (DRDevice* d)
: device (d),
tracks ([[NSMutableArray alloc] init]),
underrunProtection (true)
{
}
~OpenDiskDevice()
{
[tracks release];
}
void addSourceTrack (AudioSource* source, int numSamples)
{
if (source != nullptr)
{
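// 44100 samples/sec over 75 CD frames/sec gives 588 samples per frame, so this rounds the
// track length up to a whole number of CD frames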
const int numFrames = (numSamples + 587) / 588;
static AudioTrackProducerClass cls;
NSObject* producer = [cls.createInstance() performSelector: @selector (initWithAudioSourceHolder:)
withObject: (id) new AudioTrackProducerClass::AudioSourceHolder (source, numFrames)];
DRTrack* track = [[DRTrack alloc] initWithProducer: producer];
{
NSMutableDictionary* p = [[track properties] mutableCopy];
[p setObject: [DRMSF msfWithFrames: numFrames] forKey: DRTrackLengthKey];
[p setObject: [NSNumber numberWithUnsignedShort: 2352] forKey: DRBlockSizeKey];
[p setObject: [NSNumber numberWithInt: 0] forKey: DRDataFormKey];
[p setObject: [NSNumber numberWithInt: 0] forKey: DRBlockTypeKey];
[p setObject: [NSNumber numberWithInt: 0] forKey: DRTrackModeKey];
[p setObject: [NSNumber numberWithInt: 0] forKey: DRSessionFormatKey];
[track setProperties: p];
[p release];
}
[tracks addObject: track];
[track release];
[producer release];
}
}
String burn (AudioCDBurner::BurnProgressListener* listener,
bool shouldEject, bool performFakeBurnForTesting, int burnSpeed)
{
DRBurn* burn = [DRBurn burnForDevice: device];
if (! [device acquireExclusiveAccess])
return "Couldn't open or write to the CD device";
[device acquireMediaReservation];
NSMutableDictionary* d = [[burn properties] mutableCopy];
[d autorelease];
[d setObject: [NSNumber numberWithBool: performFakeBurnForTesting] forKey: DRBurnTestingKey];
[d setObject: [NSNumber numberWithBool: false] forKey: DRBurnVerifyDiscKey];
[d setObject: (shouldEject ? DRBurnCompletionActionEject : DRBurnCompletionActionMount) forKey: DRBurnCompletionActionKey];
if (burnSpeed > 0)
[d setObject: [NSNumber numberWithFloat: burnSpeed * kilobytesPerSecond1x] forKey: DRBurnRequestedSpeedKey];
if (! underrunProtection)
[d setObject: [NSNumber numberWithBool: false] forKey: DRBurnUnderrunProtectionKey];
[burn setProperties: d];
[burn writeLayout: tracks];
for (;;)
{
Thread::sleep (300);
float progress = [[[burn status] objectForKey: DRStatusPercentCompleteKey] floatValue];
if (listener != nullptr && listener->audioCDBurnProgress (progress))
{
[burn abort];
return "User cancelled the write operation";
}
if ([[[burn status] objectForKey: DRStatusStateKey] isEqualTo: DRStatusStateFailed])
return "Write operation failed";
if ([[[burn status] objectForKey: DRStatusStateKey] isEqualTo: DRStatusStateDone])
break;
NSString* err = (NSString*) [[[burn status] objectForKey: DRErrorStatusKey]
objectForKey: DRErrorStatusErrorStringKey];
if ([err length] > 0)
return nsStringToJuce (err);
}
[device releaseMediaReservation];
[device releaseExclusiveAccess];
return String::empty;
}
DRDevice* device;
NSMutableArray* tracks;
bool underrunProtection;
};
//==============================================================================
class AudioCDBurner::Pimpl : public Timer
{
public:
Pimpl (AudioCDBurner& b, int deviceIndex) : owner (b)
{
if (DRDevice* dev = [[DRDevice devices] objectAtIndex: deviceIndex])
{
device = new OpenDiskDevice (dev);
lastState = getDiskState();
startTimer (1000);
}
}
~Pimpl()
{
stopTimer();
}
void timerCallback() override
{
const DiskState state = getDiskState();
if (state != lastState)
{
lastState = state;
owner.sendChangeMessage();
}
}
DiskState getDiskState() const
{
if ([device->device isValid])
{
NSDictionary* status = [device->device status];
NSString* state = [status objectForKey: DRDeviceMediaStateKey];
if ([state isEqualTo: DRDeviceMediaStateNone])
{
if ([[status objectForKey: DRDeviceIsTrayOpenKey] boolValue])
return trayOpen;
return noDisc;
}
if ([state isEqualTo: DRDeviceMediaStateMediaPresent])
{
if ([[[status objectForKey: DRDeviceMediaInfoKey] objectForKey: DRDeviceMediaBlocksFreeKey] intValue] > 0)
return writableDiskPresent;
return readOnlyDiskPresent;
}
}
return unknown;
}
bool openTray() { return [device->device isValid] && [device->device ejectMedia]; }
Array<int> getAvailableWriteSpeeds() const
{
Array<int> results;
if ([device->device isValid])
for (id kbPerSec in [[[device->device status] objectForKey: DRDeviceMediaInfoKey] objectForKey: DRDeviceBurnSpeedsKey])
results.add ([kbPerSec intValue] / kilobytesPerSecond1x);
return results;
}
bool setBufferUnderrunProtection (const bool shouldBeEnabled)
{
if ([device->device isValid])
{
device->underrunProtection = shouldBeEnabled;
return shouldBeEnabled && [[[device->device status] objectForKey: DRDeviceCanUnderrunProtectCDKey] boolValue];
}
return false;
}
int getNumAvailableAudioBlocks() const
{
return [[[[device->device status] objectForKey: DRDeviceMediaInfoKey]
objectForKey: DRDeviceMediaBlocksFreeKey] intValue];
}
ScopedPointer<OpenDiskDevice> device;
private:
DiskState lastState;
AudioCDBurner& owner;
};
//==============================================================================
AudioCDBurner::AudioCDBurner (const int deviceIndex)
{
pimpl = new Pimpl (*this, deviceIndex);
}
AudioCDBurner::~AudioCDBurner()
{
}
AudioCDBurner* AudioCDBurner::openDevice (const int deviceIndex)
{
ScopedPointer<AudioCDBurner> b (new AudioCDBurner (deviceIndex));
if (b->pimpl->device == nil)
b = nullptr;
return b.release();
}
StringArray AudioCDBurner::findAvailableDevices()
{
StringArray s;
for (NSDictionary* dic in [DRDevice devices])
if (NSString* name = [dic valueForKey: DRDeviceProductNameKey])
s.add (nsStringToJuce (name));
return s;
}
AudioCDBurner::DiskState AudioCDBurner::getDiskState() const
{
return pimpl->getDiskState();
}
bool AudioCDBurner::isDiskPresent() const
{
return getDiskState() == writableDiskPresent;
}
bool AudioCDBurner::openTray()
{
return pimpl->openTray();
}
AudioCDBurner::DiskState AudioCDBurner::waitUntilStateChange (int timeOutMilliseconds)
{
const int64 timeout = Time::currentTimeMillis() + timeOutMilliseconds;
DiskState oldState = getDiskState();
DiskState newState = oldState;
while (newState == oldState && Time::currentTimeMillis() < timeout)
{
newState = getDiskState();
Thread::sleep (100);
}
return newState;
}
Array<int> AudioCDBurner::getAvailableWriteSpeeds() const
{
return pimpl->getAvailableWriteSpeeds();
}
bool AudioCDBurner::setBufferUnderrunProtection (const bool shouldBeEnabled)
{
return pimpl->setBufferUnderrunProtection (shouldBeEnabled);
}
int AudioCDBurner::getNumAvailableAudioBlocks() const
{
return pimpl->getNumAvailableAudioBlocks();
}
bool AudioCDBurner::addAudioTrack (AudioSource* source, int numSamps)
{
if ([pimpl->device->device isValid])
{
pimpl->device->addSourceTrack (source, numSamps);
return true;
}
return false;
}
String AudioCDBurner::burn (AudioCDBurner::BurnProgressListener* listener,
bool ejectDiscAfterwards,
bool performFakeBurnForTesting,
int writeSpeed)
{
if ([pimpl->device->device isValid])
return pimpl->device->burn (listener, ejectDiscAfterwards, performFakeBurnForTesting, writeSpeed);
return "Couldn't open or write to the CD device";
}
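// A minimal usage sketch (not part of the original commit): burning a single audio track using
// the calls implemented above. The AudioSource and its length are supplied by the caller and
// are assumptions of this example; the burner takes ownership of the source.
static String burnOneTrackExample (AudioSource* sourceToUse, int lengthInSamples)
{
    ScopedPointer<AudioCDBurner> burner (AudioCDBurner::openDevice (0));

    if (burner == nullptr)
        return "No CD burner found";

    if (burner->getDiskState() != AudioCDBurner::writableDiskPresent)
        return "No writable disc in the drive";

    burner->addAudioTrack (sourceToUse, lengthInSamples);

    // listener = nullptr, eject afterwards, no fake test burn, default write speed
    return burner->burn (nullptr, true, false, 0);
}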

View file

@ -0,0 +1,261 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
namespace CDReaderHelpers
{
inline const XmlElement* getElementForKey (const XmlElement& xml, const String& key)
{
forEachXmlChildElementWithTagName (xml, child, "key")
if (child->getAllSubText().trim() == key)
return child->getNextElement();
return nullptr;
}
static int getIntValueForKey (const XmlElement& xml, const String& key, int defaultValue = -1)
{
const XmlElement* const block = getElementForKey (xml, key);
return block != nullptr ? block->getAllSubText().trim().getIntValue() : defaultValue;
}
// Get the track offsets for a CD given an XmlElement representing its TOC.Plist.
// Returns NULL on success, otherwise a const char* representing an error.
static const char* getTrackOffsets (XmlDocument& xmlDocument, Array<int>& offsets)
{
const ScopedPointer<XmlElement> xml (xmlDocument.getDocumentElement());
if (xml == nullptr)
return "Couldn't parse XML in file";
const XmlElement* const dict = xml->getChildByName ("dict");
if (dict == nullptr)
return "Couldn't get top level dictionary";
const XmlElement* const sessions = getElementForKey (*dict, "Sessions");
if (sessions == nullptr)
return "Couldn't find sessions key";
const XmlElement* const session = sessions->getFirstChildElement();
if (session == nullptr)
return "Couldn't find first session";
const int leadOut = getIntValueForKey (*session, "Leadout Block");
if (leadOut < 0)
return "Couldn't find Leadout Block";
const XmlElement* const trackArray = getElementForKey (*session, "Track Array");
if (trackArray == nullptr)
return "Couldn't find Track Array";
forEachXmlChildElement (*trackArray, track)
{
const int trackValue = getIntValueForKey (*track, "Start Block");
if (trackValue < 0)
return "Couldn't find Start Block in the track";
offsets.add (trackValue * AudioCDReader::samplesPerFrame - 88200);
}
offsets.add (leadOut * AudioCDReader::samplesPerFrame - 88200);
return nullptr;
}
static void findDevices (Array<File>& cds)
{
File volumes ("/Volumes");
volumes.findChildFiles (cds, File::findDirectories, false);
for (int i = cds.size(); --i >= 0;)
if (! cds.getReference(i).getChildFile (".TOC.plist").exists())
cds.remove (i);
}
struct TrackSorter
{
static int getCDTrackNumber (const File& file)
{
return file.getFileName().initialSectionContainingOnly ("0123456789").getIntValue();
}
static int compareElements (const File& first, const File& second)
{
const int firstTrack = getCDTrackNumber (first);
const int secondTrack = getCDTrackNumber (second);
jassert (firstTrack > 0 && secondTrack > 0);
return firstTrack - secondTrack;
}
};
}
//==============================================================================
StringArray AudioCDReader::getAvailableCDNames()
{
Array<File> cds;
CDReaderHelpers::findDevices (cds);
StringArray names;
for (int i = 0; i < cds.size(); ++i)
names.add (cds.getReference(i).getFileName());
return names;
}
AudioCDReader* AudioCDReader::createReaderForCD (const int index)
{
Array<File> cds;
CDReaderHelpers::findDevices (cds);
if (cds[index].exists())
return new AudioCDReader (cds[index]);
return nullptr;
}
AudioCDReader::AudioCDReader (const File& volume)
: AudioFormatReader (0, "CD Audio"),
volumeDir (volume),
currentReaderTrack (-1),
reader (0)
{
sampleRate = 44100.0;
bitsPerSample = 16;
numChannels = 2;
usesFloatingPointData = false;
refreshTrackLengths();
}
AudioCDReader::~AudioCDReader()
{
}
void AudioCDReader::refreshTrackLengths()
{
tracks.clear();
trackStartSamples.clear();
lengthInSamples = 0;
volumeDir.findChildFiles (tracks, File::findFiles | File::ignoreHiddenFiles, false, "*.aiff");
CDReaderHelpers::TrackSorter sorter;
tracks.sort (sorter);
const File toc (volumeDir.getChildFile (".TOC.plist"));
if (toc.exists())
{
XmlDocument doc (toc);
const char* error = CDReaderHelpers::getTrackOffsets (doc, trackStartSamples);
(void) error; // could be logged..
lengthInSamples = trackStartSamples.getLast() - trackStartSamples.getFirst();
}
}
bool AudioCDReader::readSamples (int** destSamples, int numDestChannels, int startOffsetInDestBuffer,
int64 startSampleInFile, int numSamples)
{
while (numSamples > 0)
{
int track = -1;
for (int i = 0; i < trackStartSamples.size() - 1; ++i)
{
if (startSampleInFile < trackStartSamples.getUnchecked (i + 1))
{
track = i;
break;
}
}
if (track < 0)
return false;
if (track != currentReaderTrack)
{
reader = nullptr;
if (FileInputStream* const in = tracks [track].createInputStream())
{
BufferedInputStream* const bin = new BufferedInputStream (in, 65536, true);
AiffAudioFormat format;
reader = format.createReaderFor (bin, true);
if (reader == nullptr)
currentReaderTrack = -1;
else
currentReaderTrack = track;
}
}
if (reader == nullptr)
return false;
const int startPos = (int) (startSampleInFile - trackStartSamples.getUnchecked (track));
const int numAvailable = (int) jmin ((int64) numSamples, reader->lengthInSamples - startPos);
reader->readSamples (destSamples, numDestChannels, startOffsetInDestBuffer, startPos, numAvailable);
numSamples -= numAvailable;
startSampleInFile += numAvailable;
}
return true;
}
bool AudioCDReader::isCDStillPresent() const
{
return volumeDir.exists();
}
void AudioCDReader::ejectDisk()
{
JUCE_AUTORELEASEPOOL
{
[[NSWorkspace sharedWorkspace] unmountAndEjectDeviceAtPath: juceStringToNS (volumeDir.getFullPathName())];
}
}
bool AudioCDReader::isTrackAudio (int trackNum) const
{
return tracks [trackNum].hasFileExtension (".aiff");
}
void AudioCDReader::enableIndexScanning (bool)
{
// any way to do this on a Mac??
}
int AudioCDReader::getLastIndex() const
{
return 0;
}
Array<int> AudioCDReader::findIndexesInTrack (const int /*trackNumber*/)
{
return Array<int>();
}
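// A minimal usage sketch (not part of the original commit): reading the first few seconds of the
// first track of the first mounted audio CD into a buffer. It assumes the usual public
// AudioCDReader accessors (getNumTracks(), getPositionOfTrackStart()) from the JUCE header.
static void readStartOfFirstCDTrackExample()
{
    const StringArray cdNames (AudioCDReader::getAvailableCDNames());

    if (cdNames.size() > 0)
    {
        ScopedPointer<AudioCDReader> reader (AudioCDReader::createReaderForCD (0));

        if (reader != nullptr && reader->getNumTracks() > 0)
        {
            const int numSamples = jmin ((int) reader->lengthInSamples, 5 * 44100);
            AudioSampleBuffer buffer (2, numSamples);
            reader->read (&buffer, 0, numSamples, reader->getPositionOfTrackStart (0), true, true);
        }
    }
}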

View file

@ -0,0 +1,530 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#ifndef JUCE_LOG_COREMIDI_ERRORS
#define JUCE_LOG_COREMIDI_ERRORS 1
#endif
namespace CoreMidiHelpers
{
static bool checkError (const OSStatus err, const int lineNum)
{
if (err == noErr)
return true;
#if JUCE_LOG_COREMIDI_ERRORS
Logger::writeToLog ("CoreMIDI error: " + String (lineNum) + " - " + String::toHexString ((int) err));
#endif
(void) lineNum;
return false;
}
#undef CHECK_ERROR
#define CHECK_ERROR(a) CoreMidiHelpers::checkError (a, __LINE__)
//==============================================================================
static String getMidiObjectName (MIDIObjectRef entity)
{
String result;
CFStringRef str = nullptr;
MIDIObjectGetStringProperty (entity, kMIDIPropertyName, &str);
if (str != nullptr)
{
result = String::fromCFString (str);
CFRelease (str);
}
return result;
}
static String getEndpointName (MIDIEndpointRef endpoint, bool isExternal)
{
String result (getMidiObjectName (endpoint));
MIDIEntityRef entity = 0; // NB: don't attempt to use nullptr for refs - it fails in some types of build.
MIDIEndpointGetEntity (endpoint, &entity);
if (entity == 0)
return result; // probably virtual
if (result.isEmpty())
result = getMidiObjectName (entity); // endpoint name is empty - try the entity
// now consider the device's name
MIDIDeviceRef device = 0;
MIDIEntityGetDevice (entity, &device);
if (device != 0)
{
const String deviceName (getMidiObjectName (device));
if (deviceName.isNotEmpty())
{
// if an external device has only one entity, throw away
// the endpoint name and just use the device name
if (isExternal && MIDIDeviceGetNumberOfEntities (device) < 2)
{
result = deviceName;
}
else if (! result.startsWithIgnoreCase (deviceName))
{
// prepend the device name to the entity name
result = (deviceName + " " + result).trimEnd();
}
}
}
return result;
}
static String getConnectedEndpointName (MIDIEndpointRef endpoint)
{
String result;
// Does the endpoint have connections?
CFDataRef connections = nullptr;
int numConnections = 0;
MIDIObjectGetDataProperty (endpoint, kMIDIPropertyConnectionUniqueID, &connections);
if (connections != nullptr)
{
numConnections = ((int) CFDataGetLength (connections)) / (int) sizeof (MIDIUniqueID);
if (numConnections > 0)
{
const SInt32* pid = reinterpret_cast <const SInt32*> (CFDataGetBytePtr (connections));
for (int i = 0; i < numConnections; ++i, ++pid)
{
MIDIUniqueID uid = (MIDIUniqueID) ByteOrder::swapIfLittleEndian ((uint32) *pid);
MIDIObjectRef connObject;
MIDIObjectType connObjectType;
OSStatus err = MIDIObjectFindByUniqueID (uid, &connObject, &connObjectType);
if (err == noErr)
{
String s;
if (connObjectType == kMIDIObjectType_ExternalSource
|| connObjectType == kMIDIObjectType_ExternalDestination)
{
// Connected to an external device's endpoint (10.3 and later).
s = getEndpointName (static_cast <MIDIEndpointRef> (connObject), true);
}
else
{
// Connected to an external device (10.2) (or something else, catch-all)
s = getMidiObjectName (connObject);
}
if (s.isNotEmpty())
{
if (result.isNotEmpty())
result += ", ";
result += s;
}
}
}
}
CFRelease (connections);
}
if (result.isEmpty()) // Here, either the endpoint had no connections, or we failed to obtain names for them.
result = getEndpointName (endpoint, false);
return result;
}
static StringArray findDevices (const bool forInput)
{
const ItemCount num = forInput ? MIDIGetNumberOfSources()
: MIDIGetNumberOfDestinations();
StringArray s;
for (ItemCount i = 0; i < num; ++i)
{
MIDIEndpointRef dest = forInput ? MIDIGetSource (i)
: MIDIGetDestination (i);
String name;
if (dest != 0)
name = getConnectedEndpointName (dest);
if (name.isEmpty())
name = "<error>";
s.add (name);
}
return s;
}
static void globalSystemChangeCallback (const MIDINotification*, void*)
{
// TODO.. Should pass-on this notification..
}
static String getGlobalMidiClientName()
{
if (JUCEApplicationBase* const app = JUCEApplicationBase::getInstance())
return app->getApplicationName();
return "JUCE";
}
static MIDIClientRef getGlobalMidiClient()
{
static MIDIClientRef globalMidiClient = 0;
if (globalMidiClient == 0)
{
// Since OSX 10.6, the MIDIClientCreate function will only work
// correctly when called from the message thread!
jassert (MessageManager::getInstance()->isThisTheMessageThread());
CFStringRef name = getGlobalMidiClientName().toCFString();
CHECK_ERROR (MIDIClientCreate (name, &globalSystemChangeCallback, nullptr, &globalMidiClient));
CFRelease (name);
}
return globalMidiClient;
}
//==============================================================================
class MidiPortAndEndpoint
{
public:
MidiPortAndEndpoint (MIDIPortRef p, MIDIEndpointRef ep)
: port (p), endPoint (ep)
{
}
~MidiPortAndEndpoint()
{
if (port != 0)
MIDIPortDispose (port);
if (port == 0 && endPoint != 0) // if the port is 0, it means we created the endpoint ourselves, so it's safe to delete it
MIDIEndpointDispose (endPoint);
}
void send (const MIDIPacketList* const packets)
{
if (port != 0)
MIDISend (port, endPoint, packets);
else
MIDIReceived (endPoint, packets);
}
MIDIPortRef port;
MIDIEndpointRef endPoint;
};
//==============================================================================
class MidiPortAndCallback;
CriticalSection callbackLock;
Array<MidiPortAndCallback*> activeCallbacks;
class MidiPortAndCallback
{
public:
MidiPortAndCallback (MidiInputCallback& cb)
: input (nullptr), active (false), callback (cb), concatenator (2048)
{
}
~MidiPortAndCallback()
{
active = false;
{
const ScopedLock sl (callbackLock);
activeCallbacks.removeFirstMatchingValue (this);
}
if (portAndEndpoint != 0 && portAndEndpoint->port != 0)
CHECK_ERROR (MIDIPortDisconnectSource (portAndEndpoint->port, portAndEndpoint->endPoint));
}
void handlePackets (const MIDIPacketList* const pktlist)
{
const double time = Time::getMillisecondCounterHiRes() * 0.001;
const ScopedLock sl (callbackLock);
if (activeCallbacks.contains (this) && active)
{
const MIDIPacket* packet = &pktlist->packet[0];
for (unsigned int i = 0; i < pktlist->numPackets; ++i)
{
concatenator.pushMidiData (packet->data, (int) packet->length, time,
input, callback);
packet = MIDIPacketNext (packet);
}
}
}
MidiInput* input;
ScopedPointer<MidiPortAndEndpoint> portAndEndpoint;
volatile bool active;
private:
MidiInputCallback& callback;
MidiDataConcatenator concatenator;
};
static void midiInputProc (const MIDIPacketList* pktlist, void* readProcRefCon, void* /*srcConnRefCon*/)
{
static_cast <MidiPortAndCallback*> (readProcRefCon)->handlePackets (pktlist);
}
}
//==============================================================================
StringArray MidiOutput::getDevices() { return CoreMidiHelpers::findDevices (false); }
int MidiOutput::getDefaultDeviceIndex() { return 0; }
MidiOutput* MidiOutput::openDevice (int index)
{
MidiOutput* mo = nullptr;
if (isPositiveAndBelow (index, (int) MIDIGetNumberOfDestinations()))
{
MIDIEndpointRef endPoint = MIDIGetDestination ((ItemCount) index);
CFStringRef pname;
if (CHECK_ERROR (MIDIObjectGetStringProperty (endPoint, kMIDIPropertyName, &pname)))
{
MIDIClientRef client = CoreMidiHelpers::getGlobalMidiClient();
MIDIPortRef port;
if (client != 0 && CHECK_ERROR (MIDIOutputPortCreate (client, pname, &port)))
{
mo = new MidiOutput();
mo->internal = new CoreMidiHelpers::MidiPortAndEndpoint (port, endPoint);
}
CFRelease (pname);
}
}
return mo;
}
MidiOutput* MidiOutput::createNewDevice (const String& deviceName)
{
MidiOutput* mo = nullptr;
MIDIClientRef client = CoreMidiHelpers::getGlobalMidiClient();
MIDIEndpointRef endPoint;
CFStringRef name = deviceName.toCFString();
if (client != 0 && CHECK_ERROR (MIDISourceCreate (client, name, &endPoint)))
{
mo = new MidiOutput();
mo->internal = new CoreMidiHelpers::MidiPortAndEndpoint (0, endPoint);
}
CFRelease (name);
return mo;
}
MidiOutput::~MidiOutput()
{
stopBackgroundThread();
delete static_cast<CoreMidiHelpers::MidiPortAndEndpoint*> (internal);
}
void MidiOutput::sendMessageNow (const MidiMessage& message)
{
#if JUCE_IOS
const MIDITimeStamp timeStamp = mach_absolute_time();
#else
const MIDITimeStamp timeStamp = AudioGetCurrentHostTime();
#endif
HeapBlock <MIDIPacketList> allocatedPackets;
MIDIPacketList stackPacket;
MIDIPacketList* packetToSend = &stackPacket;
const size_t dataSize = (size_t) message.getRawDataSize();
if (message.isSysEx())
{
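// Large sysex messages are split across consecutive MIDIPackets of up to
// maxPacketSize bytes each, all sharing the same timestamp.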
const int maxPacketSize = 256;
int pos = 0, bytesLeft = (int) dataSize;
const int numPackets = (bytesLeft + maxPacketSize - 1) / maxPacketSize;
allocatedPackets.malloc ((size_t) (32 * (size_t) numPackets + dataSize), 1);
packetToSend = allocatedPackets;
packetToSend->numPackets = (UInt32) numPackets;
MIDIPacket* p = packetToSend->packet;
for (int i = 0; i < numPackets; ++i)
{
p->timeStamp = timeStamp;
p->length = (UInt16) jmin (maxPacketSize, bytesLeft);
memcpy (p->data, message.getRawData() + pos, p->length);
pos += p->length;
bytesLeft -= p->length;
p = MIDIPacketNext (p);
}
}
else if (dataSize < 65536) // max packet size
{
const size_t stackCapacity = sizeof (stackPacket.packet->data);
if (dataSize > stackCapacity)
{
allocatedPackets.malloc ((sizeof (MIDIPacketList) - stackCapacity) + dataSize, 1);
packetToSend = allocatedPackets;
}
packetToSend->numPackets = 1;
MIDIPacket& p = *(packetToSend->packet);
p.timeStamp = timeStamp;
p.length = (UInt16) dataSize;
memcpy (p.data, message.getRawData(), dataSize);
}
else
{
jassertfalse; // packet too large to send!
return;
}
static_cast<CoreMidiHelpers::MidiPortAndEndpoint*> (internal)->send (packetToSend);
}
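/*  A minimal usage sketch for this implementation (illustrative only: device
    index 0 and the note values are arbitrary, and openDevice() can return
    nullptr if the device can't be opened):

        ScopedPointer<MidiOutput> out (MidiOutput::openDevice (0));

        if (out != nullptr)
            out->sendMessageNow (MidiMessage::noteOn (1, 60, (uint8) 100));
*/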
//==============================================================================
StringArray MidiInput::getDevices() { return CoreMidiHelpers::findDevices (true); }
int MidiInput::getDefaultDeviceIndex() { return 0; }
MidiInput* MidiInput::openDevice (int index, MidiInputCallback* callback)
{
jassert (callback != nullptr);
using namespace CoreMidiHelpers;
MidiInput* newInput = nullptr;
if (isPositiveAndBelow (index, (int) MIDIGetNumberOfSources()))
{
if (MIDIEndpointRef endPoint = MIDIGetSource ((ItemCount) index))
{
CFStringRef name;
if (CHECK_ERROR (MIDIObjectGetStringProperty (endPoint, kMIDIPropertyName, &name)))
{
if (MIDIClientRef client = getGlobalMidiClient())
{
MIDIPortRef port;
ScopedPointer <MidiPortAndCallback> mpc (new MidiPortAndCallback (*callback));
if (CHECK_ERROR (MIDIInputPortCreate (client, name, midiInputProc, mpc, &port)))
{
if (CHECK_ERROR (MIDIPortConnectSource (port, endPoint, nullptr)))
{
mpc->portAndEndpoint = new MidiPortAndEndpoint (port, endPoint);
newInput = new MidiInput (getDevices() [index]);
mpc->input = newInput;
newInput->internal = mpc;
const ScopedLock sl (callbackLock);
activeCallbacks.add (mpc.release());
}
else
{
CHECK_ERROR (MIDIPortDispose (port));
}
}
}
}
CFRelease (name);
}
}
return newInput;
}
MidiInput* MidiInput::createNewDevice (const String& deviceName, MidiInputCallback* callback)
{
jassert (callback != nullptr);
using namespace CoreMidiHelpers;
MidiInput* mi = nullptr;
if (MIDIClientRef client = getGlobalMidiClient())
{
ScopedPointer <MidiPortAndCallback> mpc (new MidiPortAndCallback (*callback));
mpc->active = false;
MIDIEndpointRef endPoint;
CFStringRef name = deviceName.toCFString();
if (CHECK_ERROR (MIDIDestinationCreate (client, name, midiInputProc, mpc, &endPoint)))
{
mpc->portAndEndpoint = new MidiPortAndEndpoint (0, endPoint);
mi = new MidiInput (deviceName);
mpc->input = mi;
mi->internal = mpc;
const ScopedLock sl (callbackLock);
activeCallbacks.add (mpc.release());
}
CFRelease (name);
}
return mi;
}
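/*  A minimal sketch of creating a virtual input port with this method
    (illustrative only: the port name is arbitrary, and 'myCallback' stands for
    some MidiInputCallback object owned by the caller):

        ScopedPointer<MidiInput> in (MidiInput::createNewDevice ("My Virtual Port", &myCallback));

        if (in != nullptr)
            in->start();
*/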
MidiInput::MidiInput (const String& nm) : name (nm)
{
}
MidiInput::~MidiInput()
{
delete static_cast<CoreMidiHelpers::MidiPortAndCallback*> (internal);
}
void MidiInput::start()
{
const ScopedLock sl (CoreMidiHelpers::callbackLock);
static_cast<CoreMidiHelpers::MidiPortAndCallback*> (internal)->active = true;
}
void MidiInput::stop()
{
const ScopedLock sl (CoreMidiHelpers::callbackLock);
static_cast<CoreMidiHelpers::MidiPortAndCallback*> (internal)->active = false;
}
#undef CHECK_ERROR


@ -0,0 +1,411 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
namespace CDBurnerHelpers
{
IDiscRecorder* enumCDBurners (StringArray* list, int indexToOpen, IDiscMaster** master)
{
CoInitialize (0);
IDiscMaster* dm;
IDiscRecorder* result = nullptr;
if (SUCCEEDED (CoCreateInstance (CLSID_MSDiscMasterObj, 0,
CLSCTX_INPROC_SERVER | CLSCTX_LOCAL_SERVER,
IID_IDiscMaster,
(void**) &dm)))
{
if (SUCCEEDED (dm->Open()))
{
IEnumDiscRecorders* drEnum = nullptr;
if (SUCCEEDED (dm->EnumDiscRecorders (&drEnum)))
{
IDiscRecorder* dr = nullptr;
DWORD dummy;
int index = 0;
while (drEnum->Next (1, &dr, &dummy) == S_OK)
{
if (indexToOpen == index)
{
result = dr;
break;
}
else if (list != nullptr)
{
BSTR path;
if (SUCCEEDED (dr->GetPath (&path)))
list->add ((const WCHAR*) path);
}
++index;
dr->Release();
}
drEnum->Release();
}
if (master == 0)
dm->Close();
}
if (master != nullptr)
*master = dm;
else
dm->Release();
}
return result;
}
}
//==============================================================================
class AudioCDBurner::Pimpl : public ComBaseClassHelper <IDiscMasterProgressEvents>,
public Timer
{
public:
Pimpl (AudioCDBurner& owner_, IDiscMaster* discMaster_, IDiscRecorder* discRecorder_)
: owner (owner_), discMaster (discMaster_), discRecorder (discRecorder_), redbook (0),
listener (0), progress (0), shouldCancel (false)
{
HRESULT hr = discMaster->SetActiveDiscMasterFormat (IID_IRedbookDiscMaster, (void**) &redbook);
jassert (SUCCEEDED (hr));
hr = discMaster->SetActiveDiscRecorder (discRecorder);
//jassert (SUCCEEDED (hr));
lastState = getDiskState();
startTimer (2000);
}
~Pimpl() {}
void releaseObjects()
{
discRecorder->Close();
if (redbook != nullptr)
redbook->Release();
discRecorder->Release();
discMaster->Release();
Release();
}
JUCE_COMRESULT QueryCancel (boolean* pbCancel)
{
if (listener != nullptr && ! shouldCancel)
shouldCancel = listener->audioCDBurnProgress (progress);
*pbCancel = shouldCancel;
return S_OK;
}
JUCE_COMRESULT NotifyBlockProgress (long nCompleted, long nTotal)
{
progress = nCompleted / (float) nTotal;
shouldCancel = listener != nullptr && listener->audioCDBurnProgress (progress);
return E_NOTIMPL;
}
JUCE_COMRESULT NotifyPnPActivity (void) { return E_NOTIMPL; }
JUCE_COMRESULT NotifyAddProgress (long /*nCompletedSteps*/, long /*nTotalSteps*/) { return E_NOTIMPL; }
JUCE_COMRESULT NotifyTrackProgress (long /*nCurrentTrack*/, long /*nTotalTracks*/) { return E_NOTIMPL; }
JUCE_COMRESULT NotifyPreparingBurn (long /*nEstimatedSeconds*/) { return E_NOTIMPL; }
JUCE_COMRESULT NotifyClosingDisc (long /*nEstimatedSeconds*/) { return E_NOTIMPL; }
JUCE_COMRESULT NotifyBurnComplete (HRESULT /*status*/) { return E_NOTIMPL; }
JUCE_COMRESULT NotifyEraseComplete (HRESULT /*status*/) { return E_NOTIMPL; }
class ScopedDiscOpener
{
public:
ScopedDiscOpener (Pimpl& p) : pimpl (p) { pimpl.discRecorder->OpenExclusive(); }
~ScopedDiscOpener() { pimpl.discRecorder->Close(); }
private:
Pimpl& pimpl;
JUCE_DECLARE_NON_COPYABLE (ScopedDiscOpener)
};
DiskState getDiskState()
{
const ScopedDiscOpener opener (*this);
long type, flags;
HRESULT hr = discRecorder->QueryMediaType (&type, &flags);
if (FAILED (hr))
return unknown;
if (type != 0 && (flags & MEDIA_WRITABLE) != 0)
return writableDiskPresent;
if (type == 0)
return noDisc;
return readOnlyDiskPresent;
}
int getIntProperty (const LPOLESTR name, const int defaultReturn) const
{
ComSmartPtr<IPropertyStorage> prop;
if (FAILED (discRecorder->GetRecorderProperties (prop.resetAndGetPointerAddress())))
return defaultReturn;
PROPSPEC iPropSpec;
iPropSpec.ulKind = PRSPEC_LPWSTR;
iPropSpec.lpwstr = name;
PROPVARIANT iPropVariant;
return FAILED (prop->ReadMultiple (1, &iPropSpec, &iPropVariant))
? defaultReturn : (int) iPropVariant.lVal;
}
bool setIntProperty (const LPOLESTR name, const int value) const
{
ComSmartPtr<IPropertyStorage> prop;
if (FAILED (discRecorder->GetRecorderProperties (prop.resetAndGetPointerAddress())))
return false;
PROPSPEC iPropSpec;
iPropSpec.ulKind = PRSPEC_LPWSTR;
iPropSpec.lpwstr = name;
PROPVARIANT iPropVariant;
if (FAILED (prop->ReadMultiple (1, &iPropSpec, &iPropVariant)))
return false;
iPropVariant.lVal = (long) value;
return SUCCEEDED (prop->WriteMultiple (1, &iPropSpec, &iPropVariant, iPropVariant.vt))
&& SUCCEEDED (discRecorder->SetRecorderProperties (prop));
}
void timerCallback() override
{
const DiskState state = getDiskState();
if (state != lastState)
{
lastState = state;
owner.sendChangeMessage();
}
}
AudioCDBurner& owner;
DiskState lastState;
IDiscMaster* discMaster;
IDiscRecorder* discRecorder;
IRedbookDiscMaster* redbook;
AudioCDBurner::BurnProgressListener* listener;
float progress;
bool shouldCancel;
};
//==============================================================================
AudioCDBurner::AudioCDBurner (const int deviceIndex)
{
IDiscMaster* discMaster = nullptr;
IDiscRecorder* discRecorder = CDBurnerHelpers::enumCDBurners (0, deviceIndex, &discMaster);
if (discRecorder != nullptr)
pimpl = new Pimpl (*this, discMaster, discRecorder);
}
AudioCDBurner::~AudioCDBurner()
{
if (pimpl != nullptr)
pimpl.release()->releaseObjects();
}
StringArray AudioCDBurner::findAvailableDevices()
{
StringArray devs;
CDBurnerHelpers::enumCDBurners (&devs, -1, 0);
return devs;
}
AudioCDBurner* AudioCDBurner::openDevice (const int deviceIndex)
{
ScopedPointer<AudioCDBurner> b (new AudioCDBurner (deviceIndex));
if (b->pimpl == 0)
b = nullptr;
return b.release();
}
AudioCDBurner::DiskState AudioCDBurner::getDiskState() const
{
return pimpl->getDiskState();
}
bool AudioCDBurner::isDiskPresent() const
{
return getDiskState() == writableDiskPresent;
}
bool AudioCDBurner::openTray()
{
const Pimpl::ScopedDiscOpener opener (*pimpl);
return SUCCEEDED (pimpl->discRecorder->Eject());
}
AudioCDBurner::DiskState AudioCDBurner::waitUntilStateChange (int timeOutMilliseconds)
{
const int64 timeout = Time::currentTimeMillis() + timeOutMilliseconds;
DiskState oldState = getDiskState();
DiskState newState = oldState;
while (newState == oldState && Time::currentTimeMillis() < timeout)
{
newState = getDiskState();
Thread::sleep (jmin (250, (int) (timeout - Time::currentTimeMillis())));
}
return newState;
}
Array<int> AudioCDBurner::getAvailableWriteSpeeds() const
{
Array<int> results;
const int maxSpeed = pimpl->getIntProperty (L"MaxWriteSpeed", 1);
const int speeds[] = { 1, 2, 4, 8, 12, 16, 20, 24, 32, 40, 64, 80 };
for (int i = 0; i < numElementsInArray (speeds); ++i)
if (speeds[i] <= maxSpeed)
results.add (speeds[i]);
results.addIfNotAlreadyThere (maxSpeed);
return results;
}
bool AudioCDBurner::setBufferUnderrunProtection (const bool shouldBeEnabled)
{
if (pimpl->getIntProperty (L"BufferUnderrunFreeCapable", 0) == 0)
return false;
pimpl->setIntProperty (L"EnableBufferUnderrunFree", shouldBeEnabled ? -1 : 0);
return pimpl->getIntProperty (L"EnableBufferUnderrunFree", 0) != 0;
}
int AudioCDBurner::getNumAvailableAudioBlocks() const
{
long blocksFree = 0;
pimpl->redbook->GetAvailableAudioTrackBlocks (&blocksFree);
return blocksFree;
}
String AudioCDBurner::burn (AudioCDBurner::BurnProgressListener* listener, bool ejectDiscAfterwards,
bool performFakeBurnForTesting, int writeSpeed)
{
pimpl->setIntProperty (L"WriteSpeed", writeSpeed > 0 ? writeSpeed : -1);
pimpl->listener = listener;
pimpl->progress = 0;
pimpl->shouldCancel = false;
UINT_PTR cookie;
HRESULT hr = pimpl->discMaster->ProgressAdvise ((AudioCDBurner::Pimpl*) pimpl, &cookie);
hr = pimpl->discMaster->RecordDisc (performFakeBurnForTesting,
ejectDiscAfterwards);
String error;
if (hr != S_OK)
{
const char* e = "Couldn't open or write to the CD device";
if (hr == IMAPI_E_USERABORT)
e = "User cancelled the write operation";
else if (hr == IMAPI_E_MEDIUM_NOTPRESENT || hr == IMAPI_E_TRACKOPEN)
e = "No Disk present";
error = e;
}
pimpl->discMaster->ProgressUnadvise (cookie);
pimpl->listener = 0;
return error;
}
bool AudioCDBurner::addAudioTrack (AudioSource* audioSource, int numSamples)
{
if (audioSource == 0)
return false;
ScopedPointer<AudioSource> source (audioSource);
long bytesPerBlock;
HRESULT hr = pimpl->redbook->GetAudioBlockSize (&bytesPerBlock);
const int samplesPerBlock = bytesPerBlock / 4;
bool ok = true;
hr = pimpl->redbook->CreateAudioTrack ((long) numSamples / (bytesPerBlock * 4));
HeapBlock <byte> buffer (bytesPerBlock);
AudioSampleBuffer sourceBuffer (2, samplesPerBlock);
int samplesDone = 0;
source->prepareToPlay (samplesPerBlock, 44100.0);
while (ok)
{
{
AudioSourceChannelInfo info (&sourceBuffer, 0, samplesPerBlock);
sourceBuffer.clear();
source->getNextAudioBlock (info);
}
buffer.clear (bytesPerBlock);
typedef AudioData::Pointer <AudioData::Int16, AudioData::LittleEndian,
AudioData::Interleaved, AudioData::NonConst> CDSampleFormat;
typedef AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian,
AudioData::NonInterleaved, AudioData::Const> SourceSampleFormat;
CDSampleFormat left (buffer, 2);
left.convertSamples (SourceSampleFormat (sourceBuffer.getReadPointer (0)), samplesPerBlock);
CDSampleFormat right (buffer + 2, 2);
right.convertSamples (SourceSampleFormat (sourceBuffer.getReadPointer (1)), samplesPerBlock);
hr = pimpl->redbook->AddAudioTrackBlocks (buffer, bytesPerBlock);
if (FAILED (hr))
ok = false;
samplesDone += samplesPerBlock;
if (samplesDone >= numSamples)
break;
}
hr = pimpl->redbook->CloseAudioTrack();
return ok && hr == S_OK;
}
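/*  A rough outline of a typical burn with this class (illustrative only: error
    handling is omitted, 'mySource' and 'numSamples' stand for a caller-supplied
    AudioSource pointer and its length in samples at 44100Hz, and note that
    addAudioTrack() takes ownership of the source):

        ScopedPointer<AudioCDBurner> burner (AudioCDBurner::openDevice (0));

        if (burner != nullptr && burner->isDiskPresent())
        {
            burner->addAudioTrack (mySource, numSamples);
            const String error (burner->burn (nullptr, true, false, 0));
        }
*/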


@ -0,0 +1,489 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
class MidiInCollector
{
public:
MidiInCollector (MidiInput* const input_,
MidiInputCallback& callback_)
: deviceHandle (0),
input (input_),
callback (callback_),
concatenator (4096),
isStarted (false),
startTime (0)
{
}
~MidiInCollector()
{
stop();
if (deviceHandle != 0)
{
for (int count = 5; --count >= 0;)
{
if (midiInClose (deviceHandle) == MMSYSERR_NOERROR)
break;
Sleep (20);
}
}
}
//==============================================================================
void handleMessage (const uint8* bytes, const uint32 timeStamp)
{
if (bytes[0] >= 0x80 && isStarted)
{
concatenator.pushMidiData (bytes, MidiMessage::getMessageLengthFromFirstByte (bytes[0]),
convertTimeStamp (timeStamp), input, callback);
writeFinishedBlocks();
}
}
void handleSysEx (MIDIHDR* const hdr, const uint32 timeStamp)
{
if (isStarted && hdr->dwBytesRecorded > 0)
{
concatenator.pushMidiData (hdr->lpData, (int) hdr->dwBytesRecorded,
convertTimeStamp (timeStamp), input, callback);
writeFinishedBlocks();
}
}
void start()
{
if (deviceHandle != 0 && ! isStarted)
{
activeMidiCollectors.addIfNotAlreadyThere (this);
for (int i = 0; i < (int) numHeaders; ++i)
{
headers[i].prepare (deviceHandle);
headers[i].write (deviceHandle);
}
startTime = Time::getMillisecondCounterHiRes();
MMRESULT res = midiInStart (deviceHandle);
if (res == MMSYSERR_NOERROR)
{
concatenator.reset();
isStarted = true;
}
else
{
unprepareAllHeaders();
}
}
}
void stop()
{
if (isStarted)
{
isStarted = false;
midiInReset (deviceHandle);
midiInStop (deviceHandle);
activeMidiCollectors.removeFirstMatchingValue (this);
unprepareAllHeaders();
concatenator.reset();
}
}
static void CALLBACK midiInCallback (HMIDIIN, UINT uMsg, DWORD_PTR dwInstance,
DWORD_PTR midiMessage, DWORD_PTR timeStamp)
{
MidiInCollector* const collector = reinterpret_cast <MidiInCollector*> (dwInstance);
if (activeMidiCollectors.contains (collector))
{
if (uMsg == MIM_DATA)
collector->handleMessage ((const uint8*) &midiMessage, (uint32) timeStamp);
else if (uMsg == MIM_LONGDATA)
collector->handleSysEx ((MIDIHDR*) midiMessage, (uint32) timeStamp);
}
}
HMIDIIN deviceHandle;
private:
static Array <MidiInCollector*, CriticalSection> activeMidiCollectors;
MidiInput* input;
MidiInputCallback& callback;
MidiDataConcatenator concatenator;
bool volatile isStarted;
double startTime;
class MidiHeader
{
public:
MidiHeader() {}
void prepare (HMIDIIN deviceHandle)
{
zerostruct (hdr);
hdr.lpData = data;
hdr.dwBufferLength = (DWORD) numElementsInArray (data);
midiInPrepareHeader (deviceHandle, &hdr, sizeof (hdr));
}
void unprepare (HMIDIIN deviceHandle)
{
if ((hdr.dwFlags & WHDR_DONE) != 0)
{
int c = 10;
while (--c >= 0 && midiInUnprepareHeader (deviceHandle, &hdr, sizeof (hdr)) == MIDIERR_STILLPLAYING)
Thread::sleep (20);
jassert (c >= 0);
}
}
void write (HMIDIIN deviceHandle)
{
hdr.dwBytesRecorded = 0;
midiInAddBuffer (deviceHandle, &hdr, sizeof (hdr));
}
void writeIfFinished (HMIDIIN deviceHandle)
{
if ((hdr.dwFlags & WHDR_DONE) != 0)
write (deviceHandle);
}
private:
MIDIHDR hdr;
char data [256];
JUCE_DECLARE_NON_COPYABLE (MidiHeader)
};
enum { numHeaders = 32 };
MidiHeader headers [numHeaders];
void writeFinishedBlocks()
{
for (int i = 0; i < (int) numHeaders; ++i)
headers[i].writeIfFinished (deviceHandle);
}
void unprepareAllHeaders()
{
for (int i = 0; i < (int) numHeaders; ++i)
headers[i].unprepare (deviceHandle);
}
double convertTimeStamp (uint32 timeStamp)
{
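// The driver's timestamp is a millisecond offset from midiInStart(), so it's re-based
// onto the high-res counter; if drift pushes it ahead of 'now', the value is clamped
// and startTime is nudged back so that later timestamps don't run ahead of real time.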
double t = startTime + timeStamp;
const double now = Time::getMillisecondCounterHiRes();
if (t > now)
{
if (t > now + 2.0)
startTime -= 1.0;
t = now;
}
return t * 0.001;
}
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (MidiInCollector)
};
Array <MidiInCollector*, CriticalSection> MidiInCollector::activeMidiCollectors;
//==============================================================================
StringArray MidiInput::getDevices()
{
StringArray s;
const UINT num = midiInGetNumDevs();
for (UINT i = 0; i < num; ++i)
{
MIDIINCAPS mc = { 0 };
if (midiInGetDevCaps (i, &mc, sizeof (mc)) == MMSYSERR_NOERROR)
s.add (String (mc.szPname, sizeof (mc.szPname)));
}
return s;
}
int MidiInput::getDefaultDeviceIndex()
{
return 0;
}
MidiInput* MidiInput::openDevice (const int index, MidiInputCallback* const callback)
{
if (callback == nullptr)
return nullptr;
UINT deviceId = MIDI_MAPPER;
int n = 0;
String name;
const UINT num = midiInGetNumDevs();
for (UINT i = 0; i < num; ++i)
{
MIDIINCAPS mc = { 0 };
if (midiInGetDevCaps (i, &mc, sizeof (mc)) == MMSYSERR_NOERROR)
{
if (index == n)
{
deviceId = i;
name = String (mc.szPname, (size_t) numElementsInArray (mc.szPname));
break;
}
++n;
}
}
ScopedPointer <MidiInput> in (new MidiInput (name));
ScopedPointer <MidiInCollector> collector (new MidiInCollector (in, *callback));
HMIDIIN h;
MMRESULT err = midiInOpen (&h, deviceId,
(DWORD_PTR) &MidiInCollector::midiInCallback,
(DWORD_PTR) (MidiInCollector*) collector,
CALLBACK_FUNCTION);
if (err == MMSYSERR_NOERROR)
{
collector->deviceHandle = h;
in->internal = collector.release();
return in.release();
}
return nullptr;
}
MidiInput::MidiInput (const String& name_)
: name (name_),
internal (0)
{
}
MidiInput::~MidiInput()
{
delete static_cast<MidiInCollector*> (internal);
}
void MidiInput::start() { static_cast<MidiInCollector*> (internal)->start(); }
void MidiInput::stop() { static_cast<MidiInCollector*> (internal)->stop(); }
//==============================================================================
struct MidiOutHandle
{
int refCount;
UINT deviceId;
HMIDIOUT handle;
static Array<MidiOutHandle*> activeHandles;
private:
JUCE_LEAK_DETECTOR (MidiOutHandle)
};
Array<MidiOutHandle*> MidiOutHandle::activeHandles;
//==============================================================================
StringArray MidiOutput::getDevices()
{
StringArray s;
const UINT num = midiOutGetNumDevs();
for (UINT i = 0; i < num; ++i)
{
MIDIOUTCAPS mc = { 0 };
if (midiOutGetDevCaps (i, &mc, sizeof (mc)) == MMSYSERR_NOERROR)
s.add (String (mc.szPname, sizeof (mc.szPname)));
}
return s;
}
int MidiOutput::getDefaultDeviceIndex()
{
const UINT num = midiOutGetNumDevs();
int n = 0;
for (UINT i = 0; i < num; ++i)
{
MIDIOUTCAPS mc = { 0 };
if (midiOutGetDevCaps (i, &mc, sizeof (mc)) == MMSYSERR_NOERROR)
{
if ((mc.wTechnology & MOD_MAPPER) != 0)
return n;
++n;
}
}
return 0;
}
MidiOutput* MidiOutput::openDevice (int index)
{
UINT deviceId = MIDI_MAPPER;
const UINT num = midiOutGetNumDevs();
int n = 0;
for (UINT i = 0; i < num; ++i)
{
MIDIOUTCAPS mc = { 0 };
if (midiOutGetDevCaps (i, &mc, sizeof (mc)) == MMSYSERR_NOERROR)
{
// use the microsoft sw synth as a default - best not to allow deviceId
// to be MIDI_MAPPER, or else device sharing breaks
if (String (mc.szPname, sizeof (mc.szPname)).containsIgnoreCase ("microsoft"))
deviceId = i;
if (index == n)
{
deviceId = i;
break;
}
++n;
}
}
for (int i = MidiOutHandle::activeHandles.size(); --i >= 0;)
{
MidiOutHandle* const han = MidiOutHandle::activeHandles.getUnchecked(i);
if (han->deviceId == deviceId)
{
han->refCount++;
MidiOutput* const out = new MidiOutput();
out->internal = han;
return out;
}
}
for (int i = 4; --i >= 0;)
{
HMIDIOUT h = 0;
MMRESULT res = midiOutOpen (&h, deviceId, 0, 0, CALLBACK_NULL);
if (res == MMSYSERR_NOERROR)
{
MidiOutHandle* const han = new MidiOutHandle();
han->deviceId = deviceId;
han->refCount = 1;
han->handle = h;
MidiOutHandle::activeHandles.add (han);
MidiOutput* const out = new MidiOutput();
out->internal = han;
return out;
}
else if (res == MMSYSERR_ALLOCATED)
{
Sleep (100);
}
else
{
break;
}
}
return nullptr;
}
MidiOutput::~MidiOutput()
{
stopBackgroundThread();
MidiOutHandle* const h = static_cast<MidiOutHandle*> (internal);
if (MidiOutHandle::activeHandles.contains (h) && --(h->refCount) == 0)
{
midiOutClose (h->handle);
MidiOutHandle::activeHandles.removeFirstMatchingValue (h);
delete h;
}
}
void MidiOutput::sendMessageNow (const MidiMessage& message)
{
const MidiOutHandle* const handle = static_cast<const MidiOutHandle*> (internal);
if (message.getRawDataSize() > 3 || message.isSysEx())
{
MIDIHDR h = { 0 };
h.lpData = (char*) message.getRawData();
h.dwBytesRecorded = h.dwBufferLength = (DWORD) message.getRawDataSize();
if (midiOutPrepareHeader (handle->handle, &h, sizeof (MIDIHDR)) == MMSYSERR_NOERROR)
{
MMRESULT res = midiOutLongMsg (handle->handle, &h, sizeof (MIDIHDR));
if (res == MMSYSERR_NOERROR)
{
while ((h.dwFlags & MHDR_DONE) == 0)
Sleep (1);
int count = 500; // 1 sec timeout
while (--count >= 0)
{
res = midiOutUnprepareHeader (handle->handle, &h, sizeof (MIDIHDR));
if (res == MIDIERR_STILLPLAYING)
Sleep (2);
else
break;
}
}
}
}
else
{
for (int i = 0; i < 50; ++i)
{
if (midiOutShortMsg (handle->handle, *(unsigned int*) message.getRawData()) != MIDIERR_NOTREADY)
break;
Sleep (1);
}
}
}
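/*  A minimal usage sketch for this implementation (illustrative only: device
    index 0 is arbitrary, the callback type is a placeholder, and openDevice()
    returns nullptr if the device can't be opened):

        struct MyMidiLogger  : public MidiInputCallback
        {
            void handleIncomingMidiMessage (MidiInput*, const MidiMessage& m) override
            {
                Logger::writeToLog ("Received a " + String (m.getRawDataSize()) + "-byte MIDI message");
            }
        };

        static MyMidiLogger midiLogger;
        ScopedPointer<MidiInput> in (MidiInput::openDevice (0, &midiLogger));

        if (in != nullptr)
            in->start();
*/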


@ -0,0 +1,182 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
AudioSourcePlayer::AudioSourcePlayer()
: source (nullptr),
sampleRate (0),
bufferSize (0),
lastGain (1.0f),
gain (1.0f)
{
}
AudioSourcePlayer::~AudioSourcePlayer()
{
setSource (nullptr);
}
void AudioSourcePlayer::setSource (AudioSource* newSource)
{
if (source != newSource)
{
AudioSource* const oldSource = source;
if (newSource != nullptr && bufferSize > 0 && sampleRate > 0)
newSource->prepareToPlay (bufferSize, sampleRate);
{
const ScopedLock sl (readLock);
source = newSource;
}
if (oldSource != nullptr)
oldSource->releaseResources();
}
}
void AudioSourcePlayer::setGain (const float newGain) noexcept
{
gain = newGain;
}
void AudioSourcePlayer::audioDeviceIOCallback (const float** inputChannelData,
int totalNumInputChannels,
float** outputChannelData,
int totalNumOutputChannels,
int numSamples)
{
// these should have been prepared by audioDeviceAboutToStart()...
jassert (sampleRate > 0 && bufferSize > 0);
const ScopedLock sl (readLock);
if (source != nullptr)
{
int numActiveChans = 0, numInputs = 0, numOutputs = 0;
// messy stuff needed to compact the channels down into an array
// of non-zero pointers..
for (int i = 0; i < totalNumInputChannels; ++i)
{
if (inputChannelData[i] != nullptr)
{
inputChans [numInputs++] = inputChannelData[i];
if (numInputs >= numElementsInArray (inputChans))
break;
}
}
for (int i = 0; i < totalNumOutputChannels; ++i)
{
if (outputChannelData[i] != nullptr)
{
outputChans [numOutputs++] = outputChannelData[i];
if (numOutputs >= numElementsInArray (outputChans))
break;
}
}
if (numInputs > numOutputs)
{
// if there aren't enough output channels for the number of
// inputs, we need to create some temporary extra ones (can't
// use the input data in case it gets written to)
tempBuffer.setSize (numInputs - numOutputs, numSamples,
false, false, true);
for (int i = 0; i < numOutputs; ++i)
{
channels[numActiveChans] = outputChans[i];
memcpy (channels[numActiveChans], inputChans[i], sizeof (float) * (size_t) numSamples);
++numActiveChans;
}
for (int i = numOutputs; i < numInputs; ++i)
{
channels[numActiveChans] = tempBuffer.getWritePointer (i - numOutputs);
memcpy (channels[numActiveChans], inputChans[i], sizeof (float) * (size_t) numSamples);
++numActiveChans;
}
}
else
{
for (int i = 0; i < numInputs; ++i)
{
channels[numActiveChans] = outputChans[i];
memcpy (channels[numActiveChans], inputChans[i], sizeof (float) * (size_t) numSamples);
++numActiveChans;
}
for (int i = numInputs; i < numOutputs; ++i)
{
channels[numActiveChans] = outputChans[i];
zeromem (channels[numActiveChans], sizeof (float) * (size_t) numSamples);
++numActiveChans;
}
}
AudioSampleBuffer buffer (channels, numActiveChans, numSamples);
AudioSourceChannelInfo info (&buffer, 0, numSamples);
source->getNextAudioBlock (info);
for (int i = info.buffer->getNumChannels(); --i >= 0;)
buffer.applyGainRamp (i, info.startSample, info.numSamples, lastGain, gain);
lastGain = gain;
}
else
{
for (int i = 0; i < totalNumOutputChannels; ++i)
if (outputChannelData[i] != nullptr)
zeromem (outputChannelData[i], sizeof (float) * (size_t) numSamples);
}
}
void AudioSourcePlayer::audioDeviceAboutToStart (AudioIODevice* device)
{
prepareToPlay (device->getCurrentSampleRate(),
device->getCurrentBufferSizeSamples());
}
void AudioSourcePlayer::prepareToPlay (double newSampleRate, int newBufferSize)
{
sampleRate = newSampleRate;
bufferSize = newBufferSize;
zeromem (channels, sizeof (channels));
if (source != nullptr)
source->prepareToPlay (bufferSize, sampleRate);
}
void AudioSourcePlayer::audioDeviceStopped()
{
if (source != nullptr)
source->releaseResources();
sampleRate = 0.0;
bufferSize = 0;
tempBuffer.setSize (2, 8);
}


@ -0,0 +1,115 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#ifndef JUCE_AUDIOSOURCEPLAYER_H_INCLUDED
#define JUCE_AUDIOSOURCEPLAYER_H_INCLUDED
//==============================================================================
/**
Wrapper class to continuously stream audio from an audio source to an
AudioIODevice.
This object acts as an AudioIODeviceCallback, so can be attached to an
output device, and will stream audio from an AudioSource.
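For example (a rough sketch only: it assumes that an AudioDeviceManager called
'deviceManager' has already been initialised, and that 'mySource' points to an
AudioSource owned by the caller):

@code
AudioSourcePlayer player;
deviceManager.addAudioCallback (&player);
player.setSource (mySource);

// ...and later, before the source or the player is deleted:
player.setSource (nullptr);
deviceManager.removeAudioCallback (&player);
@endcode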
*/
class JUCE_API AudioSourcePlayer : public AudioIODeviceCallback
{
public:
//==============================================================================
/** Creates an empty AudioSourcePlayer. */
AudioSourcePlayer();
/** Destructor.
Make sure this object isn't still being used by an AudioIODevice before
deleting it!
*/
virtual ~AudioSourcePlayer();
//==============================================================================
/** Changes the current audio source to play from.
If the source passed in is already being used, this method will do nothing.
If the source is not null, its prepareToPlay() method will be called
before it starts being used for playback.
If there's another source currently playing, its releaseResources() method
will be called after it has been swapped for the new one.
@param newSource the new source to use - this will NOT be deleted
by this object when no longer needed, so it's the
caller's responsibility to manage it.
*/
void setSource (AudioSource* newSource);
/** Returns the source that's playing.
May return nullptr if there's no source.
*/
AudioSource* getCurrentSource() const noexcept { return source; }
/** Sets a gain to apply to the audio data.
@see getGain
*/
void setGain (float newGain) noexcept;
/** Returns the current gain.
@see setGain
*/
float getGain() const noexcept { return gain; }
//==============================================================================
/** Implementation of the AudioIODeviceCallback method. */
void audioDeviceIOCallback (const float** inputChannelData,
int totalNumInputChannels,
float** outputChannelData,
int totalNumOutputChannels,
int numSamples) override;
/** Implementation of the AudioIODeviceCallback method. */
void audioDeviceAboutToStart (AudioIODevice* device) override;
/** Implementation of the AudioIODeviceCallback method. */
void audioDeviceStopped() override;
/** An alternative method for initialising the source without an AudioIODevice. */
void prepareToPlay (double sampleRate, int blockSize);
private:
//==============================================================================
CriticalSection readLock;
AudioSource* source;
double sampleRate;
int bufferSize;
float* channels [128];
float* outputChans [128];
const float* inputChans [128];
AudioSampleBuffer tempBuffer;
float lastGain, gain;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (AudioSourcePlayer)
};
#endif // JUCE_AUDIOSOURCEPLAYER_H_INCLUDED


@ -0,0 +1,298 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
AudioTransportSource::AudioTransportSource()
: source (nullptr),
resamplerSource (nullptr),
bufferingSource (nullptr),
positionableSource (nullptr),
masterSource (nullptr),
gain (1.0f),
lastGain (1.0f),
playing (false),
stopped (true),
sampleRate (44100.0),
sourceSampleRate (0.0),
blockSize (128),
readAheadBufferSize (0),
isPrepared (false),
inputStreamEOF (false)
{
}
AudioTransportSource::~AudioTransportSource()
{
setSource (nullptr);
releaseMasterResources();
}
void AudioTransportSource::setSource (PositionableAudioSource* const newSource,
int readAheadSize, TimeSliceThread* readAheadThread,
double sourceSampleRateToCorrectFor, int maxNumChannels)
{
if (source == newSource)
{
if (source == nullptr)
return;
setSource (nullptr, 0, nullptr); // deselect and reselect to avoid releasing resources wrongly
}
readAheadBufferSize = readAheadSize;
sourceSampleRate = sourceSampleRateToCorrectFor;
ResamplingAudioSource* newResamplerSource = nullptr;
BufferingAudioSource* newBufferingSource = nullptr;
PositionableAudioSource* newPositionableSource = nullptr;
AudioSource* newMasterSource = nullptr;
ScopedPointer<ResamplingAudioSource> oldResamplerSource (resamplerSource);
ScopedPointer<BufferingAudioSource> oldBufferingSource (bufferingSource);
AudioSource* oldMasterSource = masterSource;
if (newSource != nullptr)
{
newPositionableSource = newSource;
if (readAheadSize > 0)
{
// If you want to use a read-ahead buffer, you must also provide a TimeSliceThread
// for it to use!
jassert (readAheadThread != nullptr);
newPositionableSource = newBufferingSource
= new BufferingAudioSource (newPositionableSource, *readAheadThread,
false, readAheadSize, maxNumChannels);
}
newPositionableSource->setNextReadPosition (0);
if (sourceSampleRateToCorrectFor > 0)
newMasterSource = newResamplerSource
= new ResamplingAudioSource (newPositionableSource, false, maxNumChannels);
else
newMasterSource = newPositionableSource;
if (isPrepared)
{
if (newResamplerSource != nullptr && sourceSampleRate > 0 && sampleRate > 0)
newResamplerSource->setResamplingRatio (sourceSampleRate / sampleRate);
newMasterSource->prepareToPlay (blockSize, sampleRate);
}
}
{
const ScopedLock sl (callbackLock);
source = newSource;
resamplerSource = newResamplerSource;
bufferingSource = newBufferingSource;
masterSource = newMasterSource;
positionableSource = newPositionableSource;
inputStreamEOF = false;
playing = false;
}
if (oldMasterSource != nullptr)
oldMasterSource->releaseResources();
}
void AudioTransportSource::start()
{
if ((! playing) && masterSource != nullptr)
{
{
const ScopedLock sl (callbackLock);
playing = true;
stopped = false;
inputStreamEOF = false;
}
sendChangeMessage();
}
}
void AudioTransportSource::stop()
{
if (playing)
{
{
const ScopedLock sl (callbackLock);
playing = false;
}
int n = 500;
while (--n >= 0 && ! stopped)
Thread::sleep (2);
sendChangeMessage();
}
}
void AudioTransportSource::setPosition (double newPosition)
{
if (sampleRate > 0.0)
setNextReadPosition ((int64) (newPosition * sampleRate));
}
double AudioTransportSource::getCurrentPosition() const
{
if (sampleRate > 0.0)
return getNextReadPosition() / sampleRate;
return 0.0;
}
double AudioTransportSource::getLengthInSeconds() const
{
if (sampleRate > 0.0)
return getTotalLength() / sampleRate;
return 0.0;
}
void AudioTransportSource::setNextReadPosition (int64 newPosition)
{
if (positionableSource != nullptr)
{
if (sampleRate > 0 && sourceSampleRate > 0)
newPosition = (int64) (newPosition * sourceSampleRate / sampleRate);
positionableSource->setNextReadPosition (newPosition);
if (resamplerSource != nullptr)
resamplerSource->flushBuffers();
inputStreamEOF = false;
}
}
int64 AudioTransportSource::getNextReadPosition() const
{
if (positionableSource != nullptr)
{
const double ratio = (sampleRate > 0 && sourceSampleRate > 0) ? sampleRate / sourceSampleRate : 1.0;
return (int64) (positionableSource->getNextReadPosition() * ratio);
}
return 0;
}
int64 AudioTransportSource::getTotalLength() const
{
const ScopedLock sl (callbackLock);
if (positionableSource != nullptr)
{
const double ratio = (sampleRate > 0 && sourceSampleRate > 0) ? sampleRate / sourceSampleRate : 1.0;
return (int64) (positionableSource->getTotalLength() * ratio);
}
return 0;
}
bool AudioTransportSource::isLooping() const
{
const ScopedLock sl (callbackLock);
return positionableSource != nullptr && positionableSource->isLooping();
}
void AudioTransportSource::setGain (const float newGain) noexcept
{
gain = newGain;
}
void AudioTransportSource::prepareToPlay (int samplesPerBlockExpected, double newSampleRate)
{
const ScopedLock sl (callbackLock);
sampleRate = newSampleRate;
blockSize = samplesPerBlockExpected;
if (masterSource != nullptr)
masterSource->prepareToPlay (samplesPerBlockExpected, sampleRate);
if (resamplerSource != nullptr && sourceSampleRate > 0)
resamplerSource->setResamplingRatio (sourceSampleRate / sampleRate);
inputStreamEOF = false;
isPrepared = true;
}
void AudioTransportSource::releaseMasterResources()
{
const ScopedLock sl (callbackLock);
if (masterSource != nullptr)
masterSource->releaseResources();
isPrepared = false;
}
void AudioTransportSource::releaseResources()
{
releaseMasterResources();
}
void AudioTransportSource::getNextAudioBlock (const AudioSourceChannelInfo& info)
{
const ScopedLock sl (callbackLock);
if (masterSource != nullptr && ! stopped)
{
masterSource->getNextAudioBlock (info);
if (! playing)
{
// just stopped playing, so fade out the last block..
for (int i = info.buffer->getNumChannels(); --i >= 0;)
info.buffer->applyGainRamp (i, info.startSample, jmin (256, info.numSamples), 1.0f, 0.0f);
if (info.numSamples > 256)
info.buffer->clear (info.startSample + 256, info.numSamples - 256);
}
if (positionableSource->getNextReadPosition() > positionableSource->getTotalLength() + 1
&& ! positionableSource->isLooping())
{
playing = false;
inputStreamEOF = true;
sendChangeMessage();
}
stopped = ! playing;
for (int i = info.buffer->getNumChannels(); --i >= 0;)
info.buffer->applyGainRamp (i, info.startSample, info.numSamples, lastGain, gain);
}
else
{
info.clearActiveBufferRegion();
stopped = true;
}
lastGain = gain;
}


@ -0,0 +1,180 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#ifndef JUCE_AUDIOTRANSPORTSOURCE_H_INCLUDED
#define JUCE_AUDIOTRANSPORTSOURCE_H_INCLUDED
//==============================================================================
/**
An AudioSource that takes a PositionableAudioSource and allows it to be
played, stopped, started, etc.
This can also be told to use a buffer and a background thread to read ahead, and
it can correct for different sample-rates.
You may want to use one of these along with an AudioSourcePlayer and AudioIODevice
to control playback of an audio file.
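For example (a rough sketch only: 'reader' stands for an AudioFormatReader* created
by the caller, and 'thread' for a TimeSliceThread that has already been started):

@code
AudioFormatReaderSource readerSource (reader, true);
AudioTransportSource transport;

transport.setSource (&readerSource, 32768, &thread, reader->sampleRate);
transport.setPosition (0.0);
transport.start();
// (the transport's output would then be played via an AudioSourcePlayer, as described above)
@endcode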
@see AudioSource, AudioSourcePlayer
*/
class JUCE_API AudioTransportSource : public PositionableAudioSource,
public ChangeBroadcaster
{
public:
//==============================================================================
/** Creates an AudioTransportSource.
After creating one of these, use the setSource() method to select an input source.
*/
AudioTransportSource();
/** Destructor. */
~AudioTransportSource();
//==============================================================================
/** Sets the reader that is being used as the input source.
This will stop playback, reset the position to 0 and change to the new reader.
The source passed in will not be deleted by this object, so must be managed by
the caller.
@param newSource the new input source to use. This may be nullptr.
@param readAheadBufferSize a size of buffer to use for reading ahead. If this
is zero, no reading ahead will be done; if it's
greater than zero, a BufferingAudioSource will be used
to do the reading-ahead. If you set a non-zero value here,
you'll also need to set the readAheadThread parameter.
@param readAheadThread if you set readAheadBufferSize to a non-zero value, then
you'll also need to supply this TimeSliceThread object for
the background reader to use. The thread object must not be
deleted while the AudioTransport source is still using it.
@param sourceSampleRateToCorrectFor if this is non-zero, it specifies the sample
rate of the source, and playback will be sample-rate
adjusted to maintain playback at the correct pitch. If
this is 0, no sample-rate adjustment will be performed
@param maxNumChannels the maximum number of channels that may need to be played
*/
void setSource (PositionableAudioSource* newSource,
int readAheadBufferSize = 0,
TimeSliceThread* readAheadThread = nullptr,
double sourceSampleRateToCorrectFor = 0.0,
int maxNumChannels = 2);
//==============================================================================
/** Changes the current playback position in the source stream.
The next time the getNextAudioBlock() method is called, this
is the time from which it'll read data.
@see getCurrentPosition
*/
void setPosition (double newPosition);
/** Returns the position that the next data block will be read from.
This is a time in seconds.
*/
double getCurrentPosition() const;
/** Returns the stream's length in seconds. */
double getLengthInSeconds() const;
/** Returns true if the player has stopped because its input stream ran out of data. */
bool hasStreamFinished() const noexcept { return inputStreamEOF; }
//==============================================================================
/** Starts playing (if a source has been selected).
If it starts playing, this will send a message to any ChangeListeners
that are registered with this object.
*/
void start();
/** Stops playing.
If it's actually playing, this will send a message to any ChangeListeners
that are registered with this object.
*/
void stop();
/** Returns true if it's currently playing. */
bool isPlaying() const noexcept { return playing; }
//==============================================================================
/** Changes the gain to apply to the output.
@param newGain a factor by which to multiply the outgoing samples,
so 1.0 = 0dB, 0.5 = -6dB, 2.0 = 6dB, etc.
*/
void setGain (float newGain) noexcept;
/** Returns the current gain setting.
@see setGain
*/
float getGain() const noexcept { return gain; }
//==============================================================================
/** Implementation of the AudioSource method. */
void prepareToPlay (int samplesPerBlockExpected, double sampleRate) override;
/** Implementation of the AudioSource method. */
void releaseResources() override;
/** Implementation of the AudioSource method. */
void getNextAudioBlock (const AudioSourceChannelInfo&) override;
//==============================================================================
/** Implements the PositionableAudioSource method. */
void setNextReadPosition (int64 newPosition) override;
/** Implements the PositionableAudioSource method. */
int64 getNextReadPosition() const override;
/** Implements the PositionableAudioSource method. */
int64 getTotalLength() const override;
/** Implements the PositionableAudioSource method. */
bool isLooping() const override;
private:
//==============================================================================
PositionableAudioSource* source;
ResamplingAudioSource* resamplerSource;
BufferingAudioSource* bufferingSource;
PositionableAudioSource* positionableSource;
AudioSource* masterSource;
CriticalSection callbackLock;
float volatile gain, lastGain;
bool volatile playing, stopped;
double sampleRate, sourceSampleRate;
int blockSize, readAheadBufferSize;
bool volatile isPrepared, inputStreamEOF;
void releaseMasterResources();
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (AudioTransportSource)
};
#endif // JUCE_AUDIOTRANSPORTSOURCE_H_INCLUDED