Mirror of https://github.com/juce-framework/JUCE.git (synced 2026-02-01 03:10:06 +00:00)

Commit 8482a9de64 (parent 01e18b1c23)
iOSAudioDevice: Refactored and added better support for buffer size changes

2 changed files with 240 additions and 208 deletions
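
In outline: all mutable device state (sample rate, channel counts, buffer sizes, callback and error state) moves from iOSAudioIODevice into its Pimpl, the public class shrinks to forwarding declarations, and the Pimpl, now a public AsyncUpdater, routes every AudioUnit property change through a single dispatcher. Listening for kAudioUnitProperty_MaximumFramesPerSlice alongside kAudioUnitProperty_StreamFormat is what adds the buffer-size support: either change triggers an asynchronous restart() on the message thread.
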
@@ -211,20 +211,22 @@ static void logNSError (NSError* e)
 #endif
 
 //==============================================================================
-class iOSAudioIODevice::Pimpl : public AudioPlayHead,
-                                private AsyncUpdater
+struct iOSAudioIODevice::Pimpl : public AudioPlayHead,
+                                 public AsyncUpdater
 {
-public:
     Pimpl (iOSAudioIODevice& ioDevice)
         : owner (ioDevice)
     {
         sessionHolder->activeDevices.add (&owner);
+
+        updateSampleRateAndAudioInput();
     }
 
     ~Pimpl()
     {
         sessionHolder->activeDevices.removeFirstMatchingValue (&owner);
-        owner.close();
+
+        close();
     }
 
     static void setAudioSessionActive (bool enabled)
@@ -253,7 +255,7 @@ public:
 
         AudioUnitRemovePropertyListenerWithUserData (audioUnit,
                                                      kAudioUnitProperty_StreamFormat,
-                                                     handleStreamFormatChangeCallback,
+                                                     dispatchAudioUnitPropertyChange,
                                                      this);
 
         const double lowestRate = trySampleRate (4000);
@@ -269,12 +271,12 @@ public:
                 rate = jmax (rate, supportedRate);
         }
 
-        trySampleRate (owner.getCurrentSampleRate());
+        trySampleRate (sampleRate);
+        updateCurrentBufferSize();
 
         AudioUnitAddPropertyListener (audioUnit,
                                       kAudioUnitProperty_StreamFormat,
-                                      handleStreamFormatChangeCallback,
+                                      dispatchAudioUnitPropertyChange,
                                       this);
 
         return rates;
@@ -290,33 +292,45 @@ public:
         return r;
     }
 
+    void updateSampleRateAndAudioInput()
+    {
+        auto session = [AVAudioSession sharedInstance];
+        sampleRate = session.sampleRate;
+        audioInputIsAvailable = session.isInputAvailable;
+        actualBufferSize = roundToInt (sampleRate * session.IOBufferDuration);
+
+        JUCE_IOS_AUDIO_LOG ("AVAudioSession: sampleRate: " << sampleRate
+                            << " Hz, audioInputAvailable: " << (int) audioInputIsAvailable
+                            << ", buffer size: " << actualBufferSize);
+    }
+
     String open (const BigInteger& inputChannelsWanted,
                  const BigInteger& outputChannelsWanted,
                  double targetSampleRate, int bufferSize)
     {
         close();
 
-        owner.lastError.clear();
-        owner.preferredBufferSize = bufferSize <= 0 ? owner.getDefaultBufferSize() : bufferSize;
+        lastError.clear();
+        preferredBufferSize = bufferSize <= 0 ? defaultBufferSize : bufferSize;
 
         // xxx set up channel mapping
 
-        owner.activeOutputChans = outputChannelsWanted;
-        owner.activeOutputChans.setRange (2, owner.activeOutputChans.getHighestBit(), false);
-        owner.numOutputChannels = owner.activeOutputChans.countNumberOfSetBits();
-        monoOutputChannelNumber = owner.activeOutputChans.findNextSetBit (0);
+        activeOutputChans = outputChannelsWanted;
+        activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
+        numOutputChannels = activeOutputChans.countNumberOfSetBits();
+        monoOutputChannelNumber = activeOutputChans.findNextSetBit (0);
 
-        owner.activeInputChans = inputChannelsWanted;
-        owner.activeInputChans.setRange (2, owner.activeInputChans.getHighestBit(), false);
-        owner.numInputChannels = owner.activeInputChans.countNumberOfSetBits();
-        monoInputChannelNumber = owner.activeInputChans.findNextSetBit (0);
+        activeInputChans = inputChannelsWanted;
+        activeInputChans.setRange (2, activeInputChans.getHighestBit(), false);
+        numInputChannels = activeInputChans.countNumberOfSetBits();
+        monoInputChannelNumber = activeInputChans.findNextSetBit (0);
 
         setAudioSessionActive (true);
 
         // Set the session category & options:
         auto session = [AVAudioSession sharedInstance];
 
-        const bool useInputs = (owner.numInputChannels > 0 && owner.audioInputIsAvailable);
+        const bool useInputs = (numInputChannels > 0 && audioInputIsAvailable);
 
         NSString* category = (useInputs ? AVAudioSessionCategoryPlayAndRecord : AVAudioSessionCategoryPlayback);
 
@@ -332,26 +346,26 @@ public:
 
         // Set the sample rate
         trySampleRate (targetSampleRate);
-        owner.updateSampleRateAndAudioInput();
+        updateSampleRateAndAudioInput();
+        updateCurrentBufferSize();
 
-        prepareFloatBuffers (owner.actualBufferSize);
+        prepareFloatBuffers (actualBufferSize);
 
-        owner.isRunning = true;
+        isRunning = true;
         handleRouteChange ("Started AudioUnit");
 
-        owner.lastError = (audioUnit != 0 ? "" : "Couldn't open the device");
+        lastError = (audioUnit != 0 ? "" : "Couldn't open the device");
 
         setAudioSessionActive (true);
 
-        return owner.lastError;
+        return lastError;
     }
 
     void close()
     {
-        if (owner.isRunning)
+        if (isRunning)
         {
-            owner.isRunning = false;
+            isRunning = false;
 
             if (audioUnit != 0)
             {
@@ -366,26 +380,26 @@ public:
 
     void start (AudioIODeviceCallback* newCallback)
    {
-        if (owner.isRunning && owner.callback != newCallback)
+        if (isRunning && callback != newCallback)
         {
             if (newCallback != nullptr)
                 newCallback->audioDeviceAboutToStart (&owner);
 
             const ScopedLock sl (callbackLock);
-            owner.callback = newCallback;
+            callback = newCallback;
         }
     }
 
     void stop()
     {
-        if (owner.isRunning)
+        if (isRunning)
         {
             AudioIODeviceCallback* lastCallback;
 
             {
                 const ScopedLock sl (callbackLock);
-                lastCallback = owner.callback;
-                owner.callback = nullptr;
+                lastCallback = callback;
+                callback = nullptr;
             }
 
             if (lastCallback != nullptr)
@@ -407,7 +421,7 @@ public:
     }
 
     //==============================================================================
-    bool canControlTransport() override { return owner.interAppAudioConnected; }
+    bool canControlTransport() override { return interAppAudioConnected; }
 
     void transportPlay (bool shouldSartPlaying) override
     {
@@ -503,7 +517,7 @@ public:
         result.ppqLoopStart = hostCycleStartBeat;
         result.ppqLoopEnd = hostCycleEndBeat;
 
-        result.timeInSeconds = result.timeInSamples / owner.sampleRate;
+        result.timeInSeconds = result.timeInSamples / sampleRate;
 
         Float64 hostBeat = 0;
         Float64 hostTempo = 0;
@@ -538,7 +552,7 @@ public:
    #if JUCE_MODULE_AVAILABLE_juce_graphics
     Image getIcon (int size)
     {
-        if (owner.interAppAudioConnected)
+        if (interAppAudioConnected)
         {
             UIImage* hostUIImage = AudioOutputUnitGetHostIcon (audioUnit, size);
             if (hostUIImage != nullptr)
@@ -550,7 +564,7 @@ public:
 
     void switchApplication()
     {
-        if (! owner.interAppAudioConnected)
+        if (! interAppAudioConnected)
             return;
 
         CFURLRef hostUrl;
@@ -570,8 +584,8 @@ public:
     {
         const ScopedLock sl (callbackLock);
 
-        if (owner.callback != nullptr)
-            owner.callback->audioDeviceError (reason);
+        if (callback != nullptr)
+            callback->audioDeviceError (reason);
     }
 
     void handleStatusChange (bool enabled, const char* reason)
@@ -580,7 +594,7 @@ public:
 
         JUCE_IOS_AUDIO_LOG ("handleStatusChange: enabled: " << (int) enabled << ", reason: " << reason);
 
-        owner.isRunning = enabled;
+        isRunning = enabled;
         setAudioSessionActive (enabled);
 
         if (enabled)
@@ -600,41 +614,38 @@ public:
 
         fixAudioRouteIfSetToReceiver();
 
-        if (owner.isRunning)
-        {
+        if (isRunning)
             invokeAudioDeviceErrorCallback (reason);
-            owner.updateSampleRateAndAudioInput();
-            updateCurrentBufferSize();
-            createAudioUnit();
 
-            setAudioSessionActive (true);
-
-            if (audioUnit != 0)
-            {
-                UInt32 formatSize = sizeof (format);
-                AudioUnitGetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, &formatSize);
-                AudioOutputUnitStart (audioUnit);
-            }
-
-            if (owner.callback != nullptr)
-            {
-                owner.callback->audioDeviceStopped();
-                owner.callback->audioDeviceAboutToStart (&owner);
-            }
-        }
+        restart();
     }
 
     void handleAudioUnitPropertyChange (AudioUnit,
                                         AudioUnitPropertyID propertyID,
-                                        AudioUnitScope,
-                                        AudioUnitElement)
+                                        AudioUnitScope scope,
+                                        AudioUnitElement element)
     {
-        const ScopedLock myScopedLock (callbackLock);
+        JUCE_IOS_AUDIO_LOG ("handleAudioUnitPropertyChange: propertyID: " << String (propertyID)
+                            << " scope: " << String (scope)
+                            << " element: " << String (element));
 
         switch (propertyID)
         {
-            case kAudioUnitProperty_IsInterAppConnected: return handleInterAppAudioConnectionChange();
-            default: return;
+            case kAudioUnitProperty_IsInterAppConnected:
+                handleInterAppAudioConnectionChange();
+                return;
+            case kAudioUnitProperty_StreamFormat:
+                if (scope == kAudioUnitScope_Output && element == 0)
+                    handleStreamFormatChange();
+
+                return;
+            case kAudioUnitProperty_MaximumFramesPerSlice:
+                JUCE_IOS_AUDIO_LOG ("buffer size change");
+                triggerAsyncUpdate();
+                return;
+            default:
+                jassertfalse;
+                return;
         }
     }
 
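
All three property listeners now funnel through the single static trampoline dispatchAudioUnitPropertyChange (defined near the end of the Pimpl), which recovers the object from the user-data pointer registered alongside the callback. A minimal, CoreAudio-free sketch of that C-callback pattern, with hypothetical names:

    #include <cstdio>

    struct Listener
    {
        void handleChange (int propertyID)  { std::printf ("property %d changed\n", propertyID); }

        // C APIs store a plain function pointer plus an opaque user-data pointer;
        // the static trampoline casts the user data back to the object and forwards.
        static void dispatch (void* userData, int propertyID)
        {
            static_cast<Listener*> (userData)->handleChange (propertyID);
        }
    };

    int main()
    {
        Listener listener;
        void (*registeredCallback) (void*, int) = &Listener::dispatch;  // what the API keeps
        registeredCallback (&listener, 42);                             // what the API later invokes
        return 0;
    }
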
@@ -650,22 +661,22 @@ public:
         JUCE_IOS_AUDIO_LOG ("handleInterAppAudioConnectionChange: " << (connected ? "connected"
                                                                                   : "disconnected"));
 
-        if (connected != owner.interAppAudioConnected)
+        if (connected != interAppAudioConnected)
         {
             const ScopedLock myScopedLock (callbackLock);
 
-            owner.interAppAudioConnected = connected;
+            interAppAudioConnected = connected;
 
             UIApplicationState appstate = [UIApplication sharedApplication].applicationState;
             bool inForeground = (appstate != UIApplicationStateBackground);
 
-            if (owner.interAppAudioConnected || inForeground)
+            if (interAppAudioConnected || inForeground)
             {
                 setAudioSessionActive (true);
                 AudioOutputUnitStart (audioUnit);
 
-                if (owner.callback != nullptr)
-                    owner.callback->audioDeviceAboutToStart (&owner);
+                if (callback != nullptr)
+                    callback->audioDeviceAboutToStart (&owner);
             }
             else if (! inForeground)
             {
@@ -675,33 +686,20 @@ public:
         }
     }
 
-private:
-    //==============================================================================
-    iOSAudioIODevice& owner;
-    SharedResourcePointer<AudioSessionHolder> sessionHolder;
-    CriticalSection callbackLock;
-
-    AudioStreamBasicDescription format;
-    AudioUnit audioUnit {};
-
-    AudioSampleBuffer floatData;
-    float* inputChannels[3];
-    float* outputChannels[3];
-    bool monoInputChannelNumber, monoOutputChannelNumber;
-
     void prepareFloatBuffers (int bufferSize)
     {
-        if (owner.numInputChannels + owner.numOutputChannels > 0)
+        if (numInputChannels + numOutputChannels > 0)
         {
-            floatData.setSize (owner.numInputChannels + owner.numOutputChannels, bufferSize);
+            floatData.setSize (numInputChannels + numOutputChannels, bufferSize);
             zeromem (inputChannels, sizeof (inputChannels));
             zeromem (outputChannels, sizeof (outputChannels));
 
-            for (int i = 0; i < owner.numInputChannels; ++i)
+            for (int i = 0; i < numInputChannels; ++i)
                 inputChannels[i] = floatData.getWritePointer (i);
 
-            for (int i = 0; i < owner.numOutputChannels; ++i)
-                outputChannels[i] = floatData.getWritePointer (i + owner.numInputChannels);
+            for (int i = 0; i < numOutputChannels; ++i)
+                outputChannels[i] = floatData.getWritePointer (i + numInputChannels);
         }
     }
 
@@ -711,21 +709,21 @@ private:
     {
         OSStatus err = noErr;
 
-        if (owner.audioInputIsAvailable && owner.numInputChannels > 0)
+        if (audioInputIsAvailable && numInputChannels > 0)
             err = AudioUnitRender (audioUnit, flags, time, 1, numFrames, data);
 
         const ScopedTryLock stl (callbackLock);
 
-        if (stl.isLocked() && owner.callback != nullptr)
+        if (stl.isLocked() && callback != nullptr)
         {
             if ((int) numFrames > floatData.getNumSamples())
                 prepareFloatBuffers ((int) numFrames);
 
-            if (owner.audioInputIsAvailable && owner.numInputChannels > 0)
+            if (audioInputIsAvailable && numInputChannels > 0)
             {
                 short* shortData = (short*) data->mBuffers[0].mData;
 
-                if (owner.numInputChannels >= 2)
+                if (numInputChannels >= 2)
                 {
                     for (UInt32 i = 0; i < numFrames; ++i)
                     {
@@ -747,17 +745,17 @@ private:
             }
             else
             {
-                for (int i = owner.numInputChannels; --i >= 0;)
+                for (int i = numInputChannels; --i >= 0;)
                     zeromem (inputChannels[i], sizeof (float) * numFrames);
             }
 
-            owner.callback->audioDeviceIOCallback ((const float**) inputChannels, owner.numInputChannels,
-                                                   outputChannels, owner.numOutputChannels, (int) numFrames);
+            callback->audioDeviceIOCallback ((const float**) inputChannels, numInputChannels,
+                                             outputChannels, numOutputChannels, (int) numFrames);
 
             short* const shortData = (short*) data->mBuffers[0].mData;
             int n = 0;
 
-            if (owner.numOutputChannels >= 2)
+            if (numOutputChannels >= 2)
             {
                 for (UInt32 i = 0; i < numFrames; ++i)
                 {
@@ -765,7 +763,7 @@ private:
                     shortData [n++] = (short) (outputChannels[1][i] * 32767.0f);
                 }
             }
-            else if (owner.numOutputChannels == 1)
+            else if (numOutputChannels == 1)
             {
                 for (UInt32 i = 0; i < numFrames; ++i)
                 {
@@ -789,11 +787,11 @@ private:
 
     void updateCurrentBufferSize()
     {
-        NSTimeInterval bufferDuration = owner.sampleRate > 0 ? (NSTimeInterval) ((owner.preferredBufferSize + 1) / owner.sampleRate) : 0.0;
+        NSTimeInterval bufferDuration = sampleRate > 0 ? (NSTimeInterval) ((preferredBufferSize + 1) / sampleRate) : 0.0;
 
         JUCE_NSERROR_CHECK ([[AVAudioSession sharedInstance] setPreferredIOBufferDuration: bufferDuration
                                                                                    error: &error]);
-        owner.updateSampleRateAndAudioInput();
+        updateSampleRateAndAudioInput();
     }
 
     //==============================================================================
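
The frames-to-seconds conversion above is the only arithmetic in the buffer-size path: AVAudioSession takes a preferred IO buffer duration in seconds, and updateSampleRateAndAudioInput() later reads the granted size back from session.IOBufferDuration. A standalone sketch of that round trip with example values (the real session also quantises the duration, so the granted size can differ from the request):

    #include <cmath>
    #include <cstdio>

    int main()
    {
        const double sampleRate = 44100.0;
        const int preferredBufferSize = 256;

        // frames -> seconds, as passed to setPreferredIOBufferDuration:
        // (the +1 frame appears to bias the requested duration upward)
        const double bufferDuration = (preferredBufferSize + 1) / sampleRate;

        // seconds -> frames, as recovered from session.IOBufferDuration afterwards:
        const int actualBufferSize = (int) std::lround (sampleRate * bufferDuration);

        std::printf ("%.6f s -> %d frames\n", bufferDuration, actualBufferSize);
        return 0;
    }
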
@@ -859,12 +857,12 @@ private:
 
         err = AudioUnitAddPropertyListener (audioUnit,
                                             kAudioUnitProperty_IsInterAppConnected,
-                                            audioUnitPropertyChangeDispatcher,
+                                            dispatchAudioUnitPropertyChange,
                                             this);
         jassert (err == noErr);
    #endif
 
-        if (owner.numInputChannels > 0)
+        if (numInputChannels > 0)
         {
             const UInt32 one = 1;
             AudioUnitSetProperty (audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &one, sizeof (one));
@@ -898,12 +896,13 @@ private:
 
         if (AudioUnitGetProperty (audioUnit, kAudioUnitProperty_MaximumFramesPerSlice,
                                   kAudioUnitScope_Global, 0, &framesPerSlice, &dataSize) == noErr
-             && dataSize == sizeof (framesPerSlice) && static_cast<int> (framesPerSlice) != owner.actualBufferSize)
+             && dataSize == sizeof (framesPerSlice) && static_cast<int> (framesPerSlice) != actualBufferSize)
         {
             prepareFloatBuffers (static_cast<int> (framesPerSlice));
         }
 
-        AudioUnitAddPropertyListener (audioUnit, kAudioUnitProperty_StreamFormat, handleStreamFormatChangeCallback, this);
+        AudioUnitAddPropertyListener (audioUnit, kAudioUnitProperty_StreamFormat, dispatchAudioUnitPropertyChange, this);
+        AudioUnitAddPropertyListener (audioUnit, kAudioUnitProperty_MaximumFramesPerSlice, dispatchAudioUnitPropertyChange, this);
 
         return true;
     }
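
The second listener registration here is the new part: kAudioUnitProperty_MaximumFramesPerSlice tracks the largest render block the audio unit can be asked for, so when the system or a host changes the effective buffer size, handleAudioUnitPropertyChange above sees it and schedules the asynchronous restart.
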
@@ -956,9 +955,34 @@ private:
         }
     }
 
+    void restart()
+    {
+        if (isRunning)
+        {
+            updateSampleRateAndAudioInput();
+            updateCurrentBufferSize();
+            createAudioUnit();
+
+            setAudioSessionActive (true);
+
+            if (audioUnit != 0)
+            {
+                UInt32 formatSize = sizeof (format);
+                AudioUnitGetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, &formatSize);
+                AudioOutputUnitStart (audioUnit);
+            }
+
+            if (callback != nullptr)
+            {
+                callback->audioDeviceStopped();
+                callback->audioDeviceAboutToStart (&owner);
+            }
+        }
+    }
+
     void handleAsyncUpdate() override
     {
-        owner.handleRouteChange ("Stream format change");
+        restart();
     }
 
     void handleStreamFormatChange()
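
restart() now owns the tear-down-and-rebuild sequence that handleRouteChange previously inlined, and handleAsyncUpdate() reduces to calling it. Because the Pimpl inherits AsyncUpdater, triggerAsyncUpdate() from a property-change callback defers the rebuild to the message thread instead of reconfiguring the audio unit from inside the notification itself.
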
@@ -973,31 +997,23 @@ private:
                               &desc,
                               &dataSize);
 
-        if (desc.mSampleRate != owner.getCurrentSampleRate())
+        if (desc.mSampleRate != sampleRate)
         {
             JUCE_IOS_AUDIO_LOG ("handleStreamFormatChange: sample rate " << desc.mSampleRate);
             triggerAsyncUpdate();
         }
     }
 
-    static void handleStreamFormatChangeCallback (void* device,
-                                                  AudioUnit,
-                                                  AudioUnitPropertyID,
-                                                  AudioUnitScope scope,
-                                                  AudioUnitElement element)
+    static void dispatchAudioUnitPropertyChange (void* data, AudioUnit unit, AudioUnitPropertyID propertyID,
+                                                 AudioUnitScope scope, AudioUnitElement element)
     {
-        if (scope == kAudioUnitScope_Output && element == 0)
-            static_cast<Pimpl*> (device)->handleStreamFormatChange();
-    }
-
-    static void audioUnitPropertyChangeDispatcher (void* data, AudioUnit unit, AudioUnitPropertyID propertyID,
-                                                   AudioUnitScope scope, AudioUnitElement element)
-    {
-        Pimpl* device = (Pimpl*)data;
-        device->handleAudioUnitPropertyChange (unit, propertyID, scope, element);
+        static_cast<Pimpl*> (data)->handleAudioUnitPropertyChange (unit, propertyID, scope, element);
     }
 
     void handleMidiMessage (MidiMessage msg)
     {
-        if (owner.messageCollector != nullptr)
-            owner.messageCollector->addMessageToQueue (msg);
+        if (messageCollector != nullptr)
+            messageCollector->addMessageToQueue (msg);
     }
 
     static void midiEventCallback (void *client, UInt32 status, UInt32 data1, UInt32 data2, UInt32)
@@ -1008,6 +1024,41 @@ private:
                                               Time::getMillisecondCounter() / 1000.0));
     }
 
+    bool isRunning = false;
+    AudioIODeviceCallback* callback = nullptr;
+
+    String lastError;
+
+    bool audioInputIsAvailable = false;
+
+    const int defaultBufferSize =
+       #if TARGET_IPHONE_SIMULATOR
+        512;
+       #else
+        256;
+       #endif
+    double sampleRate = 0;
+    int numInputChannels = 2, numOutputChannels = 2;
+    int preferredBufferSize = 0, actualBufferSize = 0;
+
+    bool interAppAudioConnected = false;
+
+    BigInteger activeOutputChans, activeInputChans;
+
+    MidiMessageCollector* messageCollector = nullptr;
+
+    iOSAudioIODevice& owner;
+    SharedResourcePointer<AudioSessionHolder> sessionHolder;
+    CriticalSection callbackLock;
+
+    AudioStreamBasicDescription format;
+    AudioUnit audioUnit {};
+
+    AudioSampleBuffer floatData;
+    float* inputChannels[3];
+    float* outputChannels[3];
+    bool monoInputChannelNumber, monoOutputChannelNumber;
+
     JUCE_DECLARE_NON_COPYABLE (Pimpl)
 };
 
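
With the members gathered here and given in-class default values, the device object itself keeps no state, which is why the constructor's initialiser list disappears in the next hunk. A compilable sketch of the idiom with hypothetical names, std::unique_ptr standing in for JUCE's ScopedPointer:

    #include <memory>

    class Device
    {
    public:
        Device();
        ~Device();                    // defined where Impl is complete
        double getSampleRate() const;

    private:
        struct Impl;                  // all state hidden in the implementation file
        std::unique_ptr<Impl> impl;
    };

    // --- implementation file ---
    struct Device::Impl
    {
        double sampleRate = 44100.0;  // in-class initialisers replace the init list
    };

    Device::Device() : impl (new Impl()) {}
    Device::~Device() = default;
    double Device::getSampleRate() const { return impl->sampleRate; }

    int main() { Device d; return d.getSampleRate() > 0.0 ? 0 : 1; }
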
@@ -1015,64 +1066,57 @@ private:
 //==============================================================================
 iOSAudioIODevice::iOSAudioIODevice (const String& deviceName)
     : AudioIODevice (deviceName, iOSAudioDeviceName),
-     #if TARGET_IPHONE_SIMULATOR
-      defaultBufferSize (512),
-     #else
-      defaultBufferSize (256),
-     #endif
-      sampleRate (0), numInputChannels (2), numOutputChannels (2),
-      preferredBufferSize (0), actualBufferSize (0), isRunning (false),
-      audioInputIsAvailable (false), interAppAudioConnected (false),
-      callback (nullptr), messageCollector (nullptr),
       pimpl (new Pimpl (*this))
-{
-    updateSampleRateAndAudioInput();
-}
+{}
 
-//==============================================================================
-int iOSAudioIODevice::getOutputLatencyInSamples() { return roundToInt (sampleRate * [AVAudioSession sharedInstance].outputLatency); }
-int iOSAudioIODevice::getInputLatencyInSamples()  { return roundToInt (sampleRate * [AVAudioSession sharedInstance].inputLatency); }
-
-//==============================================================================
-AudioPlayHead* iOSAudioIODevice::getAudioPlayHead() const { return pimpl; }
-void iOSAudioIODevice::close() { pimpl->close(); }
-void iOSAudioIODevice::start (AudioIODeviceCallback* callbackToUse) { pimpl->start (callbackToUse); }
-void iOSAudioIODevice::stop() { pimpl->stop(); }
-Array<double> iOSAudioIODevice::getAvailableSampleRates() { return pimpl->getAvailableSampleRates(); }
-Array<int> iOSAudioIODevice::getAvailableBufferSizes() { return pimpl->getAvailableBufferSizes(); }
-bool iOSAudioIODevice::setAudioPreprocessingEnabled (bool enabled) { return pimpl->setAudioPreprocessingEnabled (enabled); }
-void iOSAudioIODevice::switchApplication() { return pimpl->switchApplication(); }
-
-//==============================================================================
-void iOSAudioIODevice::handleStatusChange (bool enabled, const char* reason) { pimpl->handleStatusChange (enabled, reason); }
-void iOSAudioIODevice::handleRouteChange (const char* reason) { pimpl->handleRouteChange (reason); }
-
-#if JUCE_MODULE_AVAILABLE_juce_graphics
-Image iOSAudioIODevice::getIcon (int size) { return pimpl->getIcon (size); }
-#endif
-
 //==============================================================================
-String iOSAudioIODevice::open (const BigInteger& inChans, const BigInteger& outChans, double requestedSampleRate, int requestedBufferSize)
+String iOSAudioIODevice::open (const BigInteger& inChans, const BigInteger& outChans,
+                               double requestedSampleRate, int requestedBufferSize)
 {
     return pimpl->open (inChans, outChans, requestedSampleRate, requestedBufferSize);
 }
+void iOSAudioIODevice::close() { pimpl->close(); }
 
-void iOSAudioIODevice::updateSampleRateAndAudioInput()
-{
-    auto session = [AVAudioSession sharedInstance];
-    sampleRate = session.sampleRate;
-    audioInputIsAvailable = session.isInputAvailable;
-    actualBufferSize = roundToInt (sampleRate * session.IOBufferDuration);
+void iOSAudioIODevice::start (AudioIODeviceCallback* callbackToUse) { pimpl->start (callbackToUse); }
+void iOSAudioIODevice::stop() { pimpl->stop(); }
 
-    JUCE_IOS_AUDIO_LOG ("AVAudioSession: sampleRate: " << sampleRate
-                        << " Hz, audioInputAvailable: " << (int) audioInputIsAvailable
-                        << ", buffer size: " << actualBufferSize);
-}
+Array<double> iOSAudioIODevice::getAvailableSampleRates() { return pimpl->getAvailableSampleRates(); }
+Array<int> iOSAudioIODevice::getAvailableBufferSizes() { return pimpl->getAvailableBufferSizes(); }
+
+bool iOSAudioIODevice::setAudioPreprocessingEnabled (bool enabled) { return pimpl->setAudioPreprocessingEnabled (enabled); }
+
+bool iOSAudioIODevice::isPlaying() { return pimpl->isRunning && pimpl->callback != nullptr; }
+bool iOSAudioIODevice::isOpen() { return pimpl->isRunning; }
+String iOSAudioIODevice::getLastError() { return pimpl->lastError; }
+
+StringArray iOSAudioIODevice::getOutputChannelNames() { return { "Left", "Right" }; }
+StringArray iOSAudioIODevice::getInputChannelNames() { return pimpl->audioInputIsAvailable ? getOutputChannelNames() : StringArray(); }
+
+int iOSAudioIODevice::getDefaultBufferSize() { return pimpl->defaultBufferSize; }
+int iOSAudioIODevice::getCurrentBufferSizeSamples() { return pimpl->actualBufferSize; }
+
+double iOSAudioIODevice::getCurrentSampleRate() { return pimpl->sampleRate; }
+
+int iOSAudioIODevice::getCurrentBitDepth() { return 16; }
+
+BigInteger iOSAudioIODevice::getActiveOutputChannels() const { return pimpl->activeOutputChans; }
+BigInteger iOSAudioIODevice::getActiveInputChannels() const { return pimpl->activeInputChans; }
+
+int iOSAudioIODevice::getOutputLatencyInSamples() { return roundToInt (pimpl->sampleRate * [AVAudioSession sharedInstance].outputLatency); }
+int iOSAudioIODevice::getInputLatencyInSamples() { return roundToInt (pimpl->sampleRate * [AVAudioSession sharedInstance].inputLatency); }
+
+void iOSAudioIODevice::setMidiMessageCollector (MidiMessageCollector* collector) { pimpl->messageCollector = collector; }
+AudioPlayHead* iOSAudioIODevice::getAudioPlayHead() const { return pimpl; }
+
+bool iOSAudioIODevice::isInterAppAudioConnected() const { return pimpl->interAppAudioConnected; }
+#if JUCE_MODULE_AVAILABLE_juce_graphics
+Image iOSAudioIODevice::getIcon (int size) { return pimpl->getIcon (size); }
+#endif
+void iOSAudioIODevice::switchApplication() { return pimpl->switchApplication(); }
 
 //==============================================================================
-class iOSAudioIODeviceType : public AudioIODeviceType
+struct iOSAudioIODeviceType : public AudioIODeviceType
 {
-public:
     iOSAudioIODeviceType() : AudioIODeviceType (iOSAudioDeviceName) {}
 
     void scanForDevices() {}
@@ -1089,7 +1133,6 @@ public:
         return nullptr;
     }
 
-private:
     JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (iOSAudioIODeviceType)
 };
 
@@ -1107,13 +1150,13 @@ void AudioSessionHolder::handleAsyncUpdate()
 {
     const ScopedLock sl (routeChangeLock);
     for (auto device: activeDevices)
-        device->handleRouteChange (lastRouteChangeReason.toRawUTF8());
+        device->pimpl->handleRouteChange (lastRouteChangeReason.toRawUTF8());
 }
 
 void AudioSessionHolder::handleStatusChange (bool enabled, const char* reason) const
 {
     for (auto device: activeDevices)
-        device->handleStatusChange (enabled, reason);
+        device->pimpl->handleStatusChange (enabled, reason);
 }
 
 void AudioSessionHolder::handleRouteChange (const char* reason)

@@ -22,7 +22,7 @@

 #pragma once
 
-class iOSAudioIODeviceType;
+struct iOSAudioIODeviceType;
 
 class iOSAudioIODevice : public AudioIODevice
 {
@@ -36,63 +36,52 @@ public:
 
     Array<double> getAvailableSampleRates() override;
     Array<int> getAvailableBufferSizes() override;
 
     bool setAudioPreprocessingEnabled (bool) override;
 
     //==============================================================================
-    bool isPlaying() override { return isRunning && callback != nullptr; }
-    bool isOpen() override { return isRunning; }
-    String getLastError() override { return lastError; }
+    bool isPlaying() override;
+    bool isOpen() override;
+    String getLastError() override;
 
     //==============================================================================
-    StringArray getOutputChannelNames() override { return { "Left", "Right" }; }
-    StringArray getInputChannelNames() override { return audioInputIsAvailable ? getOutputChannelNames() : StringArray(); }
-    int getDefaultBufferSize() override { return defaultBufferSize; }
-    int getCurrentBufferSizeSamples() override { return actualBufferSize; }
-    double getCurrentSampleRate() override { return sampleRate; }
-    int getCurrentBitDepth() override { return 16; }
-    BigInteger getActiveOutputChannels() const override { return activeOutputChans; }
-    BigInteger getActiveInputChannels() const override { return activeInputChans; }
+    StringArray getOutputChannelNames() override;
+    StringArray getInputChannelNames() override;
+
+    int getDefaultBufferSize() override;
+    int getCurrentBufferSizeSamples() override;
+
+    double getCurrentSampleRate() override;
+
+    int getCurrentBitDepth() override;
+
+    BigInteger getActiveOutputChannels() const override;
+    BigInteger getActiveInputChannels() const override;
 
     int getOutputLatencyInSamples() override;
     int getInputLatencyInSamples() override;
 
     //==============================================================================
-    void handleStatusChange (bool enabled, const char* reason);
-    void handleRouteChange (const char* reason);
+    void setMidiMessageCollector (MidiMessageCollector*);
+    AudioPlayHead* getAudioPlayHead() const;
 
     //==============================================================================
-    virtual void setMidiMessageCollector (MidiMessageCollector* collector) { messageCollector = collector; }
-    virtual AudioPlayHead* getAudioPlayHead() const;
-
-    //==============================================================================
-    virtual bool isInterAppAudioConnected() const { return interAppAudioConnected; }
+    bool isInterAppAudioConnected() const;
    #if JUCE_MODULE_AVAILABLE_juce_graphics
-    virtual Image getIcon (int size);
+    Image getIcon (int size);
    #endif
-    virtual void switchApplication();
+    void switchApplication();
 
 private:
     //==============================================================================
-    void updateSampleRateAndAudioInput();
+    iOSAudioIODevice (const String&);
 
     //==============================================================================
-    friend class iOSAudioIODeviceType;
-    iOSAudioIODevice (const String& deviceName);
+    friend struct iOSAudioIODeviceType;
+    friend struct AudioSessionHolder;
 
     //==============================================================================
-    const int defaultBufferSize;
-    double sampleRate;
-    int numInputChannels, numOutputChannels;
-    int preferredBufferSize, actualBufferSize;
-    bool isRunning;
-    String lastError;
-
-    bool audioInputIsAvailable, interAppAudioConnected;
-    BigInteger activeOutputChans, activeInputChans;
-
-    AudioIODeviceCallback* callback;
-    MidiMessageCollector* messageCollector;
-
-    class Pimpl;
-    friend class Pimpl;
+    struct Pimpl;
+    friend struct Pimpl;
     ScopedPointer<Pimpl> pimpl;
 
     JUCE_DECLARE_NON_COPYABLE (iOSAudioIODevice)