mirror of
https://github.com/juce-framework/JUCE.git
synced 2026-01-10 23:44:24 +00:00
iOS Audio: Add waiting mechanism after AVAudioSession setActive: to ensure correct buffer size detection on iOS 18
The waiting is only added on iOS 18, as this is the only platform where there seems to be an asynchronous relationship between (1) AVAudioSession setActive:, (2) AVAudioSession setPreferredIOBufferDuration:, and (3) AVAudioSession.IOBufferDuration. The issue is not observable in the iOS 18 simulator.
This commit is contained in:
parent
d3e254e24f
commit
70c9c5bfdb
1 changed file with 101 additions and 6 deletions
|
|
@ -257,6 +257,81 @@ private:
|
|||
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (iOSAudioIODeviceType)
|
||||
};
|
||||
|
||||
//==============================================================================
/*  Creates a throwaway RemoteIO audio unit and blocks until its render callback
    fires, indicating that the audio session's settings (e.g. the preferred IO
    buffer duration) have actually taken effect.

    NOTE(review): on iOS 18 the effects of AVAudioSession setActive: and
    setPreferredIOBufferDuration: appear to be applied asynchronously, so
    querying the session immediately after activation may return stale values;
    waiting for a real render callback works around this.
*/
class SubstituteAudioUnit
{
public:
    /** Returns true if the audio callback was called. Returns false if the
        audio unit could not be created or started, or if a timeout occurred.
    */
    bool waitForAudioCallback()
    {
        // Dispose of any unit left over from a previous call so that each
        // invocation starts from a clean state.
        if (audioUnit != nullptr)
        {
            AudioComponentInstanceDispose (audioUnit);
            audioUnit = nullptr;
        }

        AudioComponentDescription desc;
        desc.componentType = kAudioUnitType_Output;
        desc.componentSubType = kAudioUnitSubType_RemoteIO;
        desc.componentManufacturer = kAudioUnitManufacturer_Apple;
        desc.componentFlags = 0;
        desc.componentFlagsMask = 0;

        // Guard against a missing RemoteIO component: passing a null component
        // to AudioComponentInstanceNew would be API misuse.
        AudioComponent comp = AudioComponentFindNext (nullptr, &desc);

        if (comp == nullptr)
            return false;

        AudioComponentInstanceNew (comp, &audioUnit);

        if (audioUnit == nullptr)
            return false;

        {
            // Install a render callback that signals the waitable event the
            // first time the audio device actually asks for data.
            AURenderCallbackStruct inputProc;
            inputProc.inputProc = audioUnitCallback;
            inputProc.inputProcRefCon = this;
            AudioUnitSetProperty (audioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &inputProc, sizeof (inputProc));
        }

        {
            // Use a non-interleaved stereo float format at the session's
            // current sample rate. For non-interleaved data, bytes-per-frame
            // describes a single channel, hence sizeof (float).
            AudioStreamBasicDescription format;
            zerostruct (format);
            format.mSampleRate = [AVAudioSession sharedInstance].sampleRate;
            format.mFormatID = kAudioFormatLinearPCM;
            format.mFormatFlags = kAudioFormatFlagIsFloat | kAudioFormatFlagIsNonInterleaved | kAudioFormatFlagsNativeEndian | kLinearPCMFormatFlagIsPacked;
            format.mBitsPerChannel = 8 * sizeof (float);
            format.mFramesPerPacket = 1;
            format.mChannelsPerFrame = 2;
            format.mBytesPerFrame = format.mBytesPerPacket = sizeof (float);

            AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &format, sizeof (format));
            AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, sizeof (format));
        }

        // If the unit fails to initialise or start, there is no point in
        // waiting a full second for a callback that can never arrive.
        const auto initialisedAndStarted = AudioUnitInitialize (audioUnit) == noErr
                                        && AudioOutputUnitStart (audioUnit) == noErr;

        const auto result = initialisedAndStarted && audioCallbackOccurred.wait (1000.0);

        // Stop the unit before disposing of it, so that the render thread is
        // no longer running when the instance is torn down.
        AudioOutputUnitStop (audioUnit);
        AudioComponentInstanceDispose (audioUnit);
        audioUnit = nullptr;

        return result;
    }

private:
    // Render callback: signals the event and produces no audio data. The
    // refCon is the owning SubstituteAudioUnit instance.
    static OSStatus audioUnitCallback (void* object,
                                       AudioUnitRenderActionFlags*,
                                       const AudioTimeStamp*,
                                       UInt32,
                                       UInt32,
                                       AudioBufferList*)
    {
        static_cast<SubstituteAudioUnit*> (object)->audioCallbackOccurred.signal();

        return noErr;
    }

    AudioUnit audioUnit{};
    WaitableEvent audioCallbackOccurred;
};
|
||||
|
||||
//==============================================================================
|
||||
struct iOSAudioIODevice::Pimpl final : public AsyncUpdater
|
||||
{
|
||||
|
|
@ -311,6 +386,16 @@ struct iOSAudioIODevice::Pimpl final : public AsyncUpdater
|
|||
{
|
||||
JUCE_NSERROR_CHECK ([[AVAudioSession sharedInstance] setActive: enabled
|
||||
error: &error]);
|
||||
|
||||
if (@available (ios 18, *))
|
||||
{
|
||||
if (enabled)
|
||||
{
|
||||
SubstituteAudioUnit au;
|
||||
[[maybe_unused]] const auto success = au.waitForAudioCallback();
|
||||
jassert (success);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
int getBufferSize (const double currentSampleRate)
|
||||
|
|
@ -334,8 +419,20 @@ struct iOSAudioIODevice::Pimpl final : public AsyncUpdater
|
|||
NSTimeInterval bufferDuration = currentSampleRate > 0 ? (NSTimeInterval) (newBufferSize + extraOffset) / currentSampleRate : 0.0;
|
||||
|
||||
auto session = [AVAudioSession sharedInstance];
|
||||
JUCE_NSERROR_CHECK ([session setPreferredIOBufferDuration: bufferDuration
|
||||
error: &error]);
|
||||
|
||||
// According to the apple docs, it's best to set preferred sample rates and block sizes
|
||||
// while the device is inactive, and then to query the real values after activation.
|
||||
// Unfortunately, on iOS 18.0, the real block size isn't immediately available after
|
||||
// a call to setActive, so we also need to wait for the first audio callback.
|
||||
// This will be slow!
|
||||
// https://developer.apple.com/library/archive/qa/qa1631/_index.html
|
||||
if (@available (ios 18, *))
|
||||
setAudioSessionActive (false);
|
||||
|
||||
JUCE_NSERROR_CHECK ([session setPreferredIOBufferDuration: bufferDuration error: &error]);
|
||||
|
||||
if (@available (ios 18, *))
|
||||
setAudioSessionActive (true);
|
||||
|
||||
return getBufferSize (currentSampleRate);
|
||||
}
|
||||
|
|
@ -347,7 +444,7 @@ struct iOSAudioIODevice::Pimpl final : public AsyncUpdater
|
|||
auto newBufferSize = tryBufferSize (sampleRate, 64);
|
||||
jassert (newBufferSize > 0);
|
||||
|
||||
const auto longestBufferSize = tryBufferSize (sampleRate, 4096);
|
||||
const auto longestBufferSize = tryBufferSize (sampleRate, 4096);
|
||||
|
||||
while (newBufferSize <= longestBufferSize)
|
||||
{
|
||||
|
|
@ -377,9 +474,7 @@ struct iOSAudioIODevice::Pimpl final : public AsyncUpdater
|
|||
/** Asks the audio session to use the given preferred sample rate, then
    returns the rate that the session actually ended up using (which may
    differ from the requested value).
*/
double trySampleRate (double rate)
{
    auto session = [AVAudioSession sharedInstance];

    // A single call is sufficient; the duplicated setPreferredSampleRate:
    // invocation has been removed.
    JUCE_NSERROR_CHECK ([session setPreferredSampleRate: rate error: &error]);

    return session.sampleRate;
}
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue