diff --git a/modules/juce_audio_devices/native/juce_ios_Audio.cpp b/modules/juce_audio_devices/native/juce_ios_Audio.cpp
index a1e87a0b13..51af02d23b 100644
--- a/modules/juce_audio_devices/native/juce_ios_Audio.cpp
+++ b/modules/juce_audio_devices/native/juce_ios_Audio.cpp
@@ -101,7 +101,7 @@ public:
                                 : kAudioSessionCategory_MediaPlayback;
 
         AudioSessionSetProperty (kAudioSessionProperty_AudioCategory, sizeof (audioCategory), &audioCategory);
-        AudioSessionAddPropertyListener (kAudioSessionProperty_AudioRouteChange, propertyChangedStatic, this);
+        AudioSessionAddPropertyListener (kAudioSessionProperty_AudioRouteChange, routingChangedStatic, this);
 
         fixAudioRouteIfSetToReceiver();
         updateDeviceInfo();
@@ -113,7 +113,7 @@ public:
         prepareFloatBuffers();
 
         isRunning = true;
-        propertyChanged (0, 0, 0); // creates and starts the AU
+        routingChanged (nullptr); // creates and starts the AU
 
         lastError = audioUnit != 0 ? "" : "Couldn't open the device";
         return lastError;
@@ -183,8 +183,7 @@ private:
     CriticalSection callbackLock;
     Float64 sampleRate;
     int numInputChannels, numOutputChannels;
-    int preferredBufferSize;
-    int actualBufferSize;
+    int preferredBufferSize, actualBufferSize;
     bool isRunning;
     String lastError;
 
@@ -202,8 +201,8 @@ private:
     void prepareFloatBuffers()
     {
         floatData.setSize (numInputChannels + numOutputChannels, actualBufferSize);
-        zerostruct (inputChannels);
-        zerostruct (outputChannels);
+        zeromem (inputChannels, sizeof (inputChannels));
+        zeromem (outputChannels, sizeof (outputChannels));
 
         for (int i = 0; i < numInputChannels; ++i)
             inputChannels[i] = floatData.getSampleData (i);
@@ -213,13 +212,13 @@ private:
     }
 
     //==================================================================================================
-    OSStatus process (AudioUnitRenderActionFlags* ioActionFlags, const AudioTimeStamp* inTimeStamp,
-                      UInt32 inBusNumber, UInt32 inNumberFrames, AudioBufferList* ioData)
+    OSStatus process (AudioUnitRenderActionFlags* flags, const AudioTimeStamp* time,
+                      const UInt32 numFrames, AudioBufferList* data)
     {
         OSStatus err = noErr;
 
         if (audioInputIsAvailable)
-            err = AudioUnitRender (audioUnit, ioActionFlags, inTimeStamp, 1, inNumberFrames, ioData);
+            err = AudioUnitRender (audioUnit, flags, time, 1, numFrames, data);
 
         const ScopedLock sl (callbackLock);
 
@@ -227,11 +226,11 @@ private:
         {
             if (audioInputIsAvailable && numInputChannels > 0)
             {
-                short* shortData = (short*) ioData->mBuffers[0].mData;
+                short* shortData = (short*) data->mBuffers[0].mData;
 
                 if (numInputChannels >= 2)
                 {
-                    for (UInt32 i = 0; i < inNumberFrames; ++i)
+                    for (UInt32 i = 0; i < numFrames; ++i)
                     {
                         inputChannels[0][i] = *shortData++ * (1.0f / 32768.0f);
                         inputChannels[1][i] = *shortData++ * (1.0f / 32768.0f);
@@ -242,7 +241,7 @@ private:
                     if (monoInputChannelNumber > 0)
                         ++shortData;
 
-                    for (UInt32 i = 0; i < inNumberFrames; ++i)
+                    for (UInt32 i = 0; i < numFrames; ++i)
                     {
                         inputChannels[0][i] = *shortData++ * (1.0f / 32768.0f);
                         ++shortData;
@@ -252,19 +251,18 @@ private:
            else
            {
                 for (int i = numInputChannels; --i >= 0;)
-                    zeromem (inputChannels[i], sizeof (float) * inNumberFrames);
+                    zeromem (inputChannels[i], sizeof (float) * numFrames);
            }
 
             callback->audioDeviceIOCallback ((const float**) inputChannels, numInputChannels,
-                                             outputChannels, numOutputChannels,
-                                             (int) inNumberFrames);
+                                             outputChannels, numOutputChannels, (int) numFrames);
 
-            short* shortData = (short*) ioData->mBuffers[0].mData;
+            short* shortData = (short*) data->mBuffers[0].mData;
             int n = 0;
 
             if (numOutputChannels >= 2)
             {
-                for (UInt32 i = 0; i < inNumberFrames; ++i)
+                for (UInt32 i = 0; i < numFrames; ++i)
                 {
                     shortData [n++] = (short) (outputChannels[0][i] * 32767.0f);
                     shortData [n++] = (short) (outputChannels[1][i] * 32767.0f);
@@ -272,7 +270,7 @@ private:
             }
             else if (numOutputChannels == 1)
             {
-                for (UInt32 i = 0; i < inNumberFrames; ++i)
+                for (UInt32 i = 0; i < numFrames; ++i)
                 {
                     const short s = (short) (outputChannels[monoOutputChannelNumber][i] * 32767.0f);
                     shortData [n++] = s;
@@ -281,12 +279,12 @@ private:
             }
             else
             {
-                zeromem (ioData->mBuffers[0].mData, 2 * sizeof (short) * inNumberFrames);
+                zeromem (data->mBuffers[0].mData, 2 * sizeof (short) * numFrames);
             }
         }
         else
         {
-            zeromem (ioData->mBuffers[0].mData, 2 * sizeof (short) * inNumberFrames);
+            zeromem (data->mBuffers[0].mData, 2 * sizeof (short) * numFrames);
         }
 
         return err;
@@ -301,14 +299,14 @@ private:
         AudioSessionGetProperty (kAudioSessionProperty_AudioInputAvailable, &size, &audioInputIsAvailable);
     }
 
-    void propertyChanged (AudioSessionPropertyID inID, UInt32 inDataSize, const void* inPropertyValue)
+    void routingChanged (const void* propertyValue)
     {
         if (! isRunning)
             return;
 
-        if (inPropertyValue != nullptr)
+        if (propertyValue != nullptr)
         {
-            CFDictionaryRef routeChangeDictionary = (CFDictionaryRef) inPropertyValue;
+            CFDictionaryRef routeChangeDictionary = (CFDictionaryRef) propertyValue;
 
             CFNumberRef routeChangeReasonRef = (CFNumberRef) CFDictionaryGetValue (routeChangeDictionary,
                                                                                    CFSTR (kAudioSession_AudioRouteChangeKey_Reason));
@@ -338,9 +336,9 @@ private:
         }
     }
 
-    void interruptionListener (UInt32 inInterruption)
+    void interruptionListener (const UInt32 interruptionType)
     {
-        /*if (inInterruption == kAudioSessionBeginInterruption)
+        /*if (interruptionType == kAudioSessionBeginInterruption)
         {
             isRunning = false;
             AudioOutputUnitStop (audioUnit);
@@ -351,11 +349,11 @@ private:
                                      @"Cancel"))
             {
                 isRunning = true;
-                propertyChanged (0, 0, 0);
+                routingChanged (nullptr);
             }
         }*/
 
-        if (inInterruption == kAudioSessionEndInterruption)
+        if (interruptionType == kAudioSessionEndInterruption)
         {
             isRunning = true;
             AudioSessionSetActive (true);
@@ -364,32 +362,32 @@ private:
     }
 
     //==================================================================================================
-    static OSStatus processStatic (void* inRefCon, AudioUnitRenderActionFlags* ioActionFlags, const AudioTimeStamp* inTimeStamp,
-                                   UInt32 inBusNumber, UInt32 inNumberFrames, AudioBufferList* ioData)
+    static OSStatus processStatic (void* client, AudioUnitRenderActionFlags* flags, const AudioTimeStamp* time,
+                                   UInt32 /*busNumber*/, UInt32 numFrames, AudioBufferList* data)
     {
-        return ((IPhoneAudioIODevice*) inRefCon)->process (ioActionFlags, inTimeStamp, inBusNumber, inNumberFrames, ioData);
+        return static_cast<IPhoneAudioIODevice*> (client)->process (flags, time, numFrames, data);
     }
 
-    static void propertyChangedStatic (void* inClientData, AudioSessionPropertyID inID, UInt32 inDataSize, const void* inPropertyValue)
+    static void routingChangedStatic (void* client, AudioSessionPropertyID, UInt32 /*inDataSize*/, const void* propertyValue)
    {
-        ((IPhoneAudioIODevice*) inClientData)->propertyChanged (inID, inDataSize, inPropertyValue);
+        static_cast<IPhoneAudioIODevice*> (client)->routingChanged (propertyValue);
    }
 
-    static void interruptionListenerStatic (void* inClientData, UInt32 inInterruption)
+    static void interruptionListenerStatic (void* client, UInt32 interruptionType)
     {
-        ((IPhoneAudioIODevice*) inClientData)->interruptionListener (inInterruption);
+        static_cast<IPhoneAudioIODevice*> (client)->interruptionListener (interruptionType);
     }
 
     //==================================================================================================
-    void resetFormat (const int numChannels)
+    void resetFormat (const int numChannels) noexcept
     {
-        memset (&format, 0, sizeof (format));
+        zerostruct (format);
         format.mFormatID = kAudioFormatLinearPCM;
-        format.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
+        format.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked | kAudioFormatFlagsNativeEndian;
         format.mBitsPerChannel = 8 * sizeof (short);
-        format.mChannelsPerFrame = 2;
+        format.mChannelsPerFrame = numChannels;
         format.mFramesPerPacket = 1;
-        format.mBytesPerFrame = format.mBytesPerPacket = 2 * sizeof (short);
+        format.mBytesPerFrame = format.mBytesPerPacket = numChannels * sizeof (short);
     }
 
     bool createAudioUnit()
@@ -421,19 +419,23 @@ private:
             AudioUnitSetProperty (audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &one, sizeof (one));
         }
 
-        AudioChannelLayout layout;
-        layout.mChannelBitmap = 0;
-        layout.mNumberChannelDescriptions = 0;
-        layout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
-        AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Input, 0, &layout, sizeof (layout));
-        AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Output, 0, &layout, sizeof (layout));
+        {
+            AudioChannelLayout layout;
+            layout.mChannelBitmap = 0;
+            layout.mNumberChannelDescriptions = 0;
+            layout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
+            AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Input, 0, &layout, sizeof (layout));
+            AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Output, 0, &layout, sizeof (layout));
+        }
 
-        AURenderCallbackStruct inputProc;
-        inputProc.inputProc = processStatic;
-        inputProc.inputProcRefCon = this;
-        AudioUnitSetProperty (audioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &inputProc, sizeof (inputProc));
+        {
+            AURenderCallbackStruct inputProc;
+            inputProc.inputProc = processStatic;
+            inputProc.inputProcRefCon = this;
+            AudioUnitSetProperty (audioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &inputProc, sizeof (inputProc));
+        }
 
-        AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &format, sizeof (format));
+        AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &format, sizeof (format));
         AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, sizeof (format));
 
         AudioUnitInitialize (audioUnit);
diff --git a/modules/juce_audio_formats/codecs/juce_CoreAudioFormat.cpp b/modules/juce_audio_formats/codecs/juce_CoreAudioFormat.cpp
index b2348343eb..ddec86bdb2 100644
--- a/modules/juce_audio_formats/codecs/juce_CoreAudioFormat.cpp
+++ b/modules/juce_audio_formats/codecs/juce_CoreAudioFormat.cpp
@@ -93,7 +93,7 @@ public:
         destinationAudioFormat.mSampleRate = sampleRate;
         destinationAudioFormat.mFormatID = kAudioFormatLinearPCM;
-        destinationAudioFormat.mFormatFlags = kLinearPCMFormatFlagIsFloat | kLinearPCMFormatFlagIsNonInterleaved;
+        destinationAudioFormat.mFormatFlags = kLinearPCMFormatFlagIsFloat | kLinearPCMFormatFlagIsNonInterleaved | kAudioFormatFlagsNativeEndian;
         destinationAudioFormat.mBitsPerChannel = sizeof (float) * 8;
         destinationAudioFormat.mChannelsPerFrame = numChannels;
         destinationAudioFormat.mBytesPerFrame = sizeof (float);
diff --git a/modules/juce_audio_processors/format_types/juce_AudioUnitPluginFormat.mm b/modules/juce_audio_processors/format_types/juce_AudioUnitPluginFormat.mm
index a5834682c7..d681628a2f 100644
--- a/modules/juce_audio_processors/format_types/juce_AudioUnitPluginFormat.mm
+++ b/modules/juce_audio_processors/format_types/juce_AudioUnitPluginFormat.mm
@@ -398,7 +398,7 @@ public:
         AudioStreamBasicDescription stream = { 0 };
         stream.mSampleRate = sampleRate_;
         stream.mFormatID = kAudioFormatLinearPCM;
-        stream.mFormatFlags = kAudioFormatFlagsNativeFloatPacked | kAudioFormatFlagIsNonInterleaved;
+        stream.mFormatFlags = kAudioFormatFlagsNativeFloatPacked | kAudioFormatFlagIsNonInterleaved | kAudioFormatFlagsNativeEndian;
         stream.mFramesPerPacket = 1;
         stream.mBytesPerPacket = 4;
         stream.mBytesPerFrame = 4;