Mirror of https://github.com/juce-framework/JUCE.git (synced 2026-01-10 23:44:24 +00:00)
Update code to use new AudioData interleaving/deinterleaving helper methods
Commit: 3e606cc378
Parent: de64263416
5 changed files with 78 additions and 111 deletions
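Every hunk below follows the same pattern: a hand-written per-channel conversion loop built from AudioData::Pointer objects is removed, and a single call to the new AudioData::deinterleaveSamples (device buffer to JUCE buffer) or AudioData::interleaveSamples (JUCE buffer to device buffer) helper is added, with the source and destination sample formats and endianness supplied as template arguments. As a minimal sketch of the capture direction, modelled on the first hunk and assuming the JUCE audio headers are included and the juce namespace is in scope (the function and variable names here are illustrative, not identifiers from the JUCE sources):

    // Deinterleave an int16 device buffer into a planar AudioBuffer<float>.
    // The call sites in this commit pass Int16 sample data as uint16*, so the sketch does the same.
    static void copyCaptureBuffer (const int16* interleavedInput, int numDeviceChannels,
                                   AudioBuffer<float>& floatBuffer, int numSamples)
    {
        AudioData::deinterleaveSamples<AudioData::Int16,   AudioData::NativeEndian,
                                       AudioData::Float32, AudioData::NativeEndian> (reinterpret_cast<const uint16*> (interleavedInput),
                                                                                     numDeviceChannels,
                                                                                     floatBuffer.getArrayOfWritePointers(),
                                                                                     floatBuffer.getNumChannels(),
                                                                                     numSamples);
    }

Note that the first hunk passes different source and destination channel counts, and the loop it removes used to clear any destination channels beyond the device's channel count, so the helper is presumably expected to handle that mismatch itself; the diff does not state this explicitly.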
@@ -339,20 +339,12 @@ public:
 
             jshort* const src = env->GetShortArrayElements (audioBuffer, nullptr);
 
-            for (int chan = 0; chan < inputChannelBuffer.getNumChannels(); ++chan)
-            {
-                AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::NonConst> d (inputChannelBuffer.getWritePointer (chan));
-
-                if (chan < numDeviceInputChannels)
-                {
-                    AudioData::Pointer <AudioData::Int16, AudioData::NativeEndian, AudioData::Interleaved, AudioData::Const> s (src + chan, numDeviceInputChannels);
-                    d.convertSamples (s, actualBufferSize);
-                }
-                else
-                {
-                    d.clearSamples (actualBufferSize);
-                }
-            }
+            AudioData::deinterleaveSamples<AudioData::Int16, AudioData::NativeEndian,
+                                           AudioData::Float32, AudioData::NativeEndian> (reinterpret_cast<const uint16*> (src),
+                                                                                         numDeviceInputChannels,
+                                                                                         inputChannelBuffer.getArrayOfWritePointers(),
+                                                                                         inputChannelBuffer.getNumChannels(),
+                                                                                         actualBufferSize);
 
             env->ReleaseShortArrayElements (audioBuffer, src, 0);
         }
@@ -382,14 +374,12 @@ public:
 
             jshort* const dest = env->GetShortArrayElements (audioBuffer, nullptr);
 
-            for (int chan = 0; chan < numDeviceOutputChannels; ++chan)
-            {
-                AudioData::Pointer <AudioData::Int16, AudioData::NativeEndian, AudioData::Interleaved, AudioData::NonConst> d (dest + chan, numDeviceOutputChannels);
-
-                const float* const sourceChanData = outputChannelBuffer.getReadPointer (jmin (chan, outputChannelBuffer.getNumChannels() - 1));
-                AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::Const> s (sourceChanData);
-                d.convertSamples (s, actualBufferSize);
-            }
+            AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
+                                         AudioData::Int16, AudioData::NativeEndian> (outputChannelBuffer.getArrayOfReadPointers(),
+                                                                                     outputChannelBuffer.getNumChannels(),
+                                                                                     reinterpret_cast<uint16*> (dest),
+                                                                                     numDeviceOutputChannels,
+                                                                                     actualBufferSize);
 
             env->ReleaseShortArrayElements (audioBuffer, dest, 0);
             jint numWritten = env->CallIntMethod (outputDevice, AudioTrack.write, audioBuffer, 0, actualBufferSize * numDeviceOutputChannels);
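The playback path is the mirror image of the capture path: the planar float output channels are interleaved straight into the device's int16 buffer. A minimal sketch of that direction, modelled on the hunk above, under the same assumptions and with illustrative names only:

    // Interleave a planar AudioBuffer<float> into an int16 device buffer.
    static void fillPlaybackBuffer (const AudioBuffer<float>& floatBuffer, int16* interleavedOutput,
                                    int numDeviceChannels, int numSamples)
    {
        AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
                                     AudioData::Int16,   AudioData::NativeEndian> (floatBuffer.getArrayOfReadPointers(),
                                                                                   floatBuffer.getNumChannels(),
                                                                                   reinterpret_cast<uint16*> (interleavedOutput),
                                                                                   numDeviceChannels,
                                                                                   numSamples);
    }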
@@ -46,28 +46,26 @@ struct OboeAudioIODeviceBufferHelpers<int16>
 
     static void convertFromOboe (const int16* srcInterleaved, AudioBuffer<float>& audioBuffer, int numSamples)
     {
-        for (int i = 0; i < audioBuffer.getNumChannels(); ++i)
-        {
-            using DstSampleType = AudioData::Pointer<AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::NonConst>;
-            using SrcSampleType = AudioData::Pointer<AudioData::Int16, AudioData::NativeEndian, AudioData::Interleaved, AudioData::Const>;
+        const auto numChannels = audioBuffer.getNumChannels();
 
-            DstSampleType dstData (audioBuffer.getWritePointer (i));
-            SrcSampleType srcData (srcInterleaved + i, audioBuffer.getNumChannels());
-            dstData.convertSamples (srcData, numSamples);
-        }
+        AudioData::deinterleaveSamples<AudioData::Int16, AudioData::NativeEndian,
+                                       AudioData::Float32, AudioData::NativeEndian> (reinterpret_cast<const uint16*> (srcInterleaved),
+                                                                                     numChannels,
+                                                                                     audioBuffer.getArrayOfWritePointers(),
+                                                                                     numChannels,
+                                                                                     numSamples);
     }
 
     static void convertToOboe (const AudioBuffer<float>& audioBuffer, int16* dstInterleaved, int numSamples)
     {
-        for (int i = 0; i < audioBuffer.getNumChannels(); ++i)
-        {
-            using DstSampleType = AudioData::Pointer<AudioData::Int16, AudioData::NativeEndian, AudioData::Interleaved, AudioData::NonConst>;
-            using SrcSampleType = AudioData::Pointer<AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::Const>;
+        const auto numChannels = audioBuffer.getNumChannels();
 
-            DstSampleType dstData (dstInterleaved + i, audioBuffer.getNumChannels());
-            SrcSampleType srcData (audioBuffer.getReadPointer (i));
-            dstData.convertSamples (srcData, numSamples);
-        }
+        AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
+                                     AudioData::Int16, AudioData::NativeEndian> (audioBuffer.getArrayOfReadPointers(),
+                                                                                 numChannels,
+                                                                                 reinterpret_cast<uint16*> (dstInterleaved),
+                                                                                 numChannels,
+                                                                                 numSamples);
     }
 };
@@ -98,15 +96,12 @@ struct OboeAudioIODeviceBufferHelpers<float>
             // No need to convert, we instructed the buffer to point to the src data directly already
             jassert (audioBuffer.getWritePointer (0) != srcInterleaved);
 
-            for (int i = 0; i < numChannels; ++i)
-            {
-                using DstSampleType = AudioData::Pointer<AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::NonConst>;
-                using SrcSampleType = AudioData::Pointer<AudioData::Float32, AudioData::NativeEndian, AudioData::Interleaved, AudioData::Const>;
-
-                DstSampleType dstData (audioBuffer.getWritePointer (i));
-                SrcSampleType srcData (srcInterleaved + i, audioBuffer.getNumChannels());
-                dstData.convertSamples (srcData, numSamples);
-            }
+            AudioData::deinterleaveSamples<AudioData::Float32, AudioData::NativeEndian,
+                                           AudioData::Float32, AudioData::NativeEndian> (srcInterleaved,
+                                                                                         numChannels,
+                                                                                         audioBuffer.getArrayOfWritePointers(),
+                                                                                         numChannels,
+                                                                                         numSamples);
         }
     }
@@ -119,15 +114,12 @@ struct OboeAudioIODeviceBufferHelpers<float>
             // No need to convert, we instructed the buffer to point to the src data directly already
             jassert (audioBuffer.getReadPointer (0) != dstInterleaved);
 
-            for (int i = 0; i < numChannels; ++i)
-            {
-                using DstSampleType = AudioData::Pointer<AudioData::Float32, AudioData::NativeEndian, AudioData::Interleaved, AudioData::NonConst>;
-                using SrcSampleType = AudioData::Pointer<AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::Const>;
-
-                DstSampleType dstData (dstInterleaved + i, audioBuffer.getNumChannels());
-                SrcSampleType srcData (audioBuffer.getReadPointer (i));
-                dstData.convertSamples (srcData, numSamples);
-            }
+            AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
+                                         AudioData::Float32, AudioData::NativeEndian> (audioBuffer.getArrayOfReadPointers(),
+                                                                                       numChannels,
+                                                                                       dstInterleaved,
+                                                                                       numChannels,
+                                                                                       numSamples);
         }
     }
 };
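In the Oboe float specialisation above, both template format arguments are AudioData::Float32, so the helpers perform a pure layout change (interleaved to planar, or the reverse) with no sample-format conversion and no pointer casting. A minimal sketch of that float-to-float case, with illustrative names and under the same assumptions as the earlier sketches:

    // Pure deinterleave: same sample format on both sides, only the layout changes.
    static void copyFloatCaptureBuffer (const float* interleavedInput,
                                        AudioBuffer<float>& floatBuffer, int numSamples)
    {
        const auto numChannels = floatBuffer.getNumChannels();

        AudioData::deinterleaveSamples<AudioData::Float32, AudioData::NativeEndian,
                                       AudioData::Float32, AudioData::NativeEndian> (interleavedInput,
                                                                                     numChannels,
                                                                                     floatBuffer.getArrayOfWritePointers(),
                                                                                     numChannels,
                                                                                     numSamples);
    }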
@@ -196,29 +196,26 @@ struct BufferHelpers<int16>
 
     static void convertFromOpenSL (const int16* srcInterleaved, AudioBuffer<float>& audioBuffer)
     {
-        for (int i = 0; i < audioBuffer.getNumChannels(); ++i)
-        {
-            using DstSampleType = AudioData::Pointer<AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::NonConst>;
-            using SrcSampleType = AudioData::Pointer<AudioData::Int16, AudioData::LittleEndian, AudioData::Interleaved, AudioData::Const>;
+        const auto numChannels = audioBuffer.getNumChannels();
 
-            DstSampleType dstData (audioBuffer.getWritePointer (i));
-            SrcSampleType srcData (srcInterleaved + i, audioBuffer.getNumChannels());
-            dstData.convertSamples (srcData, audioBuffer.getNumSamples());
-        }
+        AudioData::deinterleaveSamples<AudioData::Int16, AudioData::LittleEndian,
+                                       AudioData::Float32, AudioData::NativeEndian> (reinterpret_cast<const uint16*> (srcInterleaved),
+                                                                                     numChannels,
+                                                                                     audioBuffer.getArrayOfWritePointers(),
+                                                                                     numChannels,
+                                                                                     audioBuffer.getNumSamples());
    }
 
     static void convertToOpenSL (const AudioBuffer<float>& audioBuffer, int16* dstInterleaved)
     {
-        for (int i = 0; i < audioBuffer.getNumChannels(); ++i)
-        {
-            using DstSampleType = AudioData::Pointer<AudioData::Int16, AudioData::LittleEndian, AudioData::Interleaved, AudioData::NonConst>;
-            using SrcSampleType = AudioData::Pointer<AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::Const>;
+        const auto numChannels = audioBuffer.getNumChannels();
 
-            DstSampleType dstData (dstInterleaved + i, audioBuffer.getNumChannels());
-            SrcSampleType srcData (audioBuffer.getReadPointer (i));
-
-            dstData.convertSamples (srcData, audioBuffer.getNumSamples());
-        }
+        AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
+                                     AudioData::Int16, AudioData::LittleEndian> (audioBuffer.getArrayOfReadPointers(),
+                                                                                 numChannels,
+                                                                                 reinterpret_cast<uint16*> (dstInterleaved),
+                                                                                 numChannels,
+                                                                                 audioBuffer.getNumSamples());
     }
 
 };
@@ -249,41 +246,38 @@ struct BufferHelpers<float>
 
     static void convertFromOpenSL (const float* srcInterleaved, AudioBuffer<float>& audioBuffer)
     {
-        if (audioBuffer.getNumChannels() == 1)
+        const auto numChannels = audioBuffer.getNumChannels();
+
+        if (numChannels == 1)
         {
             jassert (srcInterleaved == audioBuffer.getWritePointer (0));
             return;
         }
 
-        for (int i = 0; i < audioBuffer.getNumChannels(); ++i)
-        {
-            using DstSampleType = AudioData::Pointer<AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::NonConst>;
-            using SrcSampleType = AudioData::Pointer<AudioData::Float32, AudioData::LittleEndian, AudioData::Interleaved, AudioData::Const>;
-
-            DstSampleType dstData (audioBuffer.getWritePointer (i));
-            SrcSampleType srcData (srcInterleaved + i, audioBuffer.getNumChannels());
-            dstData.convertSamples (srcData, audioBuffer.getNumSamples());
-        }
+        AudioData::deinterleaveSamples<AudioData::Float32, AudioData::LittleEndian,
+                                       AudioData::Float32, AudioData::NativeEndian> (srcInterleaved,
+                                                                                     numChannels,
+                                                                                     audioBuffer.getArrayOfWritePointers(),
+                                                                                     numChannels,
+                                                                                     audioBuffer.getNumSamples());
     }
 
     static void convertToOpenSL (const AudioBuffer<float>& audioBuffer, float* dstInterleaved)
     {
-        if (audioBuffer.getNumChannels() == 1)
+        const auto numChannels = audioBuffer.getNumChannels();
+
+        if (numChannels == 1)
         {
             jassert (dstInterleaved == audioBuffer.getReadPointer (0));
             return;
         }
 
-        for (int i = 0; i < audioBuffer.getNumChannels(); ++i)
-        {
-            using DstSampleType = AudioData::Pointer<AudioData::Float32, AudioData::LittleEndian, AudioData::Interleaved, AudioData::NonConst>;
-            using SrcSampleType = AudioData::Pointer<AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::Const>;
-
-            DstSampleType dstData (dstInterleaved + i, audioBuffer.getNumChannels());
-            SrcSampleType srcData (audioBuffer.getReadPointer (i));
-
-            dstData.convertSamples (srcData, audioBuffer.getNumSamples());
-        }
+        AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
+                                     AudioData::Float32, AudioData::LittleEndian> (audioBuffer.getArrayOfReadPointers(),
+                                                                                   numChannels,
+                                                                                   dstInterleaved,
+                                                                                   numChannels,
+                                                                                   audioBuffer.getNumSamples());
     }
 };
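The OpenSL hunks are the only ones that name AudioData::LittleEndian explicitly: the OpenSL side of each conversion is declared little-endian while the JUCE float buffers use AudioData::NativeEndian, and the helpers fold any byte-order handling into the same pass as the interleaving. On the little-endian targets Android runs on, NativeEndian is expected to resolve to LittleEndian (an assumption about JUCE's endianness typedefs, not something this diff states), which the following hedged check makes explicit:

    #include <type_traits>

    // Assumption: on a little-endian build, AudioData::NativeEndian aliases AudioData::LittleEndian,
    // so the LittleEndian/NativeEndian pairs in the OpenSL hunks involve no actual byte swapping.
    static_assert (std::is_same<AudioData::NativeEndian, AudioData::LittleEndian>::value,
                   "this sketch assumes a little-endian target");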