Mirror of https://github.com/juce-framework/JUCE.git

Commit 69d4e0186f (parent d9a3a2605d)

Update code to use new AudioData interleaving/deinterleaving API

6 changed files with 68 additions and 81 deletions
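For orientation, every change below follows the same pattern: the old form of AudioData::interleaveSamples / deinterleaveSamples, which took the sample formats as template parameters and the pointers and channel counts as a flat argument list, is replaced by a call that wraps each side in a source/dest descriptor. A minimal before/after sketch of one such call site; the variables src, dst, numChannels and numSamples are illustrative, not taken from the diff:

    // Assumed to be in scope (illustrative):
    //   const float** src  — one pointer per channel of deinterleaved audio
    //   float*        dst  — interleaved destination, numChannels * numSamples floats
    //   int numChannels, numSamples

    // Old style (removed in this commit): formats as template parameters,
    // pointers and channel counts as positional arguments.
    AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
                                 AudioData::Float32, AudioData::NativeEndian> (src, numChannels,
                                                                               dst, numChannels,
                                                                               numSamples);

    // New style (added in this commit): each buffer is described by a wrapper
    // carrying its format, data pointer and channel count.
    using Format = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;

    AudioData::interleaveSamples (AudioData::NonInterleavedSource<Format> { src, numChannels },
                                  AudioData::InterleavedDest<Format>      { dst, numChannels },
                                  numSamples);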
@@ -434,16 +434,21 @@ void AudioDataConverters::convertFormatToFloat (DataFormat sourceFormat, const v
//==============================================================================
void AudioDataConverters::interleaveSamples (const float** source, float* dest, int numSamples, int numChannels)
{
-    AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
-                                 AudioData::Float32, AudioData::NativeEndian> (source, numChannels, dest, numChannels, numSamples);
+    using Format = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
+
+    AudioData::interleaveSamples (AudioData::NonInterleavedSource<Format> { source, numChannels },
+                                  AudioData::InterleavedDest<Format>      { dest,   numChannels },
+                                  numSamples);
}

void AudioDataConverters::deinterleaveSamples (const float* source, float** dest, int numSamples, int numChannels)
{
-    AudioData::deinterleaveSamples<AudioData::Float32, AudioData::NativeEndian,
-                                   AudioData::Float32, AudioData::NativeEndian> (source, numChannels, dest, numChannels, numSamples);
-}
+    using Format = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
+
+    AudioData::deinterleaveSamples (AudioData::InterleavedSource<Format>  { source, numChannels },
+                                    AudioData::NonInterleavedDest<Format> { dest,   numChannels },
+                                    numSamples);
+}

//==============================================================================
//==============================================================================

@@ -574,6 +579,8 @@ public:
        beginTest ("Round-trip conversion: Float32");
        Test1 <AudioData::Float32>::test (*this, r);

+       using Format = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
+
        beginTest ("Interleaving");
        {
            constexpr auto numChannels = 4;

@@ -586,9 +593,8 @@ public:
                for (int i = 0; i < numSamples; ++i)
                    sourceBuffer.setSample (ch, i, r.nextFloat());

-           AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
-                                        AudioData::Float32, AudioData::NativeEndian> (sourceBuffer.getArrayOfReadPointers(), numChannels,
-                                                                                      destBuffer.getWritePointer (0), numChannels,
+           AudioData::interleaveSamples (AudioData::NonInterleavedSource<Format> { sourceBuffer.getArrayOfReadPointers(), numChannels },
+                                         AudioData::InterleavedDest<Format>      { destBuffer.getWritePointer (0), numChannels },
                                          numSamples);

            for (int ch = 0; ch < numChannels; ++ch)

@@ -608,9 +614,8 @@ public:
                for (int i = 0; i < numSamples; ++i)
                    sourceBuffer.setSample (0, ch + (i * numChannels), r.nextFloat());

-           AudioData::deinterleaveSamples<AudioData::Float32, AudioData::NativeEndian,
-                                          AudioData::Float32, AudioData::NativeEndian> (sourceBuffer.getReadPointer (0), numChannels,
-                                                                                        destBuffer.getArrayOfWritePointers(), numChannels,
+           AudioData::deinterleaveSamples (AudioData::InterleavedSource<Format>  { sourceBuffer.getReadPointer (0), numChannels },
+                                           AudioData::NonInterleavedDest<Format> { destBuffer.getArrayOfWritePointers(), numChannels },
                                            numSamples);

            for (int ch = 0; ch < numChannels; ++ch)
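Taken together, the converter and unit-test hunks above show the complete float-to-float usage of the new wrappers. A self-contained sketch of the same round trip outside the test harness; buffer sizes and names are illustrative, and it assumes the juce_audio_basics module is available:

    #include <juce_audio_basics/juce_audio_basics.h>

    using namespace juce;

    // Interleave a multi-channel float buffer and then split it apart again,
    // using the wrapper-based AudioData API introduced by this commit.
    static void roundTripFloat32()
    {
        using Format = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;

        constexpr int numChannels = 4;
        constexpr int numSamples  = 512;

        AudioBuffer<float> source      (numChannels, numSamples);      // one pointer per channel
        AudioBuffer<float> interleaved (1, numChannels * numSamples);  // a single interleaved row
        AudioBuffer<float> restored    (numChannels, numSamples);

        Random random;

        for (int ch = 0; ch < numChannels; ++ch)
            for (int i = 0; i < numSamples; ++i)
                source.setSample (ch, i, random.nextFloat());

        // channel-per-pointer -> interleaved stream
        AudioData::interleaveSamples (AudioData::NonInterleavedSource<Format> { source.getArrayOfReadPointers(), numChannels },
                                      AudioData::InterleavedDest<Format>      { interleaved.getWritePointer (0), numChannels },
                                      numSamples);

        // interleaved stream -> channel-per-pointer
        AudioData::deinterleaveSamples (AudioData::InterleavedSource<Format>  { interleaved.getReadPointer (0), numChannels },
                                        AudioData::NonInterleavedDest<Format> { restored.getArrayOfWritePointers(), numChannels },
                                        numSamples);

        // 'restored' now holds the same samples as 'source' (float32 to float32 is lossless).
    }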
@@ -326,6 +326,9 @@ public:
        JNIEnv* env = getEnv();
        jshortArray audioBuffer = env->NewShortArray (actualBufferSize * jmax (numDeviceOutputChannels, numDeviceInputChannels));

+       using NativeInt16   = AudioData::Format<AudioData::Int16,   AudioData::NativeEndian>;
+       using NativeFloat32 = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
+
        while (! threadShouldExit())
        {
            if (inputDevice != nullptr)

@@ -339,11 +342,8 @@ public:

                    jshort* const src = env->GetShortArrayElements (audioBuffer, nullptr);

-                   AudioData::deinterleaveSamples<AudioData::Int16, AudioData::NativeEndian,
-                                                  AudioData::Float32, AudioData::NativeEndian> (reinterpret_cast<const uint16*> (src),
-                                                                                                numDeviceInputChannels,
-                                                                                                inputChannelBuffer.getArrayOfWritePointers(),
-                                                                                                inputChannelBuffer.getNumChannels(),
+                   AudioData::deinterleaveSamples (AudioData::InterleavedSource<NativeInt16>    { reinterpret_cast<const uint16*> (src), numDeviceInputChannels },
+                                                   AudioData::NonInterleavedDest<NativeFloat32> { inputChannelBuffer.getArrayOfWritePointers(), inputChannelBuffer.getNumChannels() },
                                                    actualBufferSize);

                    env->ReleaseShortArrayElements (audioBuffer, src, 0);

@@ -374,11 +374,8 @@ public:

                jshort* const dest = env->GetShortArrayElements (audioBuffer, nullptr);

-               AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
-                                            AudioData::Int16, AudioData::NativeEndian> (outputChannelBuffer.getArrayOfReadPointers(),
-                                                                                        outputChannelBuffer.getNumChannels(),
-                                                                                        reinterpret_cast<uint16*> (dest),
-                                                                                        numDeviceOutputChannels,
+               AudioData::interleaveSamples (AudioData::NonInterleavedSource<NativeFloat32> { outputChannelBuffer.getArrayOfReadPointers(), outputChannelBuffer.getNumChannels() },
+                                             AudioData::InterleavedDest<NativeInt16>        { reinterpret_cast<uint16*> (dest), numDeviceOutputChannels },
                                              actualBufferSize);

                env->ReleaseShortArrayElements (audioBuffer, dest, 0);
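The hunks above convert between the interleaved int16 buffers exchanged with the JNI layer and JUCE's non-interleaved float buffers, so the source and destination wrappers now also carry the format conversion. A minimal sketch of the capture direction under those assumptions (the function name is illustrative); note that the Int16 format reads through a uint16 pointer, hence the cast, exactly as in the hunks above:

    #include <juce_audio_basics/juce_audio_basics.h>

    using namespace juce;

    // Deinterleave an interleaved int16 stream into a float AudioBuffer,
    // converting the sample format in the same pass.
    static void int16ToFloatChannels (const int16* interleavedInput,
                                      AudioBuffer<float>& floatBuffer,
                                      int numSamples)
    {
        using NativeInt16   = AudioData::Format<AudioData::Int16,   AudioData::NativeEndian>;
        using NativeFloat32 = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;

        const auto numChannels = floatBuffer.getNumChannels();

        AudioData::deinterleaveSamples (AudioData::InterleavedSource<NativeInt16>    { reinterpret_cast<const uint16*> (interleavedInput), numChannels },
                                        AudioData::NonInterleavedDest<NativeFloat32> { floatBuffer.getArrayOfWritePointers(), numChannels },
                                        numSamples);
    }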
@@ -44,15 +44,15 @@ struct OboeAudioIODeviceBufferHelpers<int16>

    static bool referAudioBufferDirectlyToOboeIfPossible (int16*, AudioBuffer<float>&, int)  { return false; }

+   using NativeInt16   = AudioData::Format<AudioData::Int16,   AudioData::NativeEndian>;
+   using NativeFloat32 = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
+
    static void convertFromOboe (const int16* srcInterleaved, AudioBuffer<float>& audioBuffer, int numSamples)
    {
        const auto numChannels = audioBuffer.getNumChannels();

-       AudioData::deinterleaveSamples<AudioData::Int16, AudioData::NativeEndian,
-                                      AudioData::Float32, AudioData::NativeEndian> (reinterpret_cast<const uint16*> (srcInterleaved),
-                                                                                    numChannels,
-                                                                                    audioBuffer.getArrayOfWritePointers(),
-                                                                                    numChannels,
+       AudioData::deinterleaveSamples (AudioData::InterleavedSource<NativeInt16>    { reinterpret_cast<const uint16*> (srcInterleaved), numChannels },
+                                       AudioData::NonInterleavedDest<NativeFloat32> { audioBuffer.getArrayOfWritePointers(), numChannels },
                                        numSamples);
    }

@@ -60,11 +60,8 @@ struct OboeAudioIODeviceBufferHelpers<int16>
    {
        const auto numChannels = audioBuffer.getNumChannels();

-       AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
-                                    AudioData::Int16, AudioData::NativeEndian> (audioBuffer.getArrayOfReadPointers(),
-                                                                                numChannels,
-                                                                                reinterpret_cast<uint16*> (dstInterleaved),
-                                                                                numChannels,
+       AudioData::interleaveSamples (AudioData::NonInterleavedSource<NativeFloat32> { audioBuffer.getArrayOfReadPointers(), numChannels },
+                                     AudioData::InterleavedDest<NativeInt16>        { reinterpret_cast<uint16*> (dstInterleaved), numChannels },
                                      numSamples);
    }
};

@@ -87,6 +84,8 @@ struct OboeAudioIODeviceBufferHelpers<float>
        return false;
    }

+   using Format = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
+
    static void convertFromOboe (const float* srcInterleaved, AudioBuffer<float>& audioBuffer, int numSamples)
    {
        auto numChannels = audioBuffer.getNumChannels();

@@ -96,11 +95,8 @@ struct OboeAudioIODeviceBufferHelpers<float>
            // No need to convert, we instructed the buffer to point to the src data directly already
            jassert (audioBuffer.getWritePointer (0) != srcInterleaved);

-           AudioData::deinterleaveSamples<AudioData::Float32, AudioData::NativeEndian,
-                                          AudioData::Float32, AudioData::NativeEndian> (srcInterleaved,
-                                                                                        numChannels,
-                                                                                        audioBuffer.getArrayOfWritePointers(),
-                                                                                        numChannels,
+           AudioData::deinterleaveSamples (AudioData::InterleavedSource<Format>  { srcInterleaved, numChannels },
+                                           AudioData::NonInterleavedDest<Format> { audioBuffer.getArrayOfWritePointers(), numChannels },
                                            numSamples);
        }
    }

@@ -114,11 +110,8 @@ struct OboeAudioIODeviceBufferHelpers<float>
            // No need to convert, we instructed the buffer to point to the src data directly already
            jassert (audioBuffer.getReadPointer (0) != dstInterleaved);

-           AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
-                                        AudioData::Float32, AudioData::NativeEndian> (audioBuffer.getArrayOfReadPointers(),
-                                                                                      numChannels,
-                                                                                      dstInterleaved,
-                                                                                      numChannels,
+           AudioData::interleaveSamples (AudioData::NonInterleavedSource<Format> { audioBuffer.getArrayOfReadPointers(), numChannels },
+                                         AudioData::InterleavedDest<Format>      { dstInterleaved, numChannels },
                                          numSamples);
        }
    }
@@ -194,15 +194,15 @@ struct BufferHelpers<int16>

    static void prepareCallbackBuffer (AudioBuffer<float>&, int16*) {}

+   using LittleEndianInt16 = AudioData::Format<AudioData::Int16,   AudioData::LittleEndian>;
+   using NativeFloat32     = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
+
    static void convertFromOpenSL (const int16* srcInterleaved, AudioBuffer<float>& audioBuffer)
    {
        const auto numChannels = audioBuffer.getNumChannels();

-       AudioData::deinterleaveSamples<AudioData::Int16, AudioData::LittleEndian,
-                                      AudioData::Float32, AudioData::NativeEndian> (reinterpret_cast<const uint16*> (srcInterleaved),
-                                                                                    numChannels,
-                                                                                    audioBuffer.getArrayOfWritePointers(),
-                                                                                    numChannels,
+       AudioData::deinterleaveSamples (AudioData::InterleavedSource<LittleEndianInt16> { reinterpret_cast<const uint16*> (srcInterleaved), numChannels },
+                                       AudioData::NonInterleavedDest<NativeFloat32>    { audioBuffer.getArrayOfWritePointers(), numChannels },
                                        audioBuffer.getNumSamples());
    }

@@ -210,11 +210,8 @@ struct BufferHelpers<int16>
    {
        const auto numChannels = audioBuffer.getNumChannels();

-       AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
-                                    AudioData::Int16, AudioData::LittleEndian> (audioBuffer.getArrayOfReadPointers(),
-                                                                                numChannels,
-                                                                                reinterpret_cast<uint16*> (dstInterleaved),
-                                                                                numChannels,
+       AudioData::interleaveSamples (AudioData::NonInterleavedSource<NativeFloat32> { audioBuffer.getArrayOfReadPointers(), numChannels },
+                                     AudioData::InterleavedDest<LittleEndianInt16>  { reinterpret_cast<uint16*> (dstInterleaved), numChannels },
                                      audioBuffer.getNumSamples());
    }

@@ -244,6 +241,9 @@ struct BufferHelpers<float>
        audioBuffer.setDataToReferTo (&native, 1, audioBuffer.getNumSamples());
    }

+   using LittleEndianFloat32 = AudioData::Format<AudioData::Float32, AudioData::LittleEndian>;
+   using NativeFloat32       = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
+
    static void convertFromOpenSL (const float* srcInterleaved, AudioBuffer<float>& audioBuffer)
    {
        const auto numChannels = audioBuffer.getNumChannels();

@@ -254,11 +254,8 @@ struct BufferHelpers<float>
            return;
        }

-       AudioData::deinterleaveSamples<AudioData::Float32, AudioData::LittleEndian,
-                                      AudioData::Float32, AudioData::NativeEndian> (srcInterleaved,
-                                                                                    numChannels,
-                                                                                    audioBuffer.getArrayOfWritePointers(),
-                                                                                    numChannels,
+       AudioData::deinterleaveSamples (AudioData::InterleavedSource<LittleEndianFloat32> { srcInterleaved, numChannels },
+                                       AudioData::NonInterleavedDest<NativeFloat32>      { audioBuffer.getArrayOfWritePointers(), numChannels },
                                        audioBuffer.getNumSamples());
    }

@@ -272,11 +269,8 @@ struct BufferHelpers<float>
            return;
        }

-       AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
-                                    AudioData::Float32, AudioData::LittleEndian> (audioBuffer.getArrayOfReadPointers(),
-                                                                                  numChannels,
-                                                                                  dstInterleaved,
-                                                                                  numChannels,
+       AudioData::interleaveSamples (AudioData::NonInterleavedSource<NativeFloat32>  { audioBuffer.getArrayOfReadPointers(), numChannels },
+                                     AudioData::InterleavedDest<LittleEndianFloat32> { dstInterleaved, numChannels },
                                      audioBuffer.getNumSamples());
    }
};
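The OpenSL helpers above differ from the other Android paths mainly in that the native stream's endianness is spelled out: the OpenSL side is declared LittleEndian while the JUCE float buffers stay NativeEndian, and the converter handles any byte swap along with the interleaving. A sketch of the playback direction under those assumptions (the function name is illustrative):

    #include <juce_audio_basics/juce_audio_basics.h>

    using namespace juce;

    // Interleave native-endian float channels into a little-endian int16 stream,
    // mirroring the int16 output path in the hunks above.
    static void floatChannelsToLittleEndianInt16 (const AudioBuffer<float>& floatBuffer,
                                                  int16* interleavedOutput)
    {
        using NativeFloat32     = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
        using LittleEndianInt16 = AudioData::Format<AudioData::Int16,   AudioData::LittleEndian>;

        const auto numChannels = floatBuffer.getNumChannels();

        AudioData::interleaveSamples (AudioData::NonInterleavedSource<NativeFloat32> { floatBuffer.getArrayOfReadPointers(), numChannels },
                                      AudioData::InterleavedDest<LittleEndianInt16>  { reinterpret_cast<uint16*> (interleavedOutput), numChannels },
                                      floatBuffer.getNumSamples());
    }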
@@ -132,9 +132,8 @@ private:

        source->source->getNextAudioBlock (info);

-       AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
-                                    AudioData::Int16, AudioData::LittleEndian> (tempBuffer.getArrayOfReadPointers(), 2,
-                                                                                reinterpret_cast<uint16*> (buffer), 2,
+       AudioData::interleaveSamples (AudioData::NonInterleavedSource<AudioData::Float32, AudioData::NativeEndian> { tempBuffer.getArrayOfReadPointers(), 2 },
+                                     AudioData::InterleavedDest<AudioData::Int16, AudioData::LittleEndian>        { reinterpret_cast<uint16*> (buffer), 2 },
                                      numSamples);

        source->readPosition += numSamples;
@@ -388,9 +388,8 @@ bool AudioCDBurner::addAudioTrack (AudioSource* audioSource, int numSamples)

        buffer.clear (bytesPerBlock);

-       AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
-                                    AudioData::Int16, AudioData::LittleEndian> (sourceBuffer.getArrayOfReadPointers(), 2,
-                                                                                reinterpret_cast<uint16*> (buffer), 2,
+       AudioData::interleaveSamples (AudioData::NonInterleavedSource<AudioData::Float32, AudioData::NativeEndian> { sourceBuffer.getArrayOfReadPointers(), 2 },
+                                     AudioData::InterleavedDest<AudioData::Int16, AudioData::LittleEndian>        { reinterpret_cast<uint16*> (buffer), 2 },
                                      samplesPerBlock);

        hr = pimpl->redbook->AddAudioTrackBlocks (buffer, bytesPerBlock);
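The CD-burner hunks pass the sample format and endianness tags to the wrappers directly instead of going through an AudioData::Format alias; the diff uses both spellings, so either form appears to be accepted. A sketch of that form for interleaving a stereo float buffer into a little-endian int16 block (the function and variable names are illustrative):

    #include <juce_audio_basics/juce_audio_basics.h>

    using namespace juce;

    // Interleave two float channels into a little-endian int16 buffer, passing the
    // format tags straight to the wrapper templates rather than via a Format alias.
    static void writeStereoInt16Block (const AudioBuffer<float>& stereoSource,
                                       uint16* interleavedOut,
                                       int numSamples)
    {
        AudioData::interleaveSamples (AudioData::NonInterleavedSource<AudioData::Float32, AudioData::NativeEndian> { stereoSource.getArrayOfReadPointers(), 2 },
                                      AudioData::InterleavedDest<AudioData::Int16, AudioData::LittleEndian>        { interleavedOut, 2 },
                                      numSamples);
    }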