mirror of
https://github.com/juce-framework/JUCE.git
synced 2026-01-10 23:44:24 +00:00
Update code to use new AudioData interleaving/deinterleaving API
This commit is contained in:
parent
d9a3a2605d
commit
69d4e0186f
6 changed files with 68 additions and 81 deletions
|
|
@ -434,16 +434,21 @@ void AudioDataConverters::convertFormatToFloat (DataFormat sourceFormat, const v
|
||||||
//==============================================================================
|
//==============================================================================
|
||||||
void AudioDataConverters::interleaveSamples (const float** source, float* dest, int numSamples, int numChannels)
|
void AudioDataConverters::interleaveSamples (const float** source, float* dest, int numSamples, int numChannels)
|
||||||
{
|
{
|
||||||
AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
|
using Format = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
|
||||||
AudioData::Float32, AudioData::NativeEndian> (source, numChannels, dest, numChannels, numSamples);
|
|
||||||
|
AudioData::interleaveSamples (AudioData::NonInterleavedSource<Format> { source, numChannels },
|
||||||
|
AudioData::InterleavedDest<Format> { dest, numChannels },
|
||||||
|
numSamples);
|
||||||
}
|
}
|
||||||
|
|
||||||
void AudioDataConverters::deinterleaveSamples (const float* source, float** dest, int numSamples, int numChannels)
|
void AudioDataConverters::deinterleaveSamples (const float* source, float** dest, int numSamples, int numChannels)
|
||||||
{
|
{
|
||||||
AudioData::deinterleaveSamples<AudioData::Float32, AudioData::NativeEndian,
|
using Format = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
|
||||||
AudioData::Float32, AudioData::NativeEndian> (source, numChannels, dest, numChannels, numSamples);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
AudioData::deinterleaveSamples (AudioData::InterleavedSource<Format> { source, numChannels },
|
||||||
|
AudioData::NonInterleavedDest<Format> { dest, numChannels },
|
||||||
|
numSamples);
|
||||||
|
}
|
||||||
|
|
||||||
//==============================================================================
|
//==============================================================================
|
||||||
//==============================================================================
|
//==============================================================================
|
||||||
|
|
@ -574,6 +579,8 @@ public:
|
||||||
beginTest ("Round-trip conversion: Float32");
|
beginTest ("Round-trip conversion: Float32");
|
||||||
Test1 <AudioData::Float32>::test (*this, r);
|
Test1 <AudioData::Float32>::test (*this, r);
|
||||||
|
|
||||||
|
using Format = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
|
||||||
|
|
||||||
beginTest ("Interleaving");
|
beginTest ("Interleaving");
|
||||||
{
|
{
|
||||||
constexpr auto numChannels = 4;
|
constexpr auto numChannels = 4;
|
||||||
|
|
@ -586,10 +593,9 @@ public:
|
||||||
for (int i = 0; i < numSamples; ++i)
|
for (int i = 0; i < numSamples; ++i)
|
||||||
sourceBuffer.setSample (ch, i, r.nextFloat());
|
sourceBuffer.setSample (ch, i, r.nextFloat());
|
||||||
|
|
||||||
AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
|
AudioData::interleaveSamples (AudioData::NonInterleavedSource<Format> { sourceBuffer.getArrayOfReadPointers(), numChannels },
|
||||||
AudioData::Float32, AudioData::NativeEndian> (sourceBuffer.getArrayOfReadPointers(), numChannels,
|
AudioData::InterleavedDest<Format> { destBuffer.getWritePointer (0), numChannels },
|
||||||
destBuffer.getWritePointer (0), numChannels,
|
numSamples);
|
||||||
numSamples);
|
|
||||||
|
|
||||||
for (int ch = 0; ch < numChannels; ++ch)
|
for (int ch = 0; ch < numChannels; ++ch)
|
||||||
for (int i = 0; i < numSamples; ++i)
|
for (int i = 0; i < numSamples; ++i)
|
||||||
|
|
@ -608,10 +614,9 @@ public:
|
||||||
for (int i = 0; i < numSamples; ++i)
|
for (int i = 0; i < numSamples; ++i)
|
||||||
sourceBuffer.setSample (0, ch + (i * numChannels), r.nextFloat());
|
sourceBuffer.setSample (0, ch + (i * numChannels), r.nextFloat());
|
||||||
|
|
||||||
AudioData::deinterleaveSamples<AudioData::Float32, AudioData::NativeEndian,
|
AudioData::deinterleaveSamples (AudioData::InterleavedSource<Format> { sourceBuffer.getReadPointer (0), numChannels },
|
||||||
AudioData::Float32, AudioData::NativeEndian> (sourceBuffer.getReadPointer (0), numChannels,
|
AudioData::NonInterleavedDest<Format> { destBuffer.getArrayOfWritePointers(), numChannels },
|
||||||
destBuffer.getArrayOfWritePointers(), numChannels,
|
numSamples);
|
||||||
numSamples);
|
|
||||||
|
|
||||||
for (int ch = 0; ch < numChannels; ++ch)
|
for (int ch = 0; ch < numChannels; ++ch)
|
||||||
for (int i = 0; i < numSamples; ++i)
|
for (int i = 0; i < numSamples; ++i)
|
||||||
|
|
|
||||||
|
|
@ -326,6 +326,9 @@ public:
|
||||||
JNIEnv* env = getEnv();
|
JNIEnv* env = getEnv();
|
||||||
jshortArray audioBuffer = env->NewShortArray (actualBufferSize * jmax (numDeviceOutputChannels, numDeviceInputChannels));
|
jshortArray audioBuffer = env->NewShortArray (actualBufferSize * jmax (numDeviceOutputChannels, numDeviceInputChannels));
|
||||||
|
|
||||||
|
using NativeInt16 = AudioData::Format<AudioData::Int16, AudioData::NativeEndian>;
|
||||||
|
using NativeFloat32 = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
|
||||||
|
|
||||||
while (! threadShouldExit())
|
while (! threadShouldExit())
|
||||||
{
|
{
|
||||||
if (inputDevice != nullptr)
|
if (inputDevice != nullptr)
|
||||||
|
|
@ -339,12 +342,9 @@ public:
|
||||||
|
|
||||||
jshort* const src = env->GetShortArrayElements (audioBuffer, nullptr);
|
jshort* const src = env->GetShortArrayElements (audioBuffer, nullptr);
|
||||||
|
|
||||||
AudioData::deinterleaveSamples<AudioData::Int16, AudioData::NativeEndian,
|
AudioData::deinterleaveSamples (AudioData::InterleavedSource<NativeInt16> { reinterpret_cast<const uint16*> (src), numDeviceInputChannels },
|
||||||
AudioData::Float32, AudioData::NativeEndian> (reinterpret_cast<const uint16*> (src),
|
AudioData::NonInterleavedDest<NativeFloat32> { inputChannelBuffer.getArrayOfWritePointers(), inputChannelBuffer.getNumChannels() },
|
||||||
numDeviceInputChannels,
|
actualBufferSize);
|
||||||
inputChannelBuffer.getArrayOfWritePointers(),
|
|
||||||
inputChannelBuffer.getNumChannels(),
|
|
||||||
actualBufferSize);
|
|
||||||
|
|
||||||
env->ReleaseShortArrayElements (audioBuffer, src, 0);
|
env->ReleaseShortArrayElements (audioBuffer, src, 0);
|
||||||
}
|
}
|
||||||
|
|
@ -374,12 +374,9 @@ public:
|
||||||
|
|
||||||
jshort* const dest = env->GetShortArrayElements (audioBuffer, nullptr);
|
jshort* const dest = env->GetShortArrayElements (audioBuffer, nullptr);
|
||||||
|
|
||||||
AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
|
AudioData::interleaveSamples (AudioData::NonInterleavedSource<NativeFloat32> { outputChannelBuffer.getArrayOfReadPointers(), outputChannelBuffer.getNumChannels() },
|
||||||
AudioData::Int16, AudioData::NativeEndian> (outputChannelBuffer.getArrayOfReadPointers(),
|
AudioData::InterleavedDest<NativeInt16> { reinterpret_cast<uint16*> (dest), numDeviceOutputChannels },
|
||||||
outputChannelBuffer.getNumChannels(),
|
actualBufferSize);
|
||||||
reinterpret_cast<uint16*> (dest),
|
|
||||||
numDeviceOutputChannels,
|
|
||||||
actualBufferSize);
|
|
||||||
|
|
||||||
env->ReleaseShortArrayElements (audioBuffer, dest, 0);
|
env->ReleaseShortArrayElements (audioBuffer, dest, 0);
|
||||||
jint numWritten = env->CallIntMethod (outputDevice, AudioTrack.write, audioBuffer, 0, actualBufferSize * numDeviceOutputChannels);
|
jint numWritten = env->CallIntMethod (outputDevice, AudioTrack.write, audioBuffer, 0, actualBufferSize * numDeviceOutputChannels);
|
||||||
|
|
|
||||||
|
|
@ -44,28 +44,25 @@ struct OboeAudioIODeviceBufferHelpers<int16>
|
||||||
|
|
||||||
static bool referAudioBufferDirectlyToOboeIfPossible (int16*, AudioBuffer<float>&, int) { return false; }
|
static bool referAudioBufferDirectlyToOboeIfPossible (int16*, AudioBuffer<float>&, int) { return false; }
|
||||||
|
|
||||||
|
using NativeInt16 = AudioData::Format<AudioData::Int16, AudioData::NativeEndian>;
|
||||||
|
using NativeFloat32 = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
|
||||||
|
|
||||||
static void convertFromOboe (const int16* srcInterleaved, AudioBuffer<float>& audioBuffer, int numSamples)
|
static void convertFromOboe (const int16* srcInterleaved, AudioBuffer<float>& audioBuffer, int numSamples)
|
||||||
{
|
{
|
||||||
const auto numChannels = audioBuffer.getNumChannels();
|
const auto numChannels = audioBuffer.getNumChannels();
|
||||||
|
|
||||||
AudioData::deinterleaveSamples<AudioData::Int16, AudioData::NativeEndian,
|
AudioData::deinterleaveSamples (AudioData::InterleavedSource<NativeInt16> { reinterpret_cast<const uint16*> (srcInterleaved), numChannels },
|
||||||
AudioData::Float32, AudioData::NativeEndian> (reinterpret_cast<const uint16*> (srcInterleaved),
|
AudioData::NonInterleavedDest<NativeFloat32> { audioBuffer.getArrayOfWritePointers(), numChannels },
|
||||||
numChannels,
|
numSamples);
|
||||||
audioBuffer.getArrayOfWritePointers(),
|
|
||||||
numChannels,
|
|
||||||
numSamples);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
static void convertToOboe (const AudioBuffer<float>& audioBuffer, int16* dstInterleaved, int numSamples)
|
static void convertToOboe (const AudioBuffer<float>& audioBuffer, int16* dstInterleaved, int numSamples)
|
||||||
{
|
{
|
||||||
const auto numChannels = audioBuffer.getNumChannels();
|
const auto numChannels = audioBuffer.getNumChannels();
|
||||||
|
|
||||||
AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
|
AudioData::interleaveSamples (AudioData::NonInterleavedSource<NativeFloat32> { audioBuffer.getArrayOfReadPointers(), numChannels },
|
||||||
AudioData::Int16, AudioData::NativeEndian> (audioBuffer.getArrayOfReadPointers(),
|
AudioData::InterleavedDest<NativeInt16> { reinterpret_cast<uint16*> (dstInterleaved), numChannels },
|
||||||
numChannels,
|
numSamples);
|
||||||
reinterpret_cast<uint16*> (dstInterleaved),
|
|
||||||
numChannels,
|
|
||||||
numSamples);
|
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
@ -87,6 +84,8 @@ struct OboeAudioIODeviceBufferHelpers<float>
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
using Format = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
|
||||||
|
|
||||||
static void convertFromOboe (const float* srcInterleaved, AudioBuffer<float>& audioBuffer, int numSamples)
|
static void convertFromOboe (const float* srcInterleaved, AudioBuffer<float>& audioBuffer, int numSamples)
|
||||||
{
|
{
|
||||||
auto numChannels = audioBuffer.getNumChannels();
|
auto numChannels = audioBuffer.getNumChannels();
|
||||||
|
|
@ -96,12 +95,9 @@ struct OboeAudioIODeviceBufferHelpers<float>
|
||||||
// No need to convert, we instructed the buffer to point to the src data directly already
|
// No need to convert, we instructed the buffer to point to the src data directly already
|
||||||
jassert (audioBuffer.getWritePointer (0) != srcInterleaved);
|
jassert (audioBuffer.getWritePointer (0) != srcInterleaved);
|
||||||
|
|
||||||
AudioData::deinterleaveSamples<AudioData::Float32, AudioData::NativeEndian,
|
AudioData::deinterleaveSamples (AudioData::InterleavedSource<Format> { srcInterleaved, numChannels },
|
||||||
AudioData::Float32, AudioData::NativeEndian> (srcInterleaved,
|
AudioData::NonInterleavedDest<Format> { audioBuffer.getArrayOfWritePointers(), numChannels },
|
||||||
numChannels,
|
numSamples);
|
||||||
audioBuffer.getArrayOfWritePointers(),
|
|
||||||
numChannels,
|
|
||||||
numSamples);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -114,12 +110,9 @@ struct OboeAudioIODeviceBufferHelpers<float>
|
||||||
// No need to convert, we instructed the buffer to point to the src data directly already
|
// No need to convert, we instructed the buffer to point to the src data directly already
|
||||||
jassert (audioBuffer.getReadPointer (0) != dstInterleaved);
|
jassert (audioBuffer.getReadPointer (0) != dstInterleaved);
|
||||||
|
|
||||||
AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
|
AudioData::interleaveSamples (AudioData::NonInterleavedSource<Format> { audioBuffer.getArrayOfReadPointers(), numChannels },
|
||||||
AudioData::Float32, AudioData::NativeEndian> (audioBuffer.getArrayOfReadPointers(),
|
AudioData::InterleavedDest<Format> { dstInterleaved, numChannels },
|
||||||
numChannels,
|
numSamples);
|
||||||
dstInterleaved,
|
|
||||||
numChannels,
|
|
||||||
numSamples);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
|
||||||
|
|
@ -194,28 +194,25 @@ struct BufferHelpers<int16>
|
||||||
|
|
||||||
static void prepareCallbackBuffer (AudioBuffer<float>&, int16*) {}
|
static void prepareCallbackBuffer (AudioBuffer<float>&, int16*) {}
|
||||||
|
|
||||||
|
using LittleEndianInt16 = AudioData::Format<AudioData::Int16, AudioData::LittleEndian>;
|
||||||
|
using NativeFloat32 = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
|
||||||
|
|
||||||
static void convertFromOpenSL (const int16* srcInterleaved, AudioBuffer<float>& audioBuffer)
|
static void convertFromOpenSL (const int16* srcInterleaved, AudioBuffer<float>& audioBuffer)
|
||||||
{
|
{
|
||||||
const auto numChannels = audioBuffer.getNumChannels();
|
const auto numChannels = audioBuffer.getNumChannels();
|
||||||
|
|
||||||
AudioData::deinterleaveSamples<AudioData::Int16, AudioData::LittleEndian,
|
AudioData::deinterleaveSamples (AudioData::InterleavedSource<LittleEndianInt16> { reinterpret_cast<const uint16*> (srcInterleaved), numChannels },
|
||||||
AudioData::Float32, AudioData::NativeEndian> (reinterpret_cast<const uint16*> (srcInterleaved),
|
AudioData::NonInterleavedDest<NativeFloat32> { audioBuffer.getArrayOfWritePointers(), numChannels },
|
||||||
numChannels,
|
audioBuffer.getNumSamples());
|
||||||
audioBuffer.getArrayOfWritePointers(),
|
|
||||||
numChannels,
|
|
||||||
audioBuffer.getNumSamples());
|
|
||||||
}
|
}
|
||||||
|
|
||||||
static void convertToOpenSL (const AudioBuffer<float>& audioBuffer, int16* dstInterleaved)
|
static void convertToOpenSL (const AudioBuffer<float>& audioBuffer, int16* dstInterleaved)
|
||||||
{
|
{
|
||||||
const auto numChannels = audioBuffer.getNumChannels();
|
const auto numChannels = audioBuffer.getNumChannels();
|
||||||
|
|
||||||
AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
|
AudioData::interleaveSamples (AudioData::NonInterleavedSource<NativeFloat32> { audioBuffer.getArrayOfReadPointers(), numChannels },
|
||||||
AudioData::Int16, AudioData::LittleEndian> (audioBuffer.getArrayOfReadPointers(),
|
AudioData::InterleavedDest<LittleEndianInt16> { reinterpret_cast<uint16*> (dstInterleaved), numChannels },
|
||||||
numChannels,
|
audioBuffer.getNumSamples());
|
||||||
reinterpret_cast<uint16*> (dstInterleaved),
|
|
||||||
numChannels,
|
|
||||||
audioBuffer.getNumSamples());
|
|
||||||
}
|
}
|
||||||
|
|
||||||
};
|
};
|
||||||
|
|
@ -244,6 +241,9 @@ struct BufferHelpers<float>
|
||||||
audioBuffer.setDataToReferTo (&native, 1, audioBuffer.getNumSamples());
|
audioBuffer.setDataToReferTo (&native, 1, audioBuffer.getNumSamples());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
using LittleEndianFloat32 = AudioData::Format<AudioData::Float32, AudioData::LittleEndian>;
|
||||||
|
using NativeFloat32 = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
|
||||||
|
|
||||||
static void convertFromOpenSL (const float* srcInterleaved, AudioBuffer<float>& audioBuffer)
|
static void convertFromOpenSL (const float* srcInterleaved, AudioBuffer<float>& audioBuffer)
|
||||||
{
|
{
|
||||||
const auto numChannels = audioBuffer.getNumChannels();
|
const auto numChannels = audioBuffer.getNumChannels();
|
||||||
|
|
@ -254,12 +254,9 @@ struct BufferHelpers<float>
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
AudioData::deinterleaveSamples<AudioData::Float32, AudioData::LittleEndian,
|
AudioData::deinterleaveSamples (AudioData::InterleavedSource<LittleEndianFloat32> { srcInterleaved, numChannels },
|
||||||
AudioData::Float32, AudioData::NativeEndian> (srcInterleaved,
|
AudioData::NonInterleavedDest<NativeFloat32> { audioBuffer.getArrayOfWritePointers(), numChannels },
|
||||||
numChannels,
|
audioBuffer.getNumSamples());
|
||||||
audioBuffer.getArrayOfWritePointers(),
|
|
||||||
numChannels,
|
|
||||||
audioBuffer.getNumSamples());
|
|
||||||
}
|
}
|
||||||
|
|
||||||
static void convertToOpenSL (const AudioBuffer<float>& audioBuffer, float* dstInterleaved)
|
static void convertToOpenSL (const AudioBuffer<float>& audioBuffer, float* dstInterleaved)
|
||||||
|
|
@ -272,12 +269,9 @@ struct BufferHelpers<float>
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
|
AudioData::interleaveSamples (AudioData::NonInterleavedSource<NativeFloat32> { audioBuffer.getArrayOfReadPointers(), numChannels },
|
||||||
AudioData::Float32, AudioData::LittleEndian> (audioBuffer.getArrayOfReadPointers(),
|
AudioData::InterleavedDest<LittleEndianFloat32> { dstInterleaved, numChannels },
|
||||||
numChannels,
|
audioBuffer.getNumSamples());
|
||||||
dstInterleaved,
|
|
||||||
numChannels,
|
|
||||||
audioBuffer.getNumSamples());
|
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -132,10 +132,9 @@ private:
|
||||||
|
|
||||||
source->source->getNextAudioBlock (info);
|
source->source->getNextAudioBlock (info);
|
||||||
|
|
||||||
AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
|
AudioData::interleaveSamples (AudioData::NonInterleavedSource<AudioData::Format<AudioData::Float32, AudioData::NativeEndian>> { tempBuffer.getArrayOfReadPointers(), 2 },
|
||||||
AudioData::Int16, AudioData::LittleEndian> (tempBuffer.getArrayOfReadPointers(), 2,
|
AudioData::InterleavedDest<AudioData::Format<AudioData::Int16, AudioData::LittleEndian>> { reinterpret_cast<uint16*> (buffer), 2 },
|
||||||
reinterpret_cast<uint16*> (buffer), 2,
|
numSamples);
|
||||||
numSamples);
|
|
||||||
|
|
||||||
source->readPosition += numSamples;
|
source->readPosition += numSamples;
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -388,10 +388,9 @@ bool AudioCDBurner::addAudioTrack (AudioSource* audioSource, int numSamples)
|
||||||
|
|
||||||
buffer.clear (bytesPerBlock);
|
buffer.clear (bytesPerBlock);
|
||||||
|
|
||||||
AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
|
AudioData::interleaveSamples (AudioData::NonInterleavedSource<AudioData::Format<AudioData::Float32, AudioData::NativeEndian>> { sourceBuffer.getArrayOfReadPointers(), 2 },
|
||||||
AudioData::Int16, AudioData::LittleEndian> (sourceBuffer.getArrayOfReadPointers(), 2,
|
AudioData::InterleavedDest<AudioData::Format<AudioData::Int16, AudioData::LittleEndian>> { reinterpret_cast<uint16*> (buffer), 2 },
|
||||||
reinterpret_cast<uint16*> (buffer), 2,
|
samplesPerBlock);
|
||||||
samplesPerBlock);
|
|
||||||
|
|
||||||
hr = pimpl->redbook->AddAudioTrackBlocks (buffer, bytesPerBlock);
|
hr = pimpl->redbook->AddAudioTrackBlocks (buffer, bytesPerBlock);
|
||||||
|
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue