
AudioPlayHead: Improve granularity of position info

reuk 2022-06-13 19:37:49 +01:00
parent 891daf1332
commit 8fbd99c424
27 changed files with 924 additions and 572 deletions

View file

@@ -4,6 +4,36 @@ JUCE breaking changes
develop
=======
Change
------
AudioPlayHead::getCurrentPosition() has been deprecated and replaced with
AudioPlayHead::getPosition().
Possible Issues
---------------
Hosts that implemented custom playhead types may no longer compile. Plugins
that used host-provided timing information may trigger deprecation warnings
when building.
Workaround
----------
Classes that derive from AudioPlayHead must now override getPosition() instead
of getCurrentPosition(). Code that used to use the playhead's
CurrentPositionInfo must switch to using the new PositionInfo type.
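As a rough sketch (the "MyPlayHead" class name and the constant values below are
placeholders for illustration, not part of JUCE), a typical migration looks like
this:

    // Before
    struct MyPlayHead : public juce::AudioPlayHead
    {
        bool getCurrentPosition (CurrentPositionInfo& result) override
        {
            result.resetToDefault();
            result.bpm = 120.0;
            result.isPlaying = true;
            return true;
        }
    };

    // After
    struct MyPlayHead : public juce::AudioPlayHead
    {
        juce::Optional<PositionInfo> getPosition() const override
        {
            PositionInfo result;
            result.setBpm (120.0);       // only set the fields you can genuinely provide
            result.setIsPlaying (true);
            return result;               // return juce::nullopt if no info is available
        }
    };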
Rationale
---------
Not all hosts and plugin formats are capable of providing the full complement
of timing information contained in the old CurrentPositionInfo class.
Previously, when some information could not be provided, fallback values were
used instead, but it was not possible for clients to distinguish between "real"
values set explicitly by the host and "fallback" values set by a plugin wrapper.
The new PositionInfo type keeps track of which members have been explicitly set,
so clients can implement their own fallback behaviour.
The new PositionInfo type also includes a new "barCount" member, which is
currently only used by the LV2 host and client.
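For example, a plugin that wants its own fallback behaviour might read the new
type roughly as follows (the 120 bpm default and the lastKnownBpm/lastKnownSample
members are hypothetical, purely for illustration):

    void processBlock (juce::AudioBuffer<float>& buffer, juce::MidiBuffer&) override
    {
        if (auto* ph = getPlayHead())
        {
            if (const auto pos = ph->getPosition())
            {
                // orFallback() substitutes the plugin's own default only when
                // the host left the field unset
                lastKnownBpm = pos->getBpm().orFallback (120.0);

                // an engaged Optional distinguishes "the host didn't say" from a real value
                if (const auto samples = pos->getTimeInSamples())
                    lastKnownSample = *samples;
            }
        }

        buffer.clear();   // placeholder for the real processing
    }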
Change
------
The optional JUCE_COREGRAPHICS_RENDER_WITH_MULTIPLE_PAINT_CALLS preprocessor

View file

@@ -293,14 +293,14 @@ public:
     bool processBlock (AudioBuffer<float>& buffer,
                        AudioProcessor::Realtime realtime,
-                       const AudioPlayHead::CurrentPositionInfo& positionInfo) noexcept override
+                       const AudioPlayHead::PositionInfo& positionInfo) noexcept override
     {
         const auto numSamples = buffer.getNumSamples();
         jassert (numSamples <= maximumSamplesPerBlock);
         jassert (numChannels == buffer.getNumChannels());
         jassert (realtime == AudioProcessor::Realtime::no || useBufferedAudioSourceReader);

-        const auto timeInSamples = positionInfo.timeInSamples;
-        const auto isPlaying = positionInfo.isPlaying;
+        const auto timeInSamples = positionInfo.getTimeInSamples().orFallback (0);
+        const auto isPlaying = positionInfo.getIsPlaying();

         bool success = true;
         bool didRenderAnyRegion = false;
@@ -482,7 +482,7 @@ public:
     bool processBlock (AudioBuffer<float>& buffer,
                        AudioProcessor::Realtime realtime,
-                       const AudioPlayHead::CurrentPositionInfo& positionInfo) noexcept override
+                       const AudioPlayHead::PositionInfo& positionInfo) noexcept override
     {
         ignoreUnused (realtime);
@@ -491,7 +491,7 @@ public:
         if (! locked)
             return true;

-        if (positionInfo.isPlaying)
+        if (positionInfo.getIsPlaying())
             return true;

         if (const auto previewedRegion = previewState->previewedRegion.load())
@@ -1075,12 +1075,11 @@ private:
     void doResize()
     {
         auto* aph = getAudioPlayhead();

-        AudioPlayHead::CurrentPositionInfo positionInfo;
-        aph->getCurrentPosition (positionInfo);
+        const auto info = aph->getPosition();

-        if (positionInfo.isPlaying)
+        if (info.hasValue() && info->getIsPlaying())
         {
-            const auto markerX = positionInfo.timeInSeconds * pixelPerSecond;
+            const auto markerX = info->getTimeInSeconds().orFallback (0) * pixelPerSecond;
             const auto playheadLine = getLocalBounds().withTrimmedLeft ((int) (markerX - markerWidth / 2.0) - horizontalOffset)
                                                       .removeFromLeft ((int) markerWidth);
             playheadMarker.setVisible (true);

View file

@@ -321,12 +321,10 @@ public:
     class SpinLockedPosInfo
     {
     public:
-        SpinLockedPosInfo() { info.resetToDefault(); }
-
         // Wait-free, but setting new info may fail if the main thread is currently
         // calling `get`. This is unlikely to matter in practice because
         // we'll be calling `set` much more frequently than `get`.
-        void set (const AudioPlayHead::CurrentPositionInfo& newInfo)
+        void set (const AudioPlayHead::PositionInfo& newInfo)
         {
             const juce::SpinLock::ScopedTryLockType lock (mutex);
@@ -334,7 +332,7 @@ public:
                 info = newInfo;
         }

-        AudioPlayHead::CurrentPositionInfo get() const noexcept
+        AudioPlayHead::PositionInfo get() const noexcept
         {
             const juce::SpinLock::ScopedLockType lock (mutex);
             return info;
@@ -342,7 +340,7 @@ public:

     private:
         juce::SpinLock mutex;
-        AudioPlayHead::CurrentPositionInfo info;
+        AudioPlayHead::PositionInfo info;
     };

     //==============================================================================
@@ -510,13 +508,13 @@ private:
     }

     // quick-and-dirty function to format a bars/beats string
-    static String quarterNotePositionToBarsBeatsString (double quarterNotes, int numerator, int denominator)
+    static String quarterNotePositionToBarsBeatsString (double quarterNotes, AudioPlayHead::TimeSignature sig)
     {
-        if (numerator == 0 || denominator == 0)
+        if (sig.numerator == 0 || sig.denominator == 0)
             return "1|1|000";

-        auto quarterNotesPerBar = (numerator * 4 / denominator);
-        auto beats = (fmod (quarterNotes, quarterNotesPerBar) / quarterNotesPerBar) * numerator;
+        auto quarterNotesPerBar = (sig.numerator * 4 / sig.denominator);
+        auto beats = (fmod (quarterNotes, quarterNotesPerBar) / quarterNotesPerBar) * sig.numerator;

         auto bar = ((int) quarterNotes) / quarterNotesPerBar + 1;
         auto beat = ((int) beats) + 1;
@@ -526,21 +524,21 @@ private:
     }

     // Updates the text in our position label.
-    void updateTimecodeDisplay (AudioPlayHead::CurrentPositionInfo pos)
+    void updateTimecodeDisplay (const AudioPlayHead::PositionInfo& pos)
     {
         MemoryOutputStream displayText;

-        displayText << "[" << SystemStats::getJUCEVersion() << "] "
-                    << String (pos.bpm, 2) << " bpm, "
-                    << pos.timeSigNumerator << '/' << pos.timeSigDenominator
-                    << " - " << timeToTimecodeString (pos.timeInSeconds)
-                    << " - " << quarterNotePositionToBarsBeatsString (pos.ppqPosition,
-                                                                      pos.timeSigNumerator,
-                                                                      pos.timeSigDenominator);
+        const auto sig = pos.getTimeSignature().orFallback (AudioPlayHead::TimeSignature{});
+
+        displayText << "[" << SystemStats::getJUCEVersion() << "] "
+                    << String (pos.getBpm().orFallback (120.0), 2) << " bpm, "
+                    << sig.numerator << '/' << sig.denominator
+                    << " - " << timeToTimecodeString (pos.getTimeInSeconds().orFallback (0.0))
+                    << " - " << quarterNotePositionToBarsBeatsString (pos.getPpqPosition().orFallback (0.0), sig);

-        if (pos.isRecording)
+        if (pos.getIsRecording())
             displayText << " (recording)";
-        else if (pos.isPlaying)
+        else if (pos.getIsPlaying())
             displayText << " (playing)";

         timecodeDisplayLabel.setText (displayText.toString(), dontSendNotification);
@@ -647,17 +645,11 @@ private:
         const auto newInfo = [&]
         {
             if (auto* ph = getPlayHead())
-            {
-                AudioPlayHead::CurrentPositionInfo result;
-
-                if (ph->getCurrentPosition (result))
-                    return result;
-            }
+                if (auto result = ph->getPosition())
+                    return *result;

             // If the host fails to provide the current time, we'll just use default values
-            AudioPlayHead::CurrentPositionInfo result;
-            result.resetToDefault();
-            return result;
+            return AudioPlayHead::PositionInfo{};
         }();

         lastPosInfo.set (newInfo);

View file

@@ -486,11 +486,12 @@ public:
     struct SimplePlayHead : public juce::AudioPlayHead
     {
-        bool getCurrentPosition (CurrentPositionInfo& result) override
+        Optional<PositionInfo> getPosition() const override
         {
-            result.timeInSamples = timeInSamples.load();
-            result.isPlaying = isPlaying.load();
-            return true;
+            PositionInfo result;
+            result.setTimeInSamples (timeInSamples.load());
+            result.setIsPlaying (isPlaying.load());
+            return result;
         }

         std::atomic<int64_t> timeInSamples { 0 };

View file

@@ -152,8 +152,60 @@ public:
        bool drop = false, pulldown = false;
    };
/** Describes a musical time signature.
@see PositionInfo::getTimeSignature() PositionInfo::setTimeSignature()
*/
struct JUCE_API TimeSignature
{
/** Time signature numerator, e.g. the 3 of a 3/4 time sig */
int numerator = 4;
/** Time signature denominator, e.g. the 4 of a 3/4 time sig */
int denominator = 4;
bool operator== (const TimeSignature& other) const
{
const auto tie = [] (auto& x) { return std::tie (x.numerator, x.denominator); };
return tie (*this) == tie (other);
}
bool operator!= (const TimeSignature& other) const
{
return ! operator== (other);
}
};
/** Holds the begin and end points of a looped region.
@see PositionInfo::getIsLooping() PositionInfo::setIsLooping() PositionInfo::getLoopPoints() PositionInfo::setLoopPoints()
*/
struct JUCE_API LoopPoints
{
/** The current cycle start position in units of quarter-notes. */
double ppqStart = 0;
/** The current cycle end position in units of quarter-notes. */
double ppqEnd = 0;
bool operator== (const LoopPoints& other) const
{
const auto tie = [] (auto& x) { return std::tie (x.ppqStart, x.ppqEnd); };
return tie (*this) == tie (other);
}
bool operator!= (const LoopPoints& other) const
{
return ! operator== (other);
}
};
    //==============================================================================
-    /** This structure is filled-in by the AudioPlayHead::getCurrentPosition() method.
+    /** This type is deprecated; prefer PositionInfo instead.
+
+        Some position info may be unavailable, depending on the host or plugin format.
+        Unfortunately, CurrentPositionInfo doesn't have any way of differentiating between
+        default values and values that have been set explicitly.
     */
     struct JUCE_API CurrentPositionInfo
     {
@@ -162,6 +214,7 @@ public:
        /** Time signature numerator, e.g. the 3 of a 3/4 time sig */
        int timeSigNumerator = 4;
        /** Time signature denominator, e.g. the 4 of a 3/4 time sig */
        int timeSigDenominator = 4;
@@ -248,7 +301,199 @@ public:
    };

    //==============================================================================
-    /** Fills-in the given structure with details about the transport's
+    /**
Describes the time at the start of the current audio callback.
Not all hosts and plugin formats can provide all of the possible time
information, so most of the getter functions in this class return
an Optional that will only be engaged if the host provides the corresponding
information. As a plugin developer, you should code defensively so that
the plugin behaves sensibly even when the host fails to provide timing
information.
A default-constructed instance of this class will return nullopt from
all functions that return an Optional.
*/
class PositionInfo
{
public:
/** Returns the number of samples that have elapsed. */
Optional<int64_t> getTimeInSamples() const { return getOptional (flagTimeSamples, timeInSamples); }
/** @see getTimeInSamples() */
void setTimeInSamples (Optional<int64_t> timeInSamplesIn) { setOptional (flagTimeSamples, timeInSamples, timeInSamplesIn); }
/** Returns the number of samples that have elapsed. */
Optional<double> getTimeInSeconds() const { return getOptional (flagTimeSeconds, timeInSeconds); }
/** @see getTimeInSamples() */
void setTimeInSeconds (Optional<double> timeInSecondsIn) { setOptional (flagTimeSeconds, timeInSeconds, timeInSecondsIn); }
/** Returns the bpm, if available. */
Optional<double> getBpm() const { return getOptional (flagTempo, tempoBpm); }
/** @see getBpm() */
void setBpm (Optional<double> bpmIn) { setOptional (flagTempo, tempoBpm, bpmIn); }
/** Returns the time signature, if available. */
Optional<TimeSignature> getTimeSignature() const { return getOptional (flagTimeSignature, timeSignature); }
/** @see getTimeSignature() */
void setTimeSignature (Optional<TimeSignature> timeSignatureIn) { setOptional (flagTimeSignature, timeSignature, timeSignatureIn); }
/** Returns host loop points, if available. */
Optional<LoopPoints> getLoopPoints() const { return getOptional (flagLoopPoints, loopPoints); }
/** @see getLoopPoints() */
void setLoopPoints (Optional<LoopPoints> loopPointsIn) { setOptional (flagLoopPoints, loopPoints, loopPointsIn); }
/** The number of bars since the beginning of the timeline.
This value isn't available in all hosts or in all plugin formats.
*/
Optional<int64_t> getBarCount() const { return getOptional (flagBarCount, barCount); }
/** @see getBarCount() */
void setBarCount (Optional<int64_t> barCountIn) { setOptional (flagBarCount, barCount, barCountIn); }
/** The position of the start of the last bar, in units of quarter-notes.
This is the time from the start of the timeline to the start of the current
bar, in ppq units.
Note - this value may be unavailable on some hosts, e.g. Pro-Tools.
*/
Optional<double> getPpqPositionOfLastBarStart() const { return getOptional (flagLastBarStartPpq, lastBarStartPpq); }
/** @see getPpqPositionOfLastBarStart() */
void setPpqPositionOfLastBarStart (Optional<double> positionIn) { setOptional (flagLastBarStartPpq, lastBarStartPpq, positionIn); }
/** The video frame rate, if available. */
Optional<FrameRate> getFrameRate() const { return getOptional (flagFrameRate, frame); }
/** @see getFrameRate() */
void setFrameRate (Optional<FrameRate> frameRateIn) { setOptional (flagFrameRate, frame, frameRateIn); }
/** The current play position, in units of quarter-notes. */
Optional<double> getPpqPosition() const { return getOptional (flagPpqPosition, positionPpq); }
/** @see getPpqPosition() */
void setPpqPosition (Optional<double> ppqPositionIn) { setOptional (flagPpqPosition, positionPpq, ppqPositionIn); }
/** For timecode, the position of the start of the timeline, in seconds from 00:00:00:00. */
Optional<double> getEditOriginTime() const { return getOptional (flagOriginTime, originTime); }
/** @see getEditOriginTime() */
void setEditOriginTime (Optional<double> editOriginTimeIn) { setOptional (flagOriginTime, originTime, editOriginTimeIn); }
/** True if the transport is currently playing. */
bool getIsPlaying() const { return getFlag (flagIsPlaying); }
/** @see getIsPlaying() */
void setIsPlaying (bool isPlayingIn) { setFlag (flagIsPlaying, isPlayingIn); }
/** True if the transport is currently recording.
(When isRecording is true, then isPlaying will also be true).
*/
bool getIsRecording() const { return getFlag (flagIsRecording); }
/** @see getIsRecording() */
void setIsRecording (bool isRecordingIn) { setFlag (flagIsRecording, isRecordingIn); }
/** True if the transport is currently looping. */
bool getIsLooping() const { return getFlag (flagIsLooping); }
/** @see getIsLooping() */
void setIsLooping (bool isLoopingIn) { setFlag (flagIsLooping, isLoopingIn); }
bool operator== (const PositionInfo& other) const noexcept
{
const auto tie = [] (const PositionInfo& i)
{
return std::make_tuple (i.getTimeInSamples(),
i.getTimeInSeconds(),
i.getPpqPosition(),
i.getEditOriginTime(),
i.getPpqPositionOfLastBarStart(),
i.getFrameRate(),
i.getBarCount(),
i.getTimeSignature(),
i.getBpm(),
i.getLoopPoints(),
i.getIsPlaying(),
i.getIsRecording(),
i.getIsLooping());
};
return tie (*this) == tie (other);
}
bool operator!= (const PositionInfo& other) const noexcept
{
return ! operator== (other);
}
private:
bool getFlag (int64_t flagToCheck) const
{
return (flagToCheck & flags) != 0;
}
void setFlag (int64_t flagToCheck, bool value)
{
flags = (value ? flags | flagToCheck : flags & ~flagToCheck);
}
template <typename Value>
Optional<Value> getOptional (int64_t flagToCheck, Value value) const
{
return getFlag (flagToCheck) ? makeOptional (std::move (value)) : nullopt;
}
template <typename Value>
void setOptional (int64_t flagToCheck, Value& value, Optional<Value> opt)
{
if (opt.hasValue())
value = *opt;
setFlag (flagToCheck, opt.hasValue());
}
enum
{
flagTimeSignature = 1 << 0,
flagLoopPoints = 1 << 1,
flagFrameRate = 1 << 2,
flagTimeSeconds = 1 << 3,
flagLastBarStartPpq = 1 << 4,
flagPpqPosition = 1 << 5,
flagOriginTime = 1 << 6,
flagTempo = 1 << 7,
flagTimeSamples = 1 << 8,
flagBarCount = 1 << 9,
flagIsPlaying = 1 << 10,
flagIsRecording = 1 << 11,
flagIsLooping = 1 << 12
};
TimeSignature timeSignature;
LoopPoints loopPoints;
FrameRate frame = FrameRateType::fps23976;
double timeInSeconds = 0.0;
double lastBarStartPpq = 0.0;
double positionPpq = 0.0;
double originTime = 0.0;
double tempoBpm = 0.0;
int64_t timeInSamples = 0;
int64_t barCount = 0;
int64_t flags = 0;
};
//==============================================================================
/** Deprecated, use getPosition() instead.
Fills-in the given structure with details about the transport's
        position at the start of the current processing block. If this method returns
        false then the current play head position is not available and the given
        structure will be undefined.
@@ -258,7 +503,66 @@ public:
        in which a time would make sense, and some hosts will almost certainly have
        multithreading issues if it's not called on the audio thread.
    */
-    virtual bool getCurrentPosition (CurrentPositionInfo& result) = 0;
+    [[deprecated ("Use getPosition instead. Not all hosts are able to provide all time position information; getPosition differentiates clearly between set and unset fields.")]]
bool getCurrentPosition (CurrentPositionInfo& result)
{
if (const auto pos = getPosition())
{
result.resetToDefault();
if (const auto sig = pos->getTimeSignature())
{
result.timeSigNumerator = sig->numerator;
result.timeSigDenominator = sig->denominator;
}
if (const auto loop = pos->getLoopPoints())
{
result.ppqLoopStart = loop->ppqStart;
result.ppqLoopEnd = loop->ppqEnd;
}
if (const auto frame = pos->getFrameRate())
result.frameRate = *frame;
if (const auto timeInSeconds = pos->getTimeInSeconds())
result.timeInSeconds = *timeInSeconds;
if (const auto lastBarStartPpq = pos->getPpqPositionOfLastBarStart())
result.ppqPositionOfLastBarStart = *lastBarStartPpq;
if (const auto ppqPosition = pos->getPpqPosition())
result.ppqPosition = *ppqPosition;
if (const auto originTime = pos->getEditOriginTime())
result.editOriginTime = *originTime;
if (const auto bpm = pos->getBpm())
result.bpm = *bpm;
if (const auto timeInSamples = pos->getTimeInSamples())
result.timeInSamples = *timeInSamples;
return true;
}
return false;
}
/** Fetches details about the transport's position at the start of the current
processing block. If this method returns nullopt then the current play head
position is not available.
A non-null return value just indicates that the host was able to provide
*some* relevant timing information. Individual PositionInfo getters may
still return nullopt.
You can ONLY call this from your processBlock() method! Calling it at other
times will produce undefined behaviour, as the host may not have any context
in which a time would make sense, and some hosts will almost certainly have
multithreading issues if it's not called on the audio thread.
*/
virtual Optional<PositionInfo> getPosition() const = 0;
    /** Returns true if this object can control the transport. */
    virtual bool canControlTransport() { return false; }

View file

@ -252,8 +252,7 @@ private:
}; };
//============================================================================== //==============================================================================
struct iOSAudioIODevice::Pimpl : public AudioPlayHead, struct iOSAudioIODevice::Pimpl : public AsyncUpdater
public AsyncUpdater
{ {
Pimpl (iOSAudioIODeviceType* ioDeviceType, iOSAudioIODevice& ioDevice) Pimpl (iOSAudioIODeviceType* ioDeviceType, iOSAudioIODevice& ioDevice)
: deviceType (ioDeviceType), : deviceType (ioDeviceType),
@ -566,7 +565,12 @@ struct iOSAudioIODevice::Pimpl : public AudioPlayHead,
} }
//============================================================================== //==============================================================================
bool canControlTransport() override { return interAppAudioConnected; } class PlayHead : public AudioPlayHead
{
public:
explicit PlayHead (Pimpl& implIn) : impl (implIn) {}
bool canControlTransport() override { return canControlTransportImpl(); }
void transportPlay (bool shouldSartPlaying) override void transportPlay (bool shouldSartPlaying) override
{ {
@ -574,7 +578,7 @@ struct iOSAudioIODevice::Pimpl : public AudioPlayHead,
return; return;
HostCallbackInfo callbackInfo; HostCallbackInfo callbackInfo;
fillHostCallbackInfo (callbackInfo); impl.fillHostCallbackInfo (callbackInfo);
Boolean hostIsPlaying = NO; Boolean hostIsPlaying = NO;
OSStatus err = callbackInfo.transportStateProc2 (callbackInfo.hostUserData, OSStatus err = callbackInfo.transportStateProc2 (callbackInfo.hostUserData,
@ -590,7 +594,7 @@ struct iOSAudioIODevice::Pimpl : public AudioPlayHead,
jassert (err == noErr); jassert (err == noErr);
if (hostIsPlaying != shouldSartPlaying) if (hostIsPlaying != shouldSartPlaying)
handleAudioTransportEvent (kAudioUnitRemoteControlEvent_TogglePlayPause); impl.handleAudioTransportEvent (kAudioUnitRemoteControlEvent_TogglePlayPause);
} }
void transportRecord (bool shouldStartRecording) override void transportRecord (bool shouldStartRecording) override
@ -599,7 +603,7 @@ struct iOSAudioIODevice::Pimpl : public AudioPlayHead,
return; return;
HostCallbackInfo callbackInfo; HostCallbackInfo callbackInfo;
fillHostCallbackInfo (callbackInfo); impl.fillHostCallbackInfo (callbackInfo);
Boolean hostIsRecording = NO; Boolean hostIsRecording = NO;
OSStatus err = callbackInfo.transportStateProc2 (callbackInfo.hostUserData, OSStatus err = callbackInfo.transportStateProc2 (callbackInfo.hostUserData,
@ -614,27 +618,25 @@ struct iOSAudioIODevice::Pimpl : public AudioPlayHead,
jassert (err == noErr); jassert (err == noErr);
if (hostIsRecording != shouldStartRecording) if (hostIsRecording != shouldStartRecording)
handleAudioTransportEvent (kAudioUnitRemoteControlEvent_ToggleRecord); impl.handleAudioTransportEvent (kAudioUnitRemoteControlEvent_ToggleRecord);
} }
void transportRewind() override void transportRewind() override
{ {
if (canControlTransport()) if (canControlTransport())
handleAudioTransportEvent (kAudioUnitRemoteControlEvent_Rewind); impl.handleAudioTransportEvent (kAudioUnitRemoteControlEvent_Rewind);
} }
bool getCurrentPosition (CurrentPositionInfo& result) override Optional<PositionInfo> getPosition() const override
{ {
if (! canControlTransport()) if (! canControlTransportImpl())
return false; return {};
zerostruct (result);
HostCallbackInfo callbackInfo; HostCallbackInfo callbackInfo;
fillHostCallbackInfo (callbackInfo); impl.fillHostCallbackInfo (callbackInfo);
if (callbackInfo.hostUserData == nullptr) if (callbackInfo.hostUserData == nullptr)
return false; return {};
Boolean hostIsPlaying = NO; Boolean hostIsPlaying = NO;
Boolean hostIsRecording = NO; Boolean hostIsRecording = NO;
@ -651,18 +653,18 @@ struct iOSAudioIODevice::Pimpl : public AudioPlayHead,
&hostCycleStartBeat, &hostCycleStartBeat,
&hostCycleEndBeat); &hostCycleEndBeat);
if (err == kAUGraphErr_CannotDoInCurrentContext) if (err == kAUGraphErr_CannotDoInCurrentContext)
return false; return {};
jassert (err == noErr); jassert (err == noErr);
result.timeInSamples = (int64) hostCurrentSampleInTimeLine; PositionInfo result;
result.isPlaying = hostIsPlaying;
result.isRecording = hostIsRecording;
result.isLooping = hostIsCycling;
result.ppqLoopStart = hostCycleStartBeat;
result.ppqLoopEnd = hostCycleEndBeat;
result.timeInSeconds = result.timeInSamples / sampleRate; result.setTimeInSamples ((int64) hostCurrentSampleInTimeLine);
result.setIsPlaying (hostIsPlaying);
result.setIsRecording (hostIsRecording);
result.setIsLooping (hostIsCycling);
result.setLoopPoints (LoopPoints { hostCycleStartBeat, hostCycleEndBeat });
result.setTimeInSeconds (*result.getTimeInSamples() / impl.sampleRate);
Float64 hostBeat = 0; Float64 hostBeat = 0;
Float64 hostTempo = 0; Float64 hostTempo = 0;
@ -671,8 +673,8 @@ struct iOSAudioIODevice::Pimpl : public AudioPlayHead,
&hostTempo); &hostTempo);
jassert (err == noErr); jassert (err == noErr);
result.ppqPosition = hostBeat; result.setPpqPosition (hostBeat);
result.bpm = hostTempo; result.setBpm (hostTempo);
Float32 hostTimeSigNumerator = 0; Float32 hostTimeSigNumerator = 0;
UInt32 hostTimeSigDenominator = 0; UInt32 hostTimeSigDenominator = 0;
@ -684,15 +686,20 @@ struct iOSAudioIODevice::Pimpl : public AudioPlayHead,
&hostCurrentMeasureDownBeat); &hostCurrentMeasureDownBeat);
jassert (err == noErr); jassert (err == noErr);
result.ppqPositionOfLastBarStart = hostCurrentMeasureDownBeat; result.setPpqPositionOfLastBarStart (hostCurrentMeasureDownBeat);
result.timeSigNumerator = (int) hostTimeSigNumerator; result.setTimeSignature (TimeSignature { (int) hostTimeSigNumerator, (int) hostTimeSigDenominator });
result.timeSigDenominator = (int) hostTimeSigDenominator;
result.frameRate = AudioPlayHead::fpsUnknown; result.setFrameRate (AudioPlayHead::fpsUnknown);
return true; return result;
} }
private:
bool canControlTransportImpl() const { return impl.interAppAudioConnected; }
Pimpl& impl;
};
//============================================================================== //==============================================================================
#if JUCE_MODULE_AVAILABLE_juce_graphics #if JUCE_MODULE_AVAILABLE_juce_graphics
JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wdeprecated-declarations") JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wdeprecated-declarations")
@ -1379,6 +1386,7 @@ struct iOSAudioIODevice::Pimpl : public AudioPlayHead,
Float64 lastSampleTime; Float64 lastSampleTime;
unsigned int lastNumFrames; unsigned int lastNumFrames;
int xrun; int xrun;
PlayHead playhead { *this };
JUCE_DECLARE_NON_COPYABLE (Pimpl) JUCE_DECLARE_NON_COPYABLE (Pimpl)
}; };
@ -1429,7 +1437,7 @@ int iOSAudioIODevice::getOutputLatencyInSamples() { return rou
int iOSAudioIODevice::getXRunCount() const noexcept { return pimpl->xrun; } int iOSAudioIODevice::getXRunCount() const noexcept { return pimpl->xrun; }
void iOSAudioIODevice::setMidiMessageCollector (MidiMessageCollector* collector) { pimpl->messageCollector = collector; } void iOSAudioIODevice::setMidiMessageCollector (MidiMessageCollector* collector) { pimpl->messageCollector = collector; }
AudioPlayHead* iOSAudioIODevice::getAudioPlayHead() const { return pimpl.get(); } AudioPlayHead* iOSAudioIODevice::getAudioPlayHead() const { return &pimpl->playhead; }
bool iOSAudioIODevice::isInterAppAudioConnected() const { return pimpl->interAppAudioConnected; } bool iOSAudioIODevice::isInterAppAudioConnected() const { return pimpl->interAppAudioConnected; }
#if JUCE_MODULE_AVAILABLE_juce_graphics #if JUCE_MODULE_AVAILABLE_juce_graphics

View file

@@ -172,8 +172,7 @@ ARAPlaybackRegionReader::ARAPlaybackRegionReader (double rate, int numChans,
     // We're only providing the minimal set of meaningful values, since the ARA renderer should only
     // look at the time position and the playing state, and read any related tempo or bar signature
     // information from the ARA model directly (MusicalContext).
-    positionInfo.resetToDefault();
-    positionInfo.isPlaying = true;
+    positionInfo.setIsPlaying (true);

     sampleRate = rate;
     numChannels = (unsigned int) numChans;
@@ -252,16 +251,17 @@ bool ARAPlaybackRegionReader::readSamples (int** destSamples, int numDestChannel
     {
         success = true;
         needClearSamples = false;
-        positionInfo.timeInSamples = startSampleInFile + startInSamples;
+        positionInfo.setTimeInSamples (startSampleInFile + startInSamples);

         while (numSamples > 0)
         {
             const int numSliceSamples = jmin (numSamples, maximumBlockSize);
             AudioBuffer<float> buffer ((float **) destSamples, numDestChannels, startOffsetInDestBuffer, numSliceSamples);
-            positionInfo.timeInSeconds = static_cast<double> (positionInfo.timeInSamples) / sampleRate;
+            positionInfo.setTimeInSeconds (static_cast<double> (*positionInfo.getTimeInSamples()) / sampleRate);
             success &= playbackRenderer->processBlock (buffer, AudioProcessor::Realtime::no, positionInfo);
             numSamples -= numSliceSamples;
             startOffsetInDestBuffer += numSliceSamples;
-            positionInfo.timeInSamples += numSliceSamples;
+            positionInfo.setTimeInSamples (*positionInfo.getTimeInSamples() + numSliceSamples);
         }
     }
 }

View file

@@ -183,7 +183,7 @@ public:
 private:
     std::unique_ptr<ARAPlaybackRenderer> playbackRenderer;
-    AudioPlayHead::CurrentPositionInfo positionInfo;
+    AudioPlayHead::PositionInfo positionInfo;
     ReadWriteLock lock;

     static constexpr int maximumBlockSize = 4 * 1024;

View file

@ -1035,52 +1035,72 @@ namespace AAXClasses
AudioProcessor& getPluginInstance() const noexcept { return *pluginInstance; } AudioProcessor& getPluginInstance() const noexcept { return *pluginInstance; }
bool getCurrentPosition (juce::AudioPlayHead::CurrentPositionInfo& info) override Optional<PositionInfo> getPosition() const override
{ {
PositionInfo info;
const AAX_ITransport& transport = *Transport(); const AAX_ITransport& transport = *Transport();
info.bpm = 0.0; info.setBpm ([&]
check (transport.GetCurrentTempo (&info.bpm)); {
double bpm = 0.0;
return transport.GetCurrentTempo (&bpm) == AAX_SUCCESS ? makeOptional (bpm) : nullopt;
}());
info.setTimeSignature ([&]
{
int32_t num = 4, den = 4; int32_t num = 4, den = 4;
transport.GetCurrentMeter (&num, &den);
info.timeSigNumerator = (int) num;
info.timeSigDenominator = (int) den;
info.timeInSamples = 0;
if (transport.IsTransportPlaying (&info.isPlaying) != AAX_SUCCESS) return transport.GetCurrentMeter (&num, &den) == AAX_SUCCESS
info.isPlaying = false; ? makeOptional (TimeSignature { (int) num, (int) den })
: nullopt;
}());
if (info.isPlaying info.setIsPlaying ([&]
|| transport.GetTimelineSelectionStartPosition (&info.timeInSamples) != AAX_SUCCESS) {
check (transport.GetCurrentNativeSampleLocation (&info.timeInSamples)); bool isPlaying = false;
info.timeInSeconds = (float) info.timeInSamples / sampleRate; return transport.IsTransportPlaying (&isPlaying) == AAX_SUCCESS && isPlaying;
}());
info.setTimeInSamples ([&]
{
int64_t timeInSamples = 0;
return ((! info.getIsPlaying() && transport.GetTimelineSelectionStartPosition (&timeInSamples) == AAX_SUCCESS)
|| transport.GetCurrentNativeSampleLocation (&timeInSamples) == AAX_SUCCESS)
? makeOptional (timeInSamples)
: nullopt;
}());
info.setTimeInSeconds ((float) info.getTimeInSamples().orFallback (0) / sampleRate);
info.setPpqPosition ([&]
{
int64_t ticks = 0; int64_t ticks = 0;
if (info.isPlaying) return ((info.getIsPlaying() && transport.GetCustomTickPosition (&ticks, info.getTimeInSamples().orFallback (0))) == AAX_SUCCESS)
check (transport.GetCustomTickPosition (&ticks, info.timeInSamples)); || transport.GetCurrentTickPosition (&ticks) == AAX_SUCCESS
else ? makeOptional (ticks / 960000.0)
check (transport.GetCurrentTickPosition (&ticks)); : nullopt;
}());
info.ppqPosition = (double) ticks / 960000.0; bool isLooping = false;
info.isLooping = false;
int64_t loopStartTick = 0, loopEndTick = 0; int64_t loopStartTick = 0, loopEndTick = 0;
check (transport.GetCurrentLoopPosition (&info.isLooping, &loopStartTick, &loopEndTick));
info.ppqLoopStart = (double) loopStartTick / 960000.0;
info.ppqLoopEnd = (double) loopEndTick / 960000.0;
std::tie (info.frameRate, info.editOriginTime) = [&transport] if (transport.GetCurrentLoopPosition (&isLooping, &loopStartTick, &loopEndTick) == AAX_SUCCESS)
{ {
info.setIsLooping (isLooping);
info.setLoopPoints (LoopPoints { (double) loopStartTick / 960000.0, (double) loopEndTick / 960000.0 });
}
AAX_EFrameRate frameRate; AAX_EFrameRate frameRate;
int32_t offset; int32_t offset;
if (transport.GetTimeCodeInfo (&frameRate, &offset) != AAX_SUCCESS) if (transport.GetTimeCodeInfo (&frameRate, &offset) == AAX_SUCCESS)
return std::make_tuple (FrameRate(), 0.0); {
info.setFrameRate ([&]() -> Optional<FrameRate>
const auto rate = [&]
{ {
switch ((JUCE_AAX_EFrameRate) frameRate) switch ((JUCE_AAX_EFrameRate) frameRate)
{ {
@ -1114,18 +1134,14 @@ namespace AAXClasses
case JUCE_AAX_eFrameRate_Undeclared: break; case JUCE_AAX_eFrameRate_Undeclared: break;
} }
return FrameRate(); return {};
}(); }());
}
const auto effectiveRate = rate.getEffectiveRate(); const auto effectiveRate = info.getFrameRate().hasValue() ? info.getFrameRate()->getEffectiveRate() : 0.0;
return std::make_tuple (rate, effectiveRate != 0.0 ? offset / effectiveRate : 0.0); info.setEditOriginTime (effectiveRate != 0.0 ? makeOptional (offset / effectiveRate) : nullopt);
}();
// No way to get these: (?) return info;
info.isRecording = false;
info.ppqPositionOfLastBarStart = 0;
return true;
} }
void audioProcessorParameterChanged (AudioProcessor* /*processor*/, int parameterIndex, float newValue) override void audioProcessorParameterChanged (AudioProcessor* /*processor*/, int parameterIndex, float newValue) override

View file

@ -103,7 +103,6 @@ struct AudioProcessorHolder
class JuceAU : public AudioProcessorHolder, class JuceAU : public AudioProcessorHolder,
public MusicDeviceBase, public MusicDeviceBase,
public AudioProcessorListener, public AudioProcessorListener,
public AudioPlayHead,
public AudioProcessorParameter::Listener public AudioProcessorParameter::Listener
{ {
public: public:
@ -140,7 +139,6 @@ public:
totalInChannels = juceFilter->getTotalNumInputChannels(); totalInChannels = juceFilter->getTotalNumInputChannels();
totalOutChannels = juceFilter->getTotalNumOutputChannels(); totalOutChannels = juceFilter->getTotalNumOutputChannels();
juceFilter->setPlayHead (this);
juceFilter->addListener (this); juceFilter->addListener (this);
addParameters(); addParameters();
@ -1089,18 +1087,28 @@ public:
return rate > 0 ? juceFilter->getLatencySamples() / rate : 0; return rate > 0 ? juceFilter->getLatencySamples() / rate : 0;
} }
//============================================================================== class ScopedPlayHead : private AudioPlayHead
bool getCurrentPosition (AudioPlayHead::CurrentPositionInfo& info) override
{ {
info.timeSigNumerator = 0; public:
info.timeSigDenominator = 0; explicit ScopedPlayHead (JuceAU& juceAudioUnit)
info.editOriginTime = 0; : audioUnit (juceAudioUnit)
info.ppqPositionOfLastBarStart = 0; {
info.isRecording = false; audioUnit.juceFilter->setPlayHead (this);
}
info.frameRate = [this] ~ScopedPlayHead() override
{ {
switch (lastTimeStamp.mSMPTETime.mType) audioUnit.juceFilter->setPlayHead (nullptr);
}
private:
Optional<PositionInfo> getPosition() const override
{
PositionInfo info;
info.setFrameRate ([this]() -> Optional<FrameRate>
{
switch (audioUnit.lastTimeStamp.mSMPTETime.mType)
{ {
case kSMPTETimeType2398: return FrameRate().withBaseRate (24).withPullDown(); case kSMPTETimeType2398: return FrameRate().withBaseRate (24).withPullDown();
case kSMPTETimeType24: return FrameRate().withBaseRate (24); case kSMPTETimeType24: return FrameRate().withBaseRate (24);
@ -1117,13 +1125,16 @@ public:
default: break; default: break;
} }
return FrameRate(); return {};
}(); }());
if (CallHostBeatAndTempo (&info.ppqPosition, &info.bpm) != noErr) double ppqPosition = 0.0;
double bpm = 0.0;
if (audioUnit.CallHostBeatAndTempo (&ppqPosition, &bpm) == noErr)
{ {
info.ppqPosition = 0; info.setPpqPosition (ppqPosition);
info.bpm = 0; info.setBpm (bpm);
} }
UInt32 outDeltaSampleOffsetToNextBeat; UInt32 outDeltaSampleOffsetToNextBeat;
@ -1131,38 +1142,44 @@ public:
float num; float num;
UInt32 den; UInt32 den;
if (CallHostMusicalTimeLocation (&outDeltaSampleOffsetToNextBeat, &num, &den, if (audioUnit.CallHostMusicalTimeLocation (&outDeltaSampleOffsetToNextBeat,
&num,
&den,
&outCurrentMeasureDownBeat) == noErr) &outCurrentMeasureDownBeat) == noErr)
{ {
info.timeSigNumerator = (int) num; info.setTimeSignature (TimeSignature { (int) num, (int) den });
info.timeSigDenominator = (int) den; info.setPpqPositionOfLastBarStart (outCurrentMeasureDownBeat);
info.ppqPositionOfLastBarStart = outCurrentMeasureDownBeat;
} }
double outCurrentSampleInTimeLine, outCycleStartBeat = 0, outCycleEndBeat = 0; double outCurrentSampleInTimeLine = 0, outCycleStartBeat = 0, outCycleEndBeat = 0;
Boolean playing = false, looping = false, playchanged; Boolean playing = false, looping = false, playchanged;
if (CallHostTransportState (&playing, if (audioUnit.CallHostTransportState (&playing,
&playchanged, &playchanged,
&outCurrentSampleInTimeLine, &outCurrentSampleInTimeLine,
&looping, &looping,
&outCycleStartBeat, &outCycleStartBeat,
&outCycleEndBeat) != noErr) &outCycleEndBeat) == noErr)
{
info.setIsPlaying (playing);
info.setTimeInSamples ((int64) (outCurrentSampleInTimeLine + 0.5));
info.setTimeInSeconds (*info.getTimeInSamples() / audioUnit.getSampleRate());
info.setIsLooping (looping);
info.setLoopPoints (LoopPoints { outCycleStartBeat, outCycleEndBeat });
}
else
{ {
// If the host doesn't support this callback, then use the sample time from lastTimeStamp: // If the host doesn't support this callback, then use the sample time from lastTimeStamp:
outCurrentSampleInTimeLine = lastTimeStamp.mSampleTime; outCurrentSampleInTimeLine = audioUnit.lastTimeStamp.mSampleTime;
} }
info.isPlaying = playing; return info;
info.timeInSamples = (int64) (outCurrentSampleInTimeLine + 0.5);
info.timeInSeconds = info.timeInSamples / getSampleRate();
info.isLooping = looping;
info.ppqLoopStart = outCycleStartBeat;
info.ppqLoopEnd = outCycleEndBeat;
return true;
} }
JuceAU& audioUnit;
};
//==============================================================================
void sendAUEvent (const AudioUnitEventType type, const int juceParamIndex) void sendAUEvent (const AudioUnitEventType type, const int juceParamIndex)
{ {
if (restoringState) if (restoringState)
@ -1309,14 +1326,11 @@ public:
jassert (! juceFilter->getHostTimeNs()); jassert (! juceFilter->getHostTimeNs());
if ((inTimeStamp.mFlags & kAudioTimeStampHostTimeValid) != 0) if ((inTimeStamp.mFlags & kAudioTimeStampHostTimeValid) != 0)
{ juceFilter->setHostTimeNanos (timeConversions.hostTimeToNanos (inTimeStamp.mHostTime));
const auto timestamp = timeConversions.hostTimeToNanos (inTimeStamp.mHostTime);
juceFilter->setHostTimeNanos (&timestamp);
}
struct AtEndOfScope struct AtEndOfScope
{ {
~AtEndOfScope() { proc.setHostTimeNanos (nullptr); } ~AtEndOfScope() { proc.setHostTimeNanos (nullopt); }
AudioProcessor& proc; AudioProcessor& proc;
}; };
@ -1952,6 +1966,7 @@ private:
void processBlock (juce::AudioBuffer<float>& buffer, MidiBuffer& midiBuffer) noexcept void processBlock (juce::AudioBuffer<float>& buffer, MidiBuffer& midiBuffer) noexcept
{ {
const ScopedLock sl (juceFilter->getCallbackLock()); const ScopedLock sl (juceFilter->getCallbackLock());
const ScopedPlayHead playhead { *this };
if (juceFilter->isSuspended()) if (juceFilter->isSuspended())
{ {

View file

@ -519,6 +519,7 @@ public:
{ {
midiMessages.clear(); midiMessages.clear();
lastTimeStamp.mSampleTime = std::numeric_limits<Float64>::max(); lastTimeStamp.mSampleTime = std::numeric_limits<Float64>::max();
lastTimeStamp.mFlags = 0;
} }
//============================================================================== //==============================================================================
@ -852,7 +853,6 @@ public:
midiMessages.ensureSize (2048); midiMessages.ensureSize (2048);
midiMessages.clear(); midiMessages.clear();
zeromem (&lastAudioHead, sizeof (lastAudioHead));
hostMusicalContextCallback = [getAudioUnit() musicalContextBlock]; hostMusicalContextCallback = [getAudioUnit() musicalContextBlock];
hostTransportStateCallback = [getAudioUnit() transportStateBlock]; hostTransportStateCallback = [getAudioUnit() transportStateBlock];
@ -1004,16 +1004,13 @@ public:
} }
//============================================================================== //==============================================================================
bool getCurrentPosition (CurrentPositionInfo& info) override Optional<PositionInfo> getPosition() const override
{ {
bool musicContextCallSucceeded = false; PositionInfo info;
bool transportStateCallSucceeded = false; info.setTimeInSamples ((int64) (lastTimeStamp.mSampleTime + 0.5));
info.setTimeInSeconds (*info.getTimeInSamples() / getAudioProcessor().getSampleRate());
info = lastAudioHead; info.setFrameRate ([this]
info.timeInSamples = (int64) (lastTimeStamp.mSampleTime + 0.5);
info.timeInSeconds = info.timeInSamples / getAudioProcessor().getSampleRate();
info.frameRate = [this]
{ {
switch (lastTimeStamp.mSMPTETime.mType) switch (lastTimeStamp.mSMPTETime.mType)
{ {
@ -1033,7 +1030,7 @@ public:
} }
return FrameRate(); return FrameRate();
}(); }());
double num; double num;
NSInteger den; NSInteger den;
@ -1047,17 +1044,14 @@ public:
if (musicalContextCallback (&bpm, &num, &den, &ppqPosition, &outDeltaSampleOffsetToNextBeat, &outCurrentMeasureDownBeat)) if (musicalContextCallback (&bpm, &num, &den, &ppqPosition, &outDeltaSampleOffsetToNextBeat, &outCurrentMeasureDownBeat))
{ {
musicContextCallSucceeded = true; info.setTimeSignature (TimeSignature { (int) num, (int) den });
info.setPpqPositionOfLastBarStart (outCurrentMeasureDownBeat);
info.timeSigNumerator = (int) num; info.setBpm (bpm);
info.timeSigDenominator = (int) den; info.setPpqPosition (ppqPosition);
info.ppqPositionOfLastBarStart = outCurrentMeasureDownBeat;
info.bpm = bpm;
info.ppqPosition = ppqPosition;
} }
} }
double outCurrentSampleInTimeLine, outCycleStartBeat = 0, outCycleEndBeat = 0; double outCurrentSampleInTimeLine = 0, outCycleStartBeat = 0, outCycleEndBeat = 0;
AUHostTransportStateFlags flags; AUHostTransportStateFlags flags;
if (hostTransportStateCallback != nullptr) if (hostTransportStateCallback != nullptr)
@ -1066,22 +1060,16 @@ public:
if (transportStateCallback (&flags, &outCurrentSampleInTimeLine, &outCycleStartBeat, &outCycleEndBeat)) if (transportStateCallback (&flags, &outCurrentSampleInTimeLine, &outCycleStartBeat, &outCycleEndBeat))
{ {
transportStateCallSucceeded = true; info.setTimeInSamples ((int64) (outCurrentSampleInTimeLine + 0.5));
info.setTimeInSeconds (*info.getTimeInSamples() / getAudioProcessor().getSampleRate());
info.timeInSamples = (int64) (outCurrentSampleInTimeLine + 0.5); info.setIsPlaying ((flags & AUHostTransportStateMoving) != 0);
info.timeInSeconds = info.timeInSamples / getAudioProcessor().getSampleRate(); info.setIsLooping ((flags & AUHostTransportStateCycling) != 0);
info.isPlaying = ((flags & AUHostTransportStateMoving) != 0); info.setIsRecording ((flags & AUHostTransportStateRecording) != 0);
info.isLooping = ((flags & AUHostTransportStateCycling) != 0); info.setLoopPoints (LoopPoints { outCycleStartBeat, outCycleEndBeat });
info.isRecording = ((flags & AUHostTransportStateRecording) != 0);
info.ppqLoopStart = outCycleStartBeat;
info.ppqLoopEnd = outCycleEndBeat;
} }
} }
if (musicContextCallSucceeded && transportStateCallSucceeded) return info;
lastAudioHead = info;
return true;
} }
//============================================================================== //==============================================================================
@ -1556,15 +1544,12 @@ private:
if (timestamp != nullptr) if (timestamp != nullptr)
{ {
if ((timestamp->mFlags & kAudioTimeStampHostTimeValid) != 0) if ((timestamp->mFlags & kAudioTimeStampHostTimeValid) != 0)
{ getAudioProcessor().setHostTimeNanos (timeConversions.hostTimeToNanos (timestamp->mHostTime));
const auto convertedTime = timeConversions.hostTimeToNanos (timestamp->mHostTime);
getAudioProcessor().setHostTimeNanos (&convertedTime);
}
} }
struct AtEndOfScope struct AtEndOfScope
{ {
~AtEndOfScope() { proc.setHostTimeNanos (nullptr); } ~AtEndOfScope() { proc.setHostTimeNanos (nullopt); }
AudioProcessor& proc; AudioProcessor& proc;
}; };
@ -1854,7 +1839,6 @@ private:
ObjCBlock<AUHostTransportStateBlock> hostTransportStateCallback; ObjCBlock<AUHostTransportStateBlock> hostTransportStateCallback;
AudioTimeStamp lastTimeStamp; AudioTimeStamp lastTimeStamp;
CurrentPositionInfo lastAudioHead;
String contextName; String contextName;

View file

@ -267,15 +267,9 @@ public:
PlayHead (LV2_URID_Map mapFeatureIn, double sampleRateIn) PlayHead (LV2_URID_Map mapFeatureIn, double sampleRateIn)
: parser (mapFeatureIn), sampleRate (sampleRateIn) : parser (mapFeatureIn), sampleRate (sampleRateIn)
{ {
info.frameRate = fpsUnknown;
info.isLooping = false;
info.isRecording = false;
info.ppqLoopEnd = 0;
info.ppqLoopStart = 0;
info.ppqPositionOfLastBarStart = 0;
} }
void invalidate() { valid = false; } void invalidate() { info = nullopt; }
void readNewInfo (const LV2_Atom_Event* event) void readNewInfo (const LV2_Atom_Event* event)
{ {
@ -289,6 +283,7 @@ public:
const LV2_Atom* atomFrame = nullptr; const LV2_Atom* atomFrame = nullptr;
const LV2_Atom* atomSpeed = nullptr; const LV2_Atom* atomSpeed = nullptr;
const LV2_Atom* atomBar = nullptr;
const LV2_Atom* atomBeat = nullptr; const LV2_Atom* atomBeat = nullptr;
const LV2_Atom* atomBeatUnit = nullptr; const LV2_Atom* atomBeatUnit = nullptr;
const LV2_Atom* atomBeatsPerBar = nullptr; const LV2_Atom* atomBeatsPerBar = nullptr;
@ -296,6 +291,7 @@ public:
LV2_Atom_Object_Query query[] { { mLV2_TIME__frame, &atomFrame }, LV2_Atom_Object_Query query[] { { mLV2_TIME__frame, &atomFrame },
{ mLV2_TIME__speed, &atomSpeed }, { mLV2_TIME__speed, &atomSpeed },
{ mLV2_TIME__bar, &atomBar },
{ mLV2_TIME__beat, &atomBeat }, { mLV2_TIME__beat, &atomBeat },
{ mLV2_TIME__beatUnit, &atomBeatUnit }, { mLV2_TIME__beatUnit, &atomBeatUnit },
{ mLV2_TIME__beatsPerBar, &atomBeatsPerBar }, { mLV2_TIME__beatsPerBar, &atomBeatsPerBar },
@ -304,37 +300,38 @@ public:
lv2_atom_object_query (object, query); lv2_atom_object_query (object, query);
const auto setTimeInFrames = [&] (int64_t value) info.emplace();
{
info.timeInSamples = value;
info.timeInSeconds = (double) info.timeInSamples / sampleRate;
};
// Carla always seems to give us an integral 'beat' even though I'd expect // Carla always seems to give us an integral 'beat' even though I'd expect
// it to be a floating-point value // it to be a floating-point value
if ( lv2_shared::withValue (parser.parseNumericAtom<float> (atomBeatsPerMinute), [&] (float value) { info.bpm = value; }) const auto numerator = parser.parseNumericAtom<float> (atomBeatsPerBar);
&& lv2_shared::withValue (parser.parseNumericAtom<float> (atomBeatsPerBar), [&] (float value) { info.timeSigNumerator = (int) value; }) const auto denominator = parser.parseNumericAtom<int32_t> (atomBeatUnit);
&& lv2_shared::withValue (parser.parseNumericAtom<int32_t> (atomBeatUnit), [&] (int32_t value) { info.timeSigDenominator = value; })
&& lv2_shared::withValue (parser.parseNumericAtom<double> (atomBeat), [&] (double value) { info.ppqPosition = value; }) if (numerator.hasValue() && denominator.hasValue())
&& lv2_shared::withValue (parser.parseNumericAtom<float> (atomSpeed), [&] (float value) { info.isPlaying = value != 0.0f; }) info->setTimeSignature (TimeSignature { (int) *numerator, (int) *denominator });
&& lv2_shared::withValue (parser.parseNumericAtom<int64_t> (atomFrame), setTimeInFrames))
info->setBpm (parser.parseNumericAtom<float> (atomBeatsPerMinute));
info->setPpqPosition (parser.parseNumericAtom<double> (atomBeat));
info->setIsPlaying (parser.parseNumericAtom<float> (atomSpeed).orFallback (0.0f) != 0.0f);
info->setBarCount (parser.parseNumericAtom<int64_t> (atomBar));
if (const auto parsed = parser.parseNumericAtom<int64_t> (atomFrame))
{ {
valid = true; info->setTimeInSamples (*parsed);
info->setTimeInSeconds ((double) *parsed / sampleRate);
} }
} }
bool getCurrentPosition (CurrentPositionInfo& result) override Optional<PositionInfo> getPosition() const override
{ {
result = info; return info;
return valid;
} }
private: private:
lv2_shared::NumericAtomParser parser; lv2_shared::NumericAtomParser parser;
CurrentPositionInfo info; Optional<PositionInfo> info;
double sampleRate; double sampleRate;
bool valid = false;
#define X(str) const LV2_URID m##str = parser.map (str); #define X(str) const LV2_URID m##str = parser.map (str);
X (LV2_ATOM__Blank) X (LV2_ATOM__Blank)
@ -346,6 +343,7 @@ private:
X (LV2_TIME__beatsPerMinute) X (LV2_TIME__beatsPerMinute)
X (LV2_TIME__frame) X (LV2_TIME__frame)
X (LV2_TIME__speed) X (LV2_TIME__speed)
X (LV2_TIME__bar)
#undef X #undef X
JUCE_LEAK_DETECTOR (PlayHead) JUCE_LEAK_DETECTOR (PlayHead)

View file

@ -400,7 +400,7 @@ public:
struct AtEndOfScope struct AtEndOfScope
{ {
~AtEndOfScope() { proc.setHostTimeNanos (nullptr); } ~AtEndOfScope() { proc.setHostTimeNanos (nullopt); }
AudioProcessor& proc; AudioProcessor& proc;
}; };
@ -628,30 +628,19 @@ public:
} }
auto& info = currentPosition.emplace(); auto& info = currentPosition.emplace();
info.bpm = (ti->flags & Vst2::kVstTempoValid) != 0 ? ti->tempo : 0.0; info.setBpm ((ti->flags & Vst2::kVstTempoValid) != 0 ? makeOptional (ti->tempo) : nullopt);
if ((ti->flags & Vst2::kVstTimeSigValid) != 0) info.setTimeSignature ((ti->flags & Vst2::kVstTimeSigValid) != 0 ? makeOptional (TimeSignature { ti->timeSigNumerator, ti->timeSigDenominator })
: nullopt);
info.setTimeInSamples ((int64) (ti->samplePos + 0.5));
info.setTimeInSeconds (ti->samplePos / ti->sampleRate);
info.setPpqPosition ((ti->flags & Vst2::kVstPpqPosValid) != 0 ? makeOptional (ti->ppqPos) : nullopt);
info.setPpqPositionOfLastBarStart ((ti->flags & Vst2::kVstBarsValid) != 0 ? makeOptional (ti->barStartPos) : nullopt);
if ((ti->flags & Vst2::kVstSmpteValid) != 0)
{ {
info.timeSigNumerator = ti->timeSigNumerator; info.setFrameRate ([&]() -> Optional<FrameRate>
info.timeSigDenominator = ti->timeSigDenominator;
}
else
{
info.timeSigNumerator = 4;
info.timeSigDenominator = 4;
}
info.timeInSamples = (int64) (ti->samplePos + 0.5);
info.timeInSeconds = ti->samplePos / ti->sampleRate;
info.ppqPosition = (ti->flags & Vst2::kVstPpqPosValid) != 0 ? ti->ppqPos : 0.0;
info.ppqPositionOfLastBarStart = (ti->flags & Vst2::kVstBarsValid) != 0 ? ti->barStartPos : 0.0;
std::tie (info.frameRate, info.editOriginTime) = [ti]
{
if ((ti->flags & Vst2::kVstSmpteValid) == 0)
return std::make_tuple (FrameRate(), 0.0);
const auto rate = [&]
{ {
switch (ti->smpteFrameRate) switch (ti->smpteFrameRate)
{ {
@ -673,43 +662,27 @@ public:
case Vst2::kVstSmpteFilm35mm: return FrameRate().withBaseRate (24); case Vst2::kVstSmpteFilm35mm: return FrameRate().withBaseRate (24);
} }
return FrameRate(); return nullopt;
}(); }());
const auto effectiveRate = rate.getEffectiveRate(); const auto effectiveRate = info.getFrameRate().hasValue() ? info.getFrameRate()->getEffectiveRate() : 0.0;
return std::make_tuple (rate, effectiveRate != 0.0 ? ti->smpteOffset / (80.0 * effectiveRate) : 0.0); info.setEditOriginTime (effectiveRate != 0.0 ? makeOptional (ti->smpteOffset / (80.0 * effectiveRate)) : nullopt);
}();
info.isRecording = (ti->flags & Vst2::kVstTransportRecording) != 0;
info.isPlaying = (ti->flags & (Vst2::kVstTransportRecording | Vst2::kVstTransportPlaying)) != 0;
info.isLooping = (ti->flags & Vst2::kVstTransportCycleActive) != 0;
if ((ti->flags & Vst2::kVstCyclePosValid) != 0)
{
info.ppqLoopStart = ti->cycleStartPos;
info.ppqLoopEnd = ti->cycleEndPos;
}
else
{
info.ppqLoopStart = 0;
info.ppqLoopEnd = 0;
} }
if ((ti->flags & Vst2::kVstNanosValid) != 0) info.setIsRecording ((ti->flags & Vst2::kVstTransportRecording) != 0);
{ info.setIsPlaying ((ti->flags & (Vst2::kVstTransportRecording | Vst2::kVstTransportPlaying)) != 0);
const auto nanos = (uint64_t) ti->nanoSeconds; info.setIsLooping ((ti->flags & Vst2::kVstTransportCycleActive) != 0);
processor->setHostTimeNanos (&nanos);
} info.setLoopPoints ((ti->flags & Vst2::kVstCyclePosValid) != 0 ? makeOptional (LoopPoints { ti->cycleStartPos, ti->cycleEndPos })
: nullopt);
processor->setHostTimeNanos ((ti->flags & Vst2::kVstNanosValid) != 0 ? makeOptional ((uint64_t) ti->nanoSeconds) : nullopt);
} }
//============================================================================== //==============================================================================
bool getCurrentPosition (AudioPlayHead::CurrentPositionInfo& info) override Optional<PositionInfo> getPosition() const override
{ {
if (! currentPosition.hasValue()) return currentPosition;
return false;
info = *currentPosition;
return true;
} }
//============================================================================== //==============================================================================
@ -2149,7 +2122,7 @@ private:
Vst2::ERect editorRect; Vst2::ERect editorRect;
MidiBuffer midiEvents; MidiBuffer midiEvents;
VSTMidiEventList outgoingEvents; VSTMidiEventList outgoingEvents;
Optional<CurrentPositionInfo> currentPosition; Optional<PositionInfo> currentPosition;
LegacyAudioParametersWrapper juceParameters; LegacyAudioParametersWrapper juceParameters;

View file

@@ -2868,34 +2868,46 @@ public:
                                        Steinberg::int32 channel, Vst::UnitID& unitId) override { return comPluginInstance->getUnitByBus (type, dir, busIndex, channel, unitId); }

    //==============================================================================
-    bool getCurrentPosition (CurrentPositionInfo& info) override
+    Optional<PositionInfo> getPosition() const override
    {
-        info.timeInSamples = jmax ((juce::int64) 0, processContext.projectTimeSamples);
-        info.timeInSeconds = static_cast<double> (info.timeInSamples) / processContext.sampleRate;
-        info.bpm = jmax (1.0, processContext.tempo);
-        info.timeSigNumerator = jmax (1, (int) processContext.timeSigNumerator);
-        info.timeSigDenominator = jmax (1, (int) processContext.timeSigDenominator);
-        info.ppqPositionOfLastBarStart = processContext.barPositionMusic;
-        info.ppqPosition = processContext.projectTimeMusic;
-        info.ppqLoopStart = processContext.cycleStartMusic;
-        info.ppqLoopEnd = processContext.cycleEndMusic;
-        info.isRecording = (processContext.state & Vst::ProcessContext::kRecording) != 0;
-        info.isPlaying = (processContext.state & Vst::ProcessContext::kPlaying) != 0;
-        info.isLooping = (processContext.state & Vst::ProcessContext::kCycleActive) != 0;
-        info.frameRate = [&]
-        {
-            if ((processContext.state & Vst::ProcessContext::kSmpteValid) == 0)
-                return FrameRate();
-            return FrameRate().withBaseRate ((int) processContext.frameRate.framesPerSecond)
-                              .withDrop ((processContext.frameRate.flags & Vst::FrameRate::kDropRate) != 0)
-                              .withPullDown ((processContext.frameRate.flags & Vst::FrameRate::kPullDownRate) != 0);
-        }();
-        info.editOriginTime = (double) processContext.smpteOffsetSubframes / (80.0 * info.frameRate.getEffectiveRate());
-        return true;
+        PositionInfo info;
+        info.setTimeInSamples (jmax ((juce::int64) 0, processContext.projectTimeSamples));
+        info.setTimeInSeconds (static_cast<double> (*info.getTimeInSamples()) / processContext.sampleRate);
+        info.setIsRecording ((processContext.state & Vst::ProcessContext::kRecording) != 0);
+        info.setIsPlaying ((processContext.state & Vst::ProcessContext::kPlaying) != 0);
+        info.setIsLooping ((processContext.state & Vst::ProcessContext::kCycleActive) != 0);
+        info.setBpm ((processContext.state & Vst::ProcessContext::kTempoValid) != 0
+                         ? makeOptional (processContext.tempo)
+                         : nullopt);
+        info.setTimeSignature ((processContext.state & Vst::ProcessContext::kTimeSigValid) != 0
+                                   ? makeOptional (TimeSignature { processContext.timeSigNumerator, processContext.timeSigDenominator })
+                                   : nullopt);
+        info.setLoopPoints ((processContext.state & Vst::ProcessContext::kCycleValid) != 0
+                                ? makeOptional (LoopPoints { processContext.cycleStartMusic, processContext.cycleEndMusic })
+                                : nullopt);
+        info.setPpqPosition ((processContext.state & Vst::ProcessContext::kProjectTimeMusicValid) != 0
+                                 ? makeOptional (processContext.projectTimeMusic)
+                                 : nullopt);
+        info.setPpqPositionOfLastBarStart ((processContext.state & Vst::ProcessContext::kBarPositionValid) != 0
+                                               ? makeOptional (processContext.barPositionMusic)
+                                               : nullopt);
+        info.setFrameRate ((processContext.state & Vst::ProcessContext::kSmpteValid) != 0
+                               ? makeOptional (FrameRate().withBaseRate ((int) processContext.frameRate.framesPerSecond)
+                                                          .withDrop ((processContext.frameRate.flags & Vst::FrameRate::kDropRate) != 0)
+                                                          .withPullDown ((processContext.frameRate.flags & Vst::FrameRate::kPullDownRate) != 0))
+                               : nullopt);
+        info.setEditOriginTime (info.getFrameRate().hasValue()
+                                    ? makeOptional ((double) processContext.smpteOffsetSubframes / (80.0 * info.getFrameRate()->getEffectiveRate()))
+                                    : nullopt);
+        return info;
    }

    //==============================================================================
@@ -3335,10 +3347,7 @@ public:
            processContext = *data.processContext;
            if ((processContext.state & Vst::ProcessContext::kSystemTimeValid) != 0)
-            {
-                const auto timestamp = (uint64_t) processContext.systemTime;
-                getPluginInstance().setHostTimeNanos (&timestamp);
-            }
+                getPluginInstance().setHostTimeNanos ((uint64_t) processContext.systemTime);
            if (juceVST3EditController != nullptr)
                juceVST3EditController->vst3IsPlaying = (processContext.state & Vst::ProcessContext::kPlaying) != 0;
@@ -3353,7 +3362,7 @@ public:
        struct AtEndOfScope
        {
-            ~AtEndOfScope() { proc.setHostTimeNanos (nullptr); }
+            ~AtEndOfScope() { proc.setHostTimeNanos (nullopt); }
            AudioProcessor& proc;
        };
View file
@@ -1385,7 +1385,7 @@ public:
    void processAudio (AudioBuffer<float>& buffer, MidiBuffer& midiMessages, bool processBlockBypassedCalled)
    {
-        if (const auto* hostTimeNs = getHostTimeNs())
+        if (const auto hostTimeNs = getHostTimeNs())
        {
            timeStamp.mHostTime = *hostTimeNs;
            timeStamp.mFlags |= kAudioTimeStampHostTimeValid;
@@ -2298,12 +2298,10 @@ private:
    {
        if (auto* ph = getPlayHead())
        {
-            AudioPlayHead::CurrentPositionInfo result;
-            if (ph->getCurrentPosition (result))
+            if (const auto pos = ph->getPosition())
            {
-                setIfNotNull (outCurrentBeat, result.ppqPosition);
-                setIfNotNull (outCurrentTempo, result.bpm);
+                setIfNotNull (outCurrentBeat, pos->getPpqPosition().orFallback (0.0));
+                setIfNotNull (outCurrentTempo, pos->getBpm().orFallback (0.0));
                return noErr;
            }
        }
@@ -2318,14 +2316,13 @@ private:
    {
        if (auto* ph = getPlayHead())
        {
-            AudioPlayHead::CurrentPositionInfo result;
-            if (ph->getCurrentPosition (result))
+            if (const auto pos = ph->getPosition())
            {
+                const auto signature = pos->getTimeSignature().orFallback (AudioPlayHead::TimeSignature{});
                setIfNotNull (outDeltaSampleOffsetToNextBeat, (UInt32) 0); //xxx
-                setIfNotNull (outTimeSig_Numerator, (UInt32) result.timeSigNumerator);
-                setIfNotNull (outTimeSig_Denominator, (UInt32) result.timeSigDenominator);
-                setIfNotNull (outCurrentMeasureDownBeat, result.ppqPositionOfLastBarStart); //xxx wrong
+                setIfNotNull (outTimeSig_Numerator, (UInt32) signature.numerator);
+                setIfNotNull (outTimeSig_Denominator, (UInt32) signature.denominator);
+                setIfNotNull (outCurrentMeasureDownBeat, pos->getPpqPositionOfLastBarStart().orFallback (0.0)); //xxx wrong
                return noErr;
            }
        }
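On the consuming side, the pattern used by the AU wrapper above is the same one plugins can use: test the returned Optional, then choose an explicit fallback per value. A hedged sketch (not part of the diff; the fallback tempo is arbitrary):

// Returns the playhead tempo, or the caller's fallback when the host does not
// report one (or when there is no playhead at all).
static double getTempoOrDefault (juce::AudioPlayHead* playHead, double fallbackBpm)
{
    if (playHead != nullptr)
        if (const auto position = playHead->getPosition())
            return position->getBpm().orFallback (fallbackBpm);

    return fallbackBpm;
}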
View file
@@ -134,16 +134,6 @@ struct ObjectTraits { static constexpr auto construct = lv2_atom_forge_object;
using SequenceFrame = ScopedFrame<SequenceTraits>;
using ObjectFrame = ScopedFrame<ObjectTraits>;
-template <typename Value, typename Callback>
-bool withValue (const Optional<Value>& opt, Callback&& callback)
-{
-    if (! opt.hasValue())
-        return false;
-    callback (*opt);
-    return true;
-}
struct NumericAtomParser
{
    explicit NumericAtomParser (LV2_URID_Map mapFeatureIn)
View file
@@ -4767,9 +4767,9 @@ private:
            return;
        // Write timing info to the control port
-        AudioPlayHead::CurrentPositionInfo info;
-        if (! playhead->getCurrentPosition (info))
+        const auto info = playhead->getPosition();
+        if (! info.hasValue())
            return;
        const auto& urids = instance->urids;
@@ -4778,29 +4778,47 @@ private:
        lv2_shared::ObjectFrame object { forge, (uint32_t) 0, urids.mLV2_TIME__Position };
-        lv2_atom_forge_key (forge, urids.mLV2_TIME__frame);
-        lv2_atom_forge_long (forge, info.timeInSamples);
-        lv2_atom_forge_key (forge, urids.mLV2_TIME__bar);
-        lv2_atom_forge_long (forge, (info.ppqPosition * info.timeSigDenominator) / (4 * info.timeSigNumerator));
-        lv2_atom_forge_key (forge, urids.mLV2_TIME__speed);
-        lv2_atom_forge_float (forge, info.isPlaying ? 1.0f : 0.0f);
-        lv2_atom_forge_key (forge, urids.mLV2_TIME__barBeat);
-        lv2_atom_forge_float (forge, (float) (info.ppqPosition - info.ppqPositionOfLastBarStart));
-        lv2_atom_forge_key (forge, urids.mLV2_TIME__beat);
-        lv2_atom_forge_double (forge, info.ppqPosition);
-        lv2_atom_forge_key (forge, urids.mLV2_TIME__beatUnit);
-        lv2_atom_forge_int (forge, info.timeSigDenominator);
-        lv2_atom_forge_key (forge, urids.mLV2_TIME__beatsPerBar);
-        lv2_atom_forge_float (forge, (float) info.timeSigNumerator);
-        lv2_atom_forge_key (forge, urids.mLV2_TIME__beatsPerMinute);
-        lv2_atom_forge_float (forge, (float) info.bpm);
+        lv2_atom_forge_key (forge, urids.mLV2_TIME__speed);
+        lv2_atom_forge_float (forge, info->getIsPlaying() ? 1.0f : 0.0f);
+        if (const auto samples = info->getTimeInSamples())
+        {
+            lv2_atom_forge_key (forge, urids.mLV2_TIME__frame);
+            lv2_atom_forge_long (forge, *samples);
+        }
+        if (const auto bar = info->getBarCount())
+        {
+            lv2_atom_forge_key (forge, urids.mLV2_TIME__bar);
+            lv2_atom_forge_long (forge, *bar);
+        }
+        if (const auto beat = info->getPpqPosition())
+        {
+            if (const auto barStart = info->getPpqPositionOfLastBarStart())
+            {
+                lv2_atom_forge_key (forge, urids.mLV2_TIME__barBeat);
+                lv2_atom_forge_float (forge, (float) (*beat - *barStart));
+            }
+            lv2_atom_forge_key (forge, urids.mLV2_TIME__beat);
+            lv2_atom_forge_double (forge, *beat);
+        }
+        if (const auto sig = info->getTimeSignature())
+        {
+            lv2_atom_forge_key (forge, urids.mLV2_TIME__beatUnit);
+            lv2_atom_forge_int (forge, sig->denominator);
+            lv2_atom_forge_key (forge, urids.mLV2_TIME__beatsPerBar);
+            lv2_atom_forge_float (forge, (float) sig->numerator);
+        }
+        if (const auto bpm = info->getBpm())
+        {
+            lv2_atom_forge_key (forge, urids.mLV2_TIME__beatsPerMinute);
+            lv2_atom_forge_float (forge, (float) *bpm);
+        }
    }
    void preparePortsForRun (AudioBuffer<float>& audio, MidiBuffer& midiBuffer)
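The new getBarCount() member is currently only written by this LV2 code, and only when the host supplied it. A client that still wants a bar index when the member is unset could derive an estimate itself; the sketch below (not part of the diff) reuses the arithmetic of the removed line above (one bar spans 4 * numerator / denominator quarter notes) and assumes the time signature has been constant since the timeline origin:

#include <cmath>

static juce::Optional<juce::int64> estimateBarCount (const juce::AudioPlayHead::PositionInfo& info)
{
    if (const auto bar = info.getBarCount())
        return *bar;                                  // prefer the host-provided value

    const auto ppq = info.getPpqPosition();
    const auto sig = info.getTimeSignature();

    if (! ppq.hasValue() || ! sig.hasValue() || sig->numerator <= 0 || sig->denominator <= 0)
        return {};                                    // not enough information to guess

    const auto ppqPerBar = 4.0 * sig->numerator / sig->denominator;
    return (juce::int64) std::floor (*ppq / ppqPerBar);
}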
View file
@@ -262,7 +262,7 @@ static void setStateForAllBusesOfType (Vst::IComponent* component,
static void toProcessContext (Vst::ProcessContext& context,
                              AudioPlayHead* playHead,
                              double sampleRate,
-                              const uint64_t* hostTimeNs)
+                              Optional<uint64_t> hostTimeNs)
{
    jassert (sampleRate > 0.0); //Must always be valid, as stated by the VST3 SDK
@@ -270,55 +270,67 @@ static void toProcessContext (Vst::ProcessContext& context,
    zerostruct (context);
    context.sampleRate = sampleRate;
-    auto& fr = context.frameRate;
-    if (playHead != nullptr)
+    const auto position = playHead != nullptr ? playHead->getPosition()
+                                              : nullopt;
+    if (position.hasValue())
    {
-        AudioPlayHead::CurrentPositionInfo position;
-        playHead->getCurrentPosition (position);
-        context.projectTimeSamples = position.timeInSamples; // Must always be valid, as stated by the VST3 SDK
-        context.projectTimeMusic = position.ppqPosition; // Does not always need to be valid...
-        context.tempo = position.bpm;
-        context.timeSigNumerator = position.timeSigNumerator;
-        context.timeSigDenominator = position.timeSigDenominator;
-        context.barPositionMusic = position.ppqPositionOfLastBarStart;
-        context.cycleStartMusic = position.ppqLoopStart;
-        context.cycleEndMusic = position.ppqLoopEnd;
-        context.frameRate.framesPerSecond = (Steinberg::uint32) position.frameRate.getBaseRate();
-        context.frameRate.flags = (Steinberg::uint32) ((position.frameRate.isDrop() ? FrameRate::kDropRate : 0)
-                                                     | (position.frameRate.isPullDown() ? FrameRate::kPullDownRate : 0));
-        if (position.isPlaying)   context.state |= ProcessContext::kPlaying;
-        if (position.isRecording) context.state |= ProcessContext::kRecording;
-        if (position.isLooping)   context.state |= ProcessContext::kCycleActive;
-    }
-    else
-    {
-        context.tempo = 120.0;
-        context.timeSigNumerator = 4;
-        context.timeSigDenominator = 4;
-        fr.framesPerSecond = 30;
-        fr.flags = 0;
-    }
-    if (context.projectTimeMusic >= 0.0)       context.state |= ProcessContext::kProjectTimeMusicValid;
-    if (context.barPositionMusic >= 0.0)       context.state |= ProcessContext::kBarPositionValid;
-    if (context.tempo > 0.0)                   context.state |= ProcessContext::kTempoValid;
-    if (context.frameRate.framesPerSecond > 0) context.state |= ProcessContext::kSmpteValid;
-    if (context.cycleStartMusic >= 0.0
-         && context.cycleEndMusic > 0.0
-         && context.cycleEndMusic > context.cycleStartMusic)
-    {
-        context.state |= ProcessContext::kCycleValid;
-    }
-    if (context.timeSigNumerator > 0 && context.timeSigDenominator > 0)
-        context.state |= ProcessContext::kTimeSigValid;
-    if (hostTimeNs != nullptr)
+        if (const auto timeInSamples = position->getTimeInSamples())
+            context.projectTimeSamples = *timeInSamples;
+        else
+            jassertfalse; // The time in samples *must* be valid.
+        if (const auto tempo = position->getBpm())
+        {
+            context.state |= ProcessContext::kTempoValid;
+            context.tempo = *tempo;
+        }
+        if (const auto loop = position->getLoopPoints())
+        {
+            context.state |= ProcessContext::kCycleValid;
+            context.cycleStartMusic = loop->ppqStart;
+            context.cycleEndMusic = loop->ppqEnd;
+        }
+        if (const auto sig = position->getTimeSignature())
+        {
+            context.state |= ProcessContext::kTimeSigValid;
+            context.timeSigNumerator = sig->numerator;
+            context.timeSigDenominator = sig->denominator;
+        }
+        if (const auto pos = position->getPpqPosition())
+        {
+            context.state |= ProcessContext::kProjectTimeMusicValid;
+            context.projectTimeMusic = *pos;
+        }
+        if (const auto barStart = position->getPpqPositionOfLastBarStart())
+        {
+            context.state |= ProcessContext::kBarPositionValid;
+            context.barPositionMusic = *barStart;
+        }
+        if (const auto frameRate = position->getFrameRate())
+        {
+            if (const auto offset = position->getEditOriginTime())
+            {
+                context.state |= ProcessContext::kSmpteValid;
+                context.smpteOffsetSubframes = (Steinberg::int32) (80.0 * *offset * frameRate->getEffectiveRate());
+                context.frameRate.framesPerSecond = (Steinberg::uint32) frameRate->getBaseRate();
+                context.frameRate.flags = (Steinberg::uint32) ((frameRate->isDrop() ? FrameRate::kDropRate : 0)
+                                                             | (frameRate->isPullDown() ? FrameRate::kPullDownRate : 0));
+            }
+        }
+        if (position->getIsPlaying())   context.state |= ProcessContext::kPlaying;
+        if (position->getIsRecording()) context.state |= ProcessContext::kRecording;
+        if (position->getIsLooping())   context.state |= ProcessContext::kCycleActive;
+    }
+    if (hostTimeNs.hasValue())
    {
        context.systemTime = (int64_t) *hostTimeNs;
        jassert (context.systemTime >= 0);
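Both directions of the SMPTE conversion seen in this commit use the same convention: the edit-origin offset is stored as a count of sub-frames, 80 of them per video frame. A small sketch of that relationship (not part of the diff), matching the scaling used in the hunk above:

// editOriginSeconds -> sub-frames and back. With 80 sub-frames per frame, a
// 25 fps session whose edit origin is 1.0 s stores 80 * 25 = 2000 sub-frames.
static juce::int32 toSubframes (double editOriginSeconds, double effectiveFrameRate)
{
    return (juce::int32) (80.0 * editOriginSeconds * effectiveFrameRate);
}

static double toEditOriginSeconds (juce::int32 subframes, double effectiveFrameRate)
{
    return effectiveFrameRate != 0.0 ? subframes / (80.0 * effectiveFrameRate) : 0.0;
}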
View file
@@ -2286,6 +2286,20 @@ private:
        return { nullptr, nullptr };
    }
+    template <typename Member, typename Value>
+    void setFromOptional (Member& target, Optional<Value> opt, int32_t flag)
+    {
+        if (opt.hasValue())
+        {
+            target = static_cast<Member> (*opt);
+            vstHostTime.flags |= flag;
+        }
+        else
+        {
+            vstHostTime.flags &= ~flag;
+        }
+    }
    //==============================================================================
    template <typename FloatType>
    void processAudio (AudioBuffer<FloatType>& buffer, MidiBuffer& midiMessages,
@@ -2311,34 +2325,32 @@ private:
    {
        if (auto* currentPlayHead = getPlayHead())
        {
-            AudioPlayHead::CurrentPositionInfo position;
-            if (currentPlayHead->getCurrentPosition (position))
+            if (const auto position = currentPlayHead->getPosition())
            {
-                vstHostTime.samplePos = (double) position.timeInSamples;
-                vstHostTime.tempo = position.bpm;
-                vstHostTime.timeSigNumerator = position.timeSigNumerator;
-                vstHostTime.timeSigDenominator = position.timeSigDenominator;
-                vstHostTime.ppqPos = position.ppqPosition;
-                vstHostTime.barStartPos = position.ppqPositionOfLastBarStart;
-                vstHostTime.flags |= Vst2::kVstTempoValid
-                                       | Vst2::kVstTimeSigValid
-                                       | Vst2::kVstPpqPosValid
-                                       | Vst2::kVstBarsValid;
-                if (const auto* hostTimeNs = getHostTimeNs())
+                if (const auto samplePos = position->getTimeInSamples())
+                    vstHostTime.samplePos = (double) *samplePos;
+                else
+                    jassertfalse; // VST hosts *must* call setTimeInSamples on the audio playhead
+                if (auto sig = position->getTimeSignature())
                {
-                    vstHostTime.nanoSeconds = (double) *hostTimeNs;
-                    vstHostTime.flags |= Vst2::kVstNanosValid;
+                    vstHostTime.flags |= Vst2::kVstTimeSigValid;
+                    vstHostTime.timeSigNumerator = sig->numerator;
+                    vstHostTime.timeSigDenominator = sig->denominator;
                }
                else
                {
-                    vstHostTime.flags &= ~Vst2::kVstNanosValid;
+                    vstHostTime.flags &= ~Vst2::kVstTimeSigValid;
                }
+                setFromOptional (vstHostTime.ppqPos, position->getPpqPosition(), Vst2::kVstPpqPosValid);
+                setFromOptional (vstHostTime.barStartPos, position->getPpqPositionOfLastBarStart(), Vst2::kVstBarsValid);
+                setFromOptional (vstHostTime.nanoSeconds, getHostTimeNs(), Vst2::kVstNanosValid);
+                setFromOptional (vstHostTime.tempo, position->getBpm(), Vst2::kVstTempoValid);
                int32 newTransportFlags = 0;
-                if (position.isPlaying)   newTransportFlags |= Vst2::kVstTransportPlaying;
-                if (position.isRecording) newTransportFlags |= Vst2::kVstTransportRecording;
+                if (position->getIsPlaying())   newTransportFlags |= Vst2::kVstTransportPlaying;
+                if (position->getIsRecording()) newTransportFlags |= Vst2::kVstTransportRecording;
                if (newTransportFlags != (vstHostTime.flags & (Vst2::kVstTransportPlaying
                                                                | Vst2::kVstTransportRecording)))
@@ -2346,15 +2358,18 @@ private:
                else
                    vstHostTime.flags &= ~Vst2::kVstTransportChanged;
-                const auto optionalFrameRate = [&fr = position.frameRate]() -> Optional<Vst2::VstInt32>
+                const auto optionalFrameRate = [fr = position->getFrameRate()]() -> Optional<Vst2::VstInt32>
                {
-                    switch (fr.getBaseRate())
+                    if (! fr.hasValue())
+                        return {};
+                    switch (fr->getBaseRate())
                    {
-                        case 24: return fr.isPullDown() ? Vst2::kVstSmpte239fps : Vst2::kVstSmpte24fps;
-                        case 25: return fr.isPullDown() ? Vst2::kVstSmpte249fps : Vst2::kVstSmpte25fps;
-                        case 30: return fr.isPullDown() ? (fr.isDrop() ? Vst2::kVstSmpte2997dfps : Vst2::kVstSmpte2997fps)
-                                                        : (fr.isDrop() ? Vst2::kVstSmpte30dfps : Vst2::kVstSmpte30fps);
-                        case 60: return fr.isPullDown() ? Vst2::kVstSmpte599fps : Vst2::kVstSmpte60fps;
+                        case 24: return fr->isPullDown() ? Vst2::kVstSmpte239fps : Vst2::kVstSmpte24fps;
+                        case 25: return fr->isPullDown() ? Vst2::kVstSmpte249fps : Vst2::kVstSmpte25fps;
+                        case 30: return fr->isPullDown() ? (fr->isDrop() ? Vst2::kVstSmpte2997dfps : Vst2::kVstSmpte2997fps)
+                                                         : (fr->isDrop() ? Vst2::kVstSmpte30dfps : Vst2::kVstSmpte30fps);
+                        case 60: return fr->isPullDown() ? Vst2::kVstSmpte599fps : Vst2::kVstSmpte60fps;
                    }
                    return {};
@@ -2362,18 +2377,24 @@ private:
                vstHostTime.flags |= optionalFrameRate ? Vst2::kVstSmpteValid : 0;
                vstHostTime.smpteFrameRate = optionalFrameRate.orFallback (Vst2::VstSmpteFrameRate{});
-                vstHostTime.smpteOffset = (int32) (position.timeInSeconds * 80.0 * position.frameRate.getEffectiveRate() + 0.5);
-                if (position.isLooping)
+                const auto effectiveRate = position->getFrameRate().hasValue() ? position->getFrameRate()->getEffectiveRate() : 0.0;
+                vstHostTime.smpteOffset = (int32) (position->getTimeInSeconds().orFallback (0.0) * 80.0 * effectiveRate + 0.5);
+                if (const auto loop = position->getLoopPoints())
                {
-                    vstHostTime.cycleStartPos = position.ppqLoopStart;
-                    vstHostTime.cycleEndPos = position.ppqLoopEnd;
-                    vstHostTime.flags |= (Vst2::kVstCyclePosValid | Vst2::kVstTransportCycleActive);
+                    vstHostTime.flags |= Vst2::kVstCyclePosValid;
+                    vstHostTime.cycleStartPos = loop->ppqStart;
+                    vstHostTime.cycleEndPos = loop->ppqEnd;
                }
                else
                {
-                    vstHostTime.flags &= ~(Vst2::kVstCyclePosValid | Vst2::kVstTransportCycleActive);
+                    vstHostTime.flags &= ~Vst2::kVstCyclePosValid;
                }
+                if (position->getIsLooping())
+                    vstHostTime.flags |= Vst2::kVstTransportCycleActive;
+                else
+                    vstHostTime.flags &= ~Vst2::kVstTransportCycleActive;
            }
        }
View file
@@ -1171,11 +1171,7 @@ public:
            processor.setHostTimeNanos (nullptr); // Clear host time
        @endcode
    */
-    void setHostTimeNanos (const uint64_t* hostTimeIn)
-    {
-        hasHostTime = hostTimeIn != nullptr;
-        hostTime = hasHostTime ? *hostTimeIn : 0;
-    }
+    void setHostTimeNanos (Optional<uint64_t> hostTimeIn)   { hostTime = hostTimeIn; }
    /** The plugin may call this function inside the processBlock function (and only there!)
        to find the timestamp associated with the current audio block.
@@ -1196,7 +1192,7 @@ public:
        }
        @endcode
    */
-    const uint64_t* getHostTimeNs() const { return hasHostTime ? &hostTime : nullptr; }
+    Optional<uint64_t> getHostTimeNs() const { return hostTime; }
    //==============================================================================
    /** This is called by the processor to specify its details before being played. Use this
@@ -1552,8 +1548,7 @@ private:
    AudioProcessorParameterGroup parameterTree;
    Array<AudioProcessorParameter*> flatParameterList;
-    uint64_t hostTime = 0;
-    bool hasHostTime = false;
+    Optional<uint64_t> hostTime;
    AudioProcessorParameter* getParamChecked (int) const;
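The host-time accessors now pass an Optional<uint64_t> through unchanged instead of a pointer plus a separate validity flag. A hedged sketch of the plugin-side read (not part of the diff; only meaningful when called from within processBlock, as the surrounding documentation states):

static void logHostTime (const juce::AudioProcessor& processor)
{
    if (const auto hostTimeNs = processor.getHostTimeNs())
        DBG ("Host time for this block: " + juce::String (*hostTimeNs) + " ns");
    else
        DBG ("No host time stamp was provided for this block");
}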
View file
@@ -267,7 +267,7 @@ private:
        void perform (const Context& c) override
        {
            processor.setPlayHead (c.audioPlayHead);
-            processor.setHostTimeNanos (c.hostTimeNs.hasValue() ? &(*c.hostTimeNs) : nullptr);
+            processor.setHostTimeNanos (c.hostTimeNs);
            for (int i = 0; i < totalChans; ++i)
                audioChannels[i] = c.audioBuffers[audioChannelsToUse.getUnchecked (i)];
@@ -290,7 +290,7 @@ private:
            else
                callProcess (buffer, c.midiBuffers[midiBufferToUse]);
-            processor.setHostTimeNanos (nullptr);
+            processor.setHostTimeNanos (nullopt);
        }
        void callProcess (AudioBuffer<float>& buffer, MidiBuffer& midiMessages)
@@ -1402,14 +1402,6 @@ static void processBlockForBuffer (AudioBuffer<FloatType>& buffer, MidiBuffer& m
                                   std::unique_ptr<SequenceType>& renderSequence,
                                   std::atomic<bool>& isPrepared)
{
-    const auto getHostTime = [&]() -> Optional<uint64_t>
-    {
-        if (auto* nanos = graph.getHostTimeNs())
-            return *nanos;
-        return nullopt;
-    };
    if (graph.isNonRealtime())
    {
        while (! isPrepared)
@@ -1418,7 +1410,7 @@ static void processBlockForBuffer (AudioBuffer<FloatType>& buffer, MidiBuffer& m
        const ScopedLock sl (graph.getCallbackLock());
        if (renderSequence != nullptr)
-            renderSequence->perform (buffer, midiMessages, graph.getPlayHead(), getHostTime());
+            renderSequence->perform (buffer, midiMessages, graph.getPlayHead(), graph.getHostTimeNs());
    }
    else
    {
@@ -1427,7 +1419,7 @@ static void processBlockForBuffer (AudioBuffer<FloatType>& buffer, MidiBuffer& m
        if (isPrepared)
        {
            if (renderSequence != nullptr)
-                renderSequence->perform (buffer, midiMessages, graph.getPlayHead(), getHostTime());
+                renderSequence->perform (buffer, midiMessages, graph.getPlayHead(), graph.getHostTimeNs());
        }
        else
        {
View file
@@ -30,7 +30,7 @@ namespace juce
bool ARARenderer::processBlock (AudioBuffer<double>& buffer,
                                AudioProcessor::Realtime realtime,
-                                const AudioPlayHead::CurrentPositionInfo& positionInfo) noexcept
+                                const AudioPlayHead::PositionInfo& positionInfo) noexcept
{
    ignoreUnused (buffer, realtime, positionInfo);
View file
@@ -88,7 +88,7 @@ public:
    */
    virtual bool processBlock (AudioBuffer<float>& buffer,
                               AudioProcessor::Realtime realtime,
-                               const AudioPlayHead::CurrentPositionInfo& positionInfo) noexcept = 0;
+                               const AudioPlayHead::PositionInfo& positionInfo) noexcept = 0;
    /** Renders the output into the given buffer. Returns true if rendering executed without error,
        false otherwise.
@@ -108,7 +108,7 @@ public:
    */
    virtual bool processBlock (AudioBuffer<double>& buffer,
                               AudioProcessor::Realtime realtime,
-                               const AudioPlayHead::CurrentPositionInfo& positionInfo) noexcept;
+                               const AudioPlayHead::PositionInfo& positionInfo) noexcept;
};
//==============================================================================
@@ -128,7 +128,7 @@ public:
    bool processBlock (AudioBuffer<float>& buffer,
                       AudioProcessor::Realtime realtime,
-                       const AudioPlayHead::CurrentPositionInfo& positionInfo) noexcept override
+                       const AudioPlayHead::PositionInfo& positionInfo) noexcept override
    {
        ignoreUnused (buffer, realtime, positionInfo);
        return false;
@@ -189,7 +189,7 @@ public:
    // isNonRealtime of the process context - typically preview is limited to realtime.
    bool processBlock (AudioBuffer<float>& buffer,
                       AudioProcessor::Realtime isNonRealtime,
-                       const AudioPlayHead::CurrentPositionInfo& positionInfo) noexcept override
+                       const AudioPlayHead::PositionInfo& positionInfo) noexcept override
    {
        ignoreUnused (buffer, isNonRealtime, positionInfo);
        return true;
View file
@@ -84,7 +84,7 @@ bool AudioProcessorARAExtension::releaseResourcesForARA()
bool AudioProcessorARAExtension::processBlockForARA (AudioBuffer<float>& buffer,
                                                     AudioProcessor::Realtime realtime,
-                                                     const AudioPlayHead::CurrentPositionInfo& positionInfo)
+                                                     const AudioPlayHead::PositionInfo& positionInfo)
{
    // validate that the host has prepared us before processing
    ARA_VALIDATE_API_STATE (isPrepared);
@@ -109,12 +109,10 @@ bool AudioProcessorARAExtension::processBlockForARA (AudioBuffer<float>& buffer,
                                                     juce::AudioProcessor::Realtime realtime,
                                                     AudioPlayHead* playhead)
{
-    AudioPlayHead::CurrentPositionInfo positionInfo;
-    if (! isBoundToARA() || ! playhead || ! playhead->getCurrentPosition (positionInfo))
-        positionInfo.resetToDefault();
-    return processBlockForARA (buffer, realtime, positionInfo);
+    return processBlockForARA (buffer,
+                               realtime,
+                               playhead != nullptr ? playhead->getPosition().orFallback (AudioPlayHead::PositionInfo{})
+                                                   : AudioPlayHead::PositionInfo{});
}
//==============================================================================
View file
@@ -144,7 +144,7 @@ protected:
    */
    bool processBlockForARA (AudioBuffer<float>& buffer,
                             AudioProcessor::Realtime realtime,
-                             const AudioPlayHead::CurrentPositionInfo& positionInfo);
+                             const AudioPlayHead::PositionInfo& positionInfo);
    /** Implementation helper for AudioProcessor::processBlock().
View file
@@ -267,11 +267,11 @@ void AudioProcessorPlayer::audioDeviceIOCallbackWithContext (const float** const
    const ScopedLock sl2 (processor->getCallbackLock());
-    processor->setHostTimeNanos (context.hostTimeNs);
+    processor->setHostTimeNanos (context.hostTimeNs != nullptr ? makeOptional (*context.hostTimeNs) : nullopt);
    struct AtEndOfScope
    {
-        ~AtEndOfScope() { proc.setHostTimeNanos (nullptr); }
+        ~AtEndOfScope() { proc.setHostTimeNanos (nullopt); }
        AudioProcessor& proc;
    };
View file
@@ -502,7 +502,7 @@ public:
    bool operator!= (const void* ptr) const  { return ((const void*) block != ptr); }
    ~ObjCBlock() { if (block != nullptr) [block release]; }
-    operator BlockType() { return block; }
+    operator BlockType() const { return block; }
private:
    BlockType block;