Mirror of https://github.com/juce-framework/JUCE.git (synced 2026-01-15 00:24:19 +00:00)
Fix for AU bundle paths; added column-width access to TableHeaderComponent; made FileBasedDocument do extra checks before overwriting existing files
parent 1c01e897d9
commit 7e56a7514c
7 changed files with 345 additions and 352 deletions

@@ -237,7 +237,10 @@ public:
        const ScopedAutoReleasePool pool;

        AudioUnitCocoaViewInfo* info = (AudioUnitCocoaViewInfo*) outData;
        NSBundle* b = [NSBundle bundleForClass: [JuceUICreationClass class]];

        const File bundleFile (File::getSpecialLocation (File::currentApplicationFile));
        NSString* bundlePath = [NSString stringWithUTF8String: (const char*) bundleFile.getFullPathName().toUTF8()];
        NSBundle* b = [NSBundle bundleWithPath: bundlePath];

        info->mCocoaAUViewClass[0] = (CFStringRef) [[[JuceUICreationClass class] className] retain];
        info->mCocoaAUViewBundleLocation = (CFURLRef) [[NSURL fileURLWithPath: [b bundlePath]] retain];
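
The hunk above replaces the [NSBundle bundleForClass:] lookup with one derived from the plug-in binary's own location: File::getSpecialLocation (File::currentApplicationFile) should resolve to the bundle that is actually executing (the AU component rather than the host application), and bundleWithPath: turns that path into the NSBundle whose location is reported back to the host. A minimal Objective-C++ sketch of the same lookup in isolation; the helper name is hypothetical and not part of the commit:

    // Hedged sketch (not part of the diff): resolve a JUCE AU's own bundle from the
    // path of the module that is currently running, instead of from a class lookup.
    static NSBundle* findOwnPluginBundle()
    {
        const File bundleFile (File::getSpecialLocation (File::currentApplicationFile));
        NSString* path = [NSString stringWithUTF8String: (const char*) bundleFile.getFullPathName().toUTF8()];
        return [NSBundle bundleWithPath: path];   // nil if the path isn't a loadable bundle
    }

The last two lines of the hunk then hand the Cocoa view class name and this bundle's URL back to the host through the AudioUnitCocoaViewInfo structure.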

@@ -17710,8 +17710,24 @@ FileBasedDocument::SaveResult FileBasedDocument::saveAsInteractive (const bool w

    File chosen (fc.getResult());
    if (chosen.getFileExtension().isEmpty())
    {
        chosen = chosen.withFileExtension (fileExtension);

        if (chosen.exists())
        {
            if (! AlertWindow::showOkCancelBox (AlertWindow::WarningIcon,
                                                TRANS("File already exists"),
                                                TRANS("There's already a file called:\n\n")
                                                  + chosen.getFullPathName()
                                                  + T("\n\nAre you sure you want to overwrite it?"),
                                                TRANS("overwrite"),
                                                TRANS("cancel")))
            {
                return userCancelledSave;
            }
        }
    }

    return saveAs (chosen, false, false, true);
}
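
With this change, saveAsInteractive() no longer overwrites silently in the case it used to miss: when the chosen name had no extension and the completed filename turns out to already exist, an ok/cancel warning is shown, and answering "cancel" makes the call return userCancelledSave. A hedged sketch of a caller reacting to that result; the helper function is hypothetical, while saveAsInteractive() and userCancelledSave are taken from the hunk itself (the bool parameter's name is cut off in the hunk header, so no claim is made here about its meaning):

    // Hedged usage sketch, not part of the diff.
    void trySaveDocument (FileBasedDocument& doc)
    {
        const FileBasedDocument::SaveResult result = doc.saveAsInteractive (true);

        if (result == FileBasedDocument::userCancelledSave)
        {
            // The user dismissed the file chooser, or answered "cancel" to the
            // new "File already exists" warning added by this commit.
        }
    }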

@@ -46777,6 +46793,12 @@ void TableHeaderComponent::moveColumn (const int columnId, int newIndex)
    }
}

int TableHeaderComponent::getColumnWidth (const int columnId) const throw()
{
    const ColumnInfo* const ci = getInfoForId (columnId);
    return ci != 0 ? ci->width : 0;
}

void TableHeaderComponent::setColumnWidth (const int columnId, const int newWidth)
{
    ColumnInfo* const ci = getInfoForId (columnId);
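
The new getColumnWidth() / setColumnWidth() pair gives direct access to a column's width by its id, with 0 returned for an unknown id. A hedged usage sketch (the free function and the column id are hypothetical):

    // Hedged usage sketch, not part of the diff: widen a column by 20 pixels.
    void widenColumn (TableHeaderComponent& header, const int columnId)
    {
        const int current = header.getColumnWidth (columnId);   // 0 if the id doesn't exist

        if (current > 0)
            header.setColumnWidth (columnId, current + 20);
    }

As the header documentation further down notes, setColumnWidth() also triggers an asynchronous tableColumnsResized() callback to any registered listeners.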

@@ -261142,357 +261164,6 @@ bool WebBrowserComponent::pageAboutToLoad (const String& url)
// compiled on its own).
#ifdef JUCE_INCLUDED_FILE

@interface UIKitAUIOHost : UIViewController
{
@public
    /** READONLY The audio format of the data stream. */
    AudioStreamBasicDescription format;
    AURenderCallbackStruct inputProc;
    Float64 hwSampleRate;
    AudioUnit rioUnit;
    UGen rawInput;
    UGen postFadeOutput;
    UGen preFadeOutput;
    int bufferSize;
    float *floatBuffer;
    UInt32 audioInputIsAvailable;
    UInt32 numInputChannels;
    UInt32 numOutputChannels;
    bool isRunning;
    float fadeInTime;
    UGenArray others;
    NSLock* nsLock;
}

/** Initialises the AudioUnit framework and structures.
    Do not call this method, it is called automatically when the application launches. */
- (void)initAudio;

/** Construct a UGen graph.
    You must implement this in your subclass. You should return a UGen which will be the UGen graph which is
    performed and rendered to the host. The input parameter may be ignored if only signal generation is required
    or may be used if a processing algorithm is being implemented (e.g., filtering incoming audio data).

    @param input The input UGen which will contain audio data from the host.
    @return the UGen graph which will be performed */
- (UGen)constructGraph:(UGen)input;

- (void)addOther:(UGen)ugen;

- (void)lock;
- (void)unlock;
- (BOOL)tryLock;

@end

#define NUM_CHANNELS 2

void SetFormat(AudioStreamBasicDescription& format)
{
    memset(&format, 0, sizeof(AudioStreamBasicDescription));
    format.mFormatID = kAudioFormatLinearPCM;
    int sampleSize = sizeof(AudioSampleType);
    format.mFormatFlags = kAudioFormatFlagsCanonical;
    format.mBitsPerChannel = 8 * sampleSize;
    format.mChannelsPerFrame = NUM_CHANNELS;
    format.mFramesPerPacket = 1;
    format.mBytesPerPacket = format.mBytesPerFrame = sampleSize;
    format.mFormatFlags |= kAudioFormatFlagIsNonInterleaved;
}

int SetupRemoteIO (AudioUnit& inRemoteIOUnit, AURenderCallbackStruct inRenderProc, AudioStreamBasicDescription& outFormat)
{
    // Open the output unit
    AudioComponentDescription desc;
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_RemoteIO;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;

    AudioComponent comp = AudioComponentFindNext (NULL, &desc);
    AudioComponentInstanceNew (comp, &inRemoteIOUnit);

    const UInt32 one = 1;
    AudioUnitSetProperty(inRemoteIOUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &one, sizeof(one));
    AudioUnitSetProperty(inRemoteIOUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &inRenderProc, sizeof(inRenderProc));

    AudioUnitSetProperty(inRemoteIOUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &outFormat, sizeof(outFormat));
    AudioUnitSetProperty(inRemoteIOUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &outFormat, sizeof(outFormat));

    AudioUnitInitialize(inRemoteIOUnit);

    return 0;
}

static const float FloatToFixed824_Factor = 16777216.f;
static const float Fixed824ToFloat_Factor = 5.960464477539e-08f;

static const float FloatToPCM16Bit_Factor = 32767.f;
static const float PCM16BitToFloat_Factor = 3.051850947600e-05f;

static OSStatus PerformThru(void *inRefCon,
                            AudioUnitRenderActionFlags *ioActionFlags,
                            const AudioTimeStamp *inTimeStamp,
                            UInt32 inBusNumber,
                            UInt32 inNumberFrames,
                            AudioBufferList *ioData)
{
    OSStatus err = 0;
    UIKitAUIOHost *x = (UIKitAUIOHost *)inRefCon;

    [x lock];

    if(x->audioInputIsAvailable)
    {
        err = AudioUnitRender(x->rioUnit, ioActionFlags, inTimeStamp, 1, inNumberFrames, ioData);
        if (err) { printf("PerformThru: error %d\n", (int)err); return err; }
    }

    if(inNumberFrames > x->bufferSize)
    {
        delete [] x->floatBuffer;
        x->bufferSize = inNumberFrames;

        x->floatBuffer = new float[inNumberFrames * NUM_CHANNELS];
    }

    long blockID = UGen::getNextBlockID(inNumberFrames);

    float *floatBufferData[2];
    floatBufferData[0] = x->floatBuffer;
    floatBufferData[1] = floatBufferData[0] + inNumberFrames;

    if(x->audioInputIsAvailable)
    {
        for (UInt32 channel = 0; channel < x->numInputChannels; channel++)
        {
            AudioSampleType *audioUnitBuffer = (AudioSampleType*)ioData->mBuffers[0].mData;
            float *floatBuffer = floatBufferData[channel];

            for(int sample = 0; sample < inNumberFrames; sample++)
            {
                floatBuffer[sample] = (float)audioUnitBuffer[sample] * PCM16BitToFloat_Factor;
            }
        }

        x->rawInput.getSource().setInputs((const float**)floatBufferData, inNumberFrames, x->numInputChannels);
    }
    else
    {
        memset(x->floatBuffer, 0, x->numInputChannels * inNumberFrames * sizeof(float));
    }

    x->postFadeOutput.setOutputs(floatBufferData, inNumberFrames, 2);
    x->postFadeOutput.prepareAndProcessBlock(inNumberFrames, blockID);

    for (UInt32 channel = 0; channel < ioData->mNumberBuffers; channel++)
    {
        AudioSampleType *audioUnitBuffer = (AudioSampleType*)ioData->mBuffers[channel].mData;
        float *floatBuffer = floatBufferData[channel];

        for(int sample = 0; sample < inNumberFrames; sample++)
        {
            audioUnitBuffer[sample] = (AudioSampleType)(floatBuffer[sample] * FloatToPCM16Bit_Factor);
        }
    }

    for(int i = 0; i < x->others.size(); i++)
    {
        x->others[i].prepareAndProcessBlock(inNumberFrames, blockID);
    }

    [x unlock];

    return err;
}

void propListener(void * inClientData,
                  AudioSessionPropertyID inID,
                  UInt32 inDataSize,
                  const void * inPropertyValue)
{
    printf("Property changed!\n");

    UIKitAUIOHost *x = (UIKitAUIOHost *)inClientData;

    if(!x->isRunning) return;

    if(inPropertyValue)
    {
        CFDictionaryRef routeChangeDictionary = (CFDictionaryRef)inPropertyValue;
        CFNumberRef routeChangeReasonRef =
            (CFNumberRef)CFDictionaryGetValue (routeChangeDictionary,
                                               CFSTR (kAudioSession_AudioRouteChangeKey_Reason));

        SInt32 routeChangeReason;
        CFNumberGetValue(routeChangeReasonRef, kCFNumberSInt32Type, &routeChangeReason);

        CFStringRef newAudioRoute;
        UInt32 propertySize = sizeof (CFStringRef);
        AudioSessionGetProperty(kAudioSessionProperty_AudioRoute, &propertySize, &newAudioRoute);

        printf("route=%s\n", CFStringGetCStringPtr(newAudioRoute, CFStringGetSystemEncoding()));
    }

    UInt32 size = sizeof(UInt32);
    AudioSessionGetProperty(kAudioSessionProperty_CurrentHardwareInputNumberChannels, &size, &x->numInputChannels);
    AudioSessionGetProperty(kAudioSessionProperty_CurrentHardwareOutputNumberChannels, &size, &x->numOutputChannels);
    AudioSessionGetProperty(kAudioSessionProperty_AudioInputAvailable, &size, &x->audioInputIsAvailable);

    printf("inputs=%d outputs=%d audioInputIsAvailable=%d\n", x->numInputChannels, x->numOutputChannels, x->audioInputIsAvailable);

    if(x->rioUnit)
    {
        AudioComponentInstanceDispose(x->rioUnit);
    }

    SetFormat(x->format);
    SetupRemoteIO(x->rioUnit, x->inputProc, x->format);

    x->rawInput.setSource(AudioIn::AR(x->numInputChannels), true);
    x->postFadeOutput = Plug::AR(UGen::emptyChannels(x->preFadeOutput.getNumChannels()));
    x->postFadeOutput.fadeSourceAndRelease(x->preFadeOutput, x->fadeInTime);

    AudioSessionSetActive(true);
    AudioOutputUnitStart(x->rioUnit);
}

void rioInterruptionListener(void *inClientData, UInt32 inInterruption)
{
    printf("Session interrupted! --- %s ---\n", inInterruption == kAudioSessionBeginInterruption ? "Begin Interruption" : "End Interruption");

    UIKitAUIOHost *x = (UIKitAUIOHost *)inClientData;

    if (inInterruption == kAudioSessionEndInterruption) {
        // make sure we are again the active session
        //AudioSessionSetActive(false);
        AudioSessionSetActive(true);
        x->isRunning = true;
        AudioOutputUnitStart(x->rioUnit);
    }

    if (inInterruption == kAudioSessionBeginInterruption) {
        x->isRunning = false;
        AudioOutputUnitStop(x->rioUnit);

        printf("rioInterruptionListener audioInputIsAvailable=%d\n", x->audioInputIsAvailable);

        UIAlertView *baseAlert = [[UIAlertView alloc] initWithTitle:@"Audio interrupted"
                                                            message:@"This could have been interrupted by another application or due to unplugging a headset:"
                                                           delegate:x
                                                  cancelButtonTitle:nil
                                                  otherButtonTitles:@"Resume", @"Cancel", nil];
        [baseAlert show];
    }
}

@implementation UIKitAUIOHost

- (id)init
{
    if (self = [super init])
    {
        nsLock = [[NSLock alloc] init];
        fadeInTime = 1.0;
        [self performSelector:@selector(initAudio) withObject:nil afterDelay:1.0];
    }
    return self;
}

- (void)initAudio
{
    // render proc
    inputProc.inputProc = PerformThru;
    inputProc.inputProcRefCon = self;

    // session
    AudioSessionInitialize (NULL, NULL, rioInterruptionListener, self);
    AudioSessionSetActive (true);

    UInt32 audioCategory = kAudioSessionCategory_PlayAndRecord;
    AudioSessionSetProperty(kAudioSessionProperty_AudioCategory, sizeof(audioCategory), &audioCategory);
    AudioSessionAddPropertyListener(kAudioSessionProperty_AudioRouteChange, propListener, self);

    UInt32 size = sizeof(hwSampleRate);
    AudioSessionGetProperty(kAudioSessionProperty_CurrentHardwareSampleRate, &size, &hwSampleRate);

    Float32 bufferDuration = 512 / hwSampleRate;
    AudioSessionSetProperty(kAudioSessionProperty_PreferredHardwareIOBufferDuration, sizeof(bufferDuration), &bufferDuration);

    UGen::initialise();
    UGen::prepareToPlay(hwSampleRate, 512);

    rawInput = Plug::AR(UGen::emptyChannels(2));
    preFadeOutput = [self constructGraph: rawInput];

    rioUnit = NULL;
    isRunning = true;
    propListener((void*)self, 0,0,0);

    size = sizeof(format);
    AudioUnitGetProperty(rioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, &size);

    //Float32 bufferDuration;
    size = sizeof(bufferDuration);
    AudioSessionGetProperty(kAudioSessionProperty_CurrentHardwareIOBufferDuration, &size, &bufferDuration);

    bufferSize = (int)(hwSampleRate*bufferDuration+0.5);
    floatBuffer = new float[bufferSize * NUM_CHANNELS];
}

- (UGen)constructGraph:(UGen)input
{
    return UGen::emptyChannels(NUM_CHANNELS);
}

- (void)addOther:(UGen)ugen
{
    [self lock];
    others <<= ugen;
    [self unlock];
}

- (void)lock
{
    [nsLock lock];
}

- (void)unlock
{
    [nsLock unlock];
}

- (BOOL)tryLock
{
    return [nsLock tryLock];
}

- (void)alertView:(UIAlertView *)alertView clickedButtonAtIndex:(NSInteger)buttonIndex
{
    printf("buttonIndex=%d\n", buttonIndex);

    if(buttonIndex == 0)
    {
        // resume
        isRunning = true;
        propListener((void*)self, 0,0,0);
    }

    [alertView release];
}

-(void) dealloc
{
    UGen::shutdown();
    delete [] floatBuffer;
    [nsLock release];
    [super dealloc];
}

@end

class IPhoneAudioIODevice : public AudioIODeviceType
{
public:
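
The hunk above removes the whole UGen-based UIKitAUIOHost block from this file: the header's line counts (357 old lines, 6 new) show that only the surrounding context survives. Its interface documentation described a subclass contract: override constructGraph: and return the UGen graph to be rendered, optionally using the input that carries the host's incoming audio. A minimal hedged sketch of such a subclass, for reference; the class name is hypothetical and the graph is a plain pass-through:

    // Hedged sketch, not part of the diff: the subclass contract documented in the
    // removed UIKitAUIOHost interface above.
    @interface PassThroughHost : UIKitAUIOHost
    @end

    @implementation PassThroughHost
    - (UGen)constructGraph:(UGen)input
    {
        // A synthesis-only host could ignore the input; returning it unchanged
        // simply plays the incoming audio straight back out.
        return input;
    }
    @end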

@@ -46663,6 +46663,10 @@ public:
    */
    void moveColumn (const int columnId, int newVisibleIndex);

    /** Returns the width of one of the columns.
    */
    int getColumnWidth (const int columnId) const throw();

    /** Changes the width of a column.

        This will cause an asynchronous callback to the tableColumnsResized() method of any registered listeners.
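
The documentation above says width changes are reported asynchronously through tableColumnsResized() on registered listeners. A hedged sketch of a listener picking that up; only the callback's name appears in this diff, so the Listener base class and the empty overrides beside it are assumptions based on the present-day TableHeaderComponent API:

    // Hedged sketch, not part of the diff: reacting to the asynchronous resize callback.
    class ColumnWidthWatcher  : public TableHeaderComponent::Listener   // base-class name assumed
    {
    public:
        void tableColumnsResized (TableHeaderComponent* header)
        {
            // Called some time after setColumnWidth() (or a user drag) changes a width.
        }

        void tableColumnsChanged (TableHeaderComponent*) {}     // other callbacks assumed
        void tableSortOrderChanged (TableHeaderComponent*) {}
    };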

@@ -191,6 +191,12 @@ void TableHeaderComponent::moveColumn (const int columnId, int newIndex)
    }
}

int TableHeaderComponent::getColumnWidth (const int columnId) const throw()
{
    const ColumnInfo* const ci = getInfoForId (columnId);
    return ci != 0 ? ci->width : 0;
}

void TableHeaderComponent::setColumnWidth (const int columnId, const int newWidth)
{
    ColumnInfo* const ci = getInfoForId (columnId);

@@ -193,6 +193,10 @@ public:
    */
    void moveColumn (const int columnId, int newVisibleIndex);

    /** Returns the width of one of the columns.
    */
    int getColumnWidth (const int columnId) const throw();

    /** Changes the width of a column.

        This will cause an asynchronous callback to the tableColumnsResized() method of any registered listeners.

@@ -29,4 +29,293 @@

class IPhoneAudioIODevice : public AudioIODeviceType
{
public:
    //==============================================================================
    IPhoneAudioIODevice (const String& deviceName, const bool isInput_)
        : AudioIODevice (deviceName, T("Audio")),
          isInput (isInput_),
          isOpen_ (false),
          isStarted (false)
    {
    }

    ~IPhoneAudioIODeviceType()
    {
    }

    const StringArray getOutputChannelNames()
    {
        StringArray s;
        if (! isInput)
        {
            s.add ("Left");
            s.add ("Right");
        }
        return s;
    }

    const StringArray getInputChannelNames()
    {
        StringArray s;
        if (isInput)
        {
            s.add ("Left");
            s.add ("Right");
        }
        return s;
    }

    int getNumSampleRates()
    {
        return sampleRates.size();
    }

    double getSampleRate (int index)
    {
        return sampleRates [index];
    }

    int getNumBufferSizesAvailable()
    {
        return bufferSizes.size();
    }

    int getBufferSizeSamples (int index)
    {
        return bufferSizes [index];
    }

    int getDefaultBufferSize()
    {
        for (int i = 0; i < getNumBufferSizesAvailable(); ++i)
            if (getBufferSizeSamples(i) >= 512)
                return getBufferSizeSamples(i);

        return 512;
    }

    const String open (const BitArray& inputChannels,
                       const BitArray& outputChannels,
                       double sampleRate,
                       int bufferSizeSamples)
    {
        isOpen_ = true;

        if (bufferSizeSamples <= 0)
            bufferSizeSamples = getDefaultBufferSize();

        lastError = String::empty;

        isOpen_ = lastError.isEmpty();
        return lastError;
    }

    void close()
    {
        isOpen_ = false;
    }

    bool isOpen()
    {
        return isOpen_;
    }

    int getCurrentBufferSizeSamples()
    {
        return internal != 0 ? internal->getBufferSize() : 512;
    }

    double getCurrentSampleRate()
    {
        return internal != 0 ? internal->getSampleRate() : 0;
    }

    int getCurrentBitDepth()
    {
        return 32; // no way to find out, so just assume it's high..
    }

    const BitArray getActiveOutputChannels() const
    {
        return internal != 0 ? internal->activeOutputChans : BitArray();
    }

    const BitArray getActiveInputChannels() const
    {
        BitArray chans;

        if (internal != 0)
        {
            chans = internal->activeInputChans;

            if (internal->inputDevice != 0)
                chans.orWith (internal->inputDevice->activeInputChans);
        }

        return chans;
    }

    int getOutputLatencyInSamples()
    {
        if (internal == 0)
            return 0;

        // this seems like a good guess at getting the latency right - comparing
        // this with a round-trip measurement, it gets it to within a few millisecs
        // for the built-in mac soundcard
        return internal->outputLatency + internal->getBufferSize() * 2;
    }

    int getInputLatencyInSamples()
    {
        if (internal == 0)
            return 0;

        return internal->inputLatency + internal->getBufferSize() * 2;
    }

    void start (AudioIODeviceCallback* callback)
    {
        if (internal != 0 && ! isStarted)
        {
            if (callback != 0)
                callback->audioDeviceAboutToStart (this);

            isStarted = true;
            internal->start (callback);
        }
    }

    void stop()
    {
        if (isStarted && internal != 0)
        {
            AudioIODeviceCallback* const lastCallback = internal->callback;

            isStarted = false;
            internal->stop (true);

            if (lastCallback != 0)
                lastCallback->audioDeviceStopped();
        }
    }

    bool isPlaying()
    {
        if (internal->callback == 0)
            isStarted = false;

        return isStarted;
    }

    const String getLastError()
    {
        return lastError;
    }

    int inputIndex, outputIndex;

    juce_UseDebuggingNewOperator

private:
    CoreAudioInternal* internal;
    bool isOpen_, isStarted;
    String lastError;

    static OSStatus hardwareListenerProc (AudioDeviceID /*inDevice*/, UInt32 /*inLine*/, const AudioObjectPropertyAddress* pa, void* inClientData)
    {
        CoreAudioInternal* const intern = (CoreAudioInternal*) inClientData;

        switch (pa->mSelector)
        {
            case kAudioHardwarePropertyDevices:
                intern->deviceDetailsChanged();
                break;

            case kAudioHardwarePropertyDefaultOutputDevice:
            case kAudioHardwarePropertyDefaultInputDevice:
            case kAudioHardwarePropertyDefaultSystemOutputDevice:
                break;
        }

        return noErr;
    }

    CoreAudioIODevice (const CoreAudioIODevice&);
    const CoreAudioIODevice& operator= (const CoreAudioIODevice&);
};

//==============================================================================
class IPhoneAudioIODeviceType : public AudioIODeviceType
{
public:
    //==============================================================================
    IPhoneAudioIODeviceType()
        : AudioIODeviceType (T("iPhone Audio")),
          hasScanned (false)
    {
    }

    ~IPhoneAudioIODeviceType()
    {
    }

    //==============================================================================
    void scanForDevices()
    {
    }

    const StringArray getDeviceNames (const bool wantInputNames) const
    {
        StringArray s;
        return s;
    }

    int getDefaultDeviceIndex (const bool forInput) const
    {
        return 0;
    }

    int getIndexOfDevice (AudioIODevice* device, const bool asInput) const
    {
        return 0;
    }

    bool hasSeparateInputsAndOutputs() const { return true; }

    AudioIODevice* createDevice (const String& outputDeviceName,
                                 const String& inputDeviceName)
    {
        if (outputDeviceName.isNotEmpty() && inputDeviceName.isNotEmpty())
            return new CoreAudioIODevice (deviceName,
                                          inputIds [inputIndex],
                                          inputIndex,
                                          outputIds [outputIndex],
                                          outputIndex);

        return 0;
    }

    //==============================================================================
    juce_UseDebuggingNewOperator

private:
    IPhoneAudioIODeviceType (const IPhoneAudioIODeviceType&);
    const IPhoneAudioIODeviceType& operator= (const IPhoneAudioIODeviceType&);
};

//==============================================================================
AudioIODeviceType* juce_createAudioIODeviceType_iPhoneAudio()
{
    return new IPhoneAudioIODeviceType();
}

#endif
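
The hunk above adds the first cut of the iPhone audio device classes plus the juce_createAudioIODeviceType_iPhoneAudio() factory. Much of the body is still scaffolding carried over from the Mac CoreAudio code (it refers to CoreAudioInternal and CoreAudioIODevice, and IPhoneAudioIODevice's destructor is named after the other class), so the hedged sketch below only exercises the factory and the AudioIODeviceType surface the hunk itself declares; in this revision getDeviceNames() returns an empty array, so no device would actually be created:

    // Hedged sketch, not part of the diff: exercising the new factory function.
    void listIPhoneAudioDevices()
    {
        AudioIODeviceType* const type = juce_createAudioIODeviceType_iPhoneAudio();

        type->scanForDevices();                                    // currently a no-op
        const StringArray outputs (type->getDeviceNames (false));  // empty at this stage
        const StringArray inputs (type->getDeviceNames (true));

        AudioIODevice* device = 0;

        if (outputs.size() > 0 && inputs.size() > 0)
            device = type->createDevice (outputs[0], inputs[0]);

        delete device;
        delete type;
    }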

@@ -263,8 +263,24 @@ FileBasedDocument::SaveResult FileBasedDocument::saveAsInteractive (const bool w

    File chosen (fc.getResult());
    if (chosen.getFileExtension().isEmpty())
    {
        chosen = chosen.withFileExtension (fileExtension);

        if (chosen.exists())
        {
            if (! AlertWindow::showOkCancelBox (AlertWindow::WarningIcon,
                                                TRANS("File already exists"),
                                                TRANS("There's already a file called:\n\n")
                                                  + chosen.getFullPathName()
                                                  + T("\n\nAre you sure you want to overwrite it?"),
                                                TRANS("overwrite"),
                                                TRANS("cancel")))
            {
                return userCancelledSave;
            }
        }
    }

    return saveAs (chosen, false, false, true);
}