1
0
Fork 0
mirror of https://github.com/juce-framework/JUCE.git synced 2026-01-13 00:04:19 +00:00

Added Animated App template and examples

This commit is contained in:
Felix Faire 2014-10-29 15:55:23 +00:00
parent fefcf7aca6
commit ff6520a89a
1141 changed files with 438491 additions and 94 deletions

View file

@ -0,0 +1,83 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
//==============================================================================
// Stub camera implementation for platforms with no camera support: it accepts
// listeners but never opens a device or delivers any frames.
struct CameraDevice::Pimpl
{
    Pimpl (const String&, int /*index*/, int /*minWidth*/, int /*minHeight*/, int /*maxWidth*/, int /*maxHeight*/)
    {
    }

    ~Pimpl()
    {
    }

    void startRecordingToFile (const File&, int /*quality*/)
    {
    }

    void stopRecording()
    {
    }

    // No frames are ever captured, so this always returns the epoch.
    Time getTimeOfFirstRecordedFrame() const
    {
        return Time();
    }

    void addListener (CameraDevice::Listener* listenerToAdd)
    {
        const ScopedLock sl (listenerLock);
        listeners.addIfNotAlreadyThere (listenerToAdd);
    }

    void removeListener (CameraDevice::Listener* listenerToRemove)
    {
        const ScopedLock sl (listenerLock);
        listeners.removeFirstMatchingValue (listenerToRemove);
    }

    // There are never any devices available on this platform.
    static StringArray getAvailableDevices()
    {
        StringArray results;
        return results;
    }

private:
    // These were referenced by addListener()/removeListener() but never
    // declared in the struct - without them this file cannot compile.
    Array<CameraDevice::Listener*> listeners;
    CriticalSection listenerLock;

    JUCE_DECLARE_NON_COPYABLE (Pimpl)
};
// Stub viewer component: there is no camera on this platform, so it shows nothing.
struct CameraDevice::ViewerComponent : public Component
{
    ViewerComponent (CameraDevice&)
    {
    }

    JUCE_DECLARE_NON_COPYABLE (ViewerComponent)
};
// Recorded movie files use the QuickTime container extension.
String CameraDevice::getFileExtension()
{
    return ".mov";
}

View file

@ -0,0 +1,353 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#if ! JUCE_QUICKTIME
#error "To support cameras in OSX you'll need to enable the JUCE_QUICKTIME flag"
#endif
extern Image juce_createImageFromCIImage (CIImage*, int w, int h);
//==============================================================================
// OSX camera implementation built on the (deprecated) QTKit capture API.
// Owns the QTCaptureSession and its inputs/outputs, and forwards frames and
// sample-buffer timestamps to the JUCE CameraDevice listeners.
struct CameraDevice::Pimpl
{
    Pimpl (const String&, const int index, int /*minWidth*/, int /*minHeight*/, int /*maxWidth*/, int /*maxHeight*/)
        : input (nil),
          audioDevice (nil),
          audioInput (nil),
          session (nil),
          fileOutput (nil),
          imageOutput (nil),
          firstPresentationTime (0),
          averageTimeOffset (0),
          isRecording (false)
    {
        JUCE_AUTORELEASEPOOL
        {
            session = [[QTCaptureSession alloc] init];

            NSArray* devs = [QTCaptureDevice inputDevicesWithMediaType: QTMediaTypeVideo];
            device = (QTCaptureDevice*) [devs objectAtIndex: index];

            static DelegateClass cls;
            callbackDelegate = [cls.createInstance() init];
            DelegateClass::setOwner (callbackDelegate, this);

            NSError* err = nil;
            [device retain];
            [device open: &err];

            if (err == nil)
            {
                input = [[QTCaptureDeviceInput alloc] initWithDevice: device];
                // NB: the audio input is deliberately NOT created here - it is built
                // from the default *audio* device in addDefaultAudioInput() when
                // recording starts. (Previously it was wrongly created from the
                // video device and immediately discarded by resetFile().)

                [session addInput: input error: &err];

                if (err == nil)
                {
                    resetFile();

                    imageOutput = [[QTCaptureDecompressedVideoOutput alloc] init];
                    [imageOutput setDelegate: callbackDelegate];

                    if (err == nil)
                    {
                        [session startRunning];
                        return;  // success - leave openingError empty
                    }
                }
            }

            openingError = nsStringToJuce ([err description]);
            DBG (openingError);
        }
    }

    ~Pimpl()
    {
        // Stop the session before tearing down its inputs/outputs.
        [session stopRunning];
        [session removeOutput: imageOutput];

        [session release];
        [input release];
        [device release];
        [audioDevice release];
        [audioInput release];
        [fileOutput release];
        [imageOutput release];
        [callbackDelegate release];
    }

    // True if the constructor managed to open the device and start the session.
    bool openedOk() const noexcept       { return openingError.isEmpty(); }

    // Discards any current movie-file output and audio input, and creates a
    // fresh, idle QTCaptureMovieFileOutput ready for the next recording.
    void resetFile()
    {
        [fileOutput recordToOutputFileURL: nil];
        [session removeOutput: fileOutput];
        [fileOutput release];
        fileOutput = [[QTCaptureMovieFileOutput alloc] init];

        [session removeInput: audioInput];
        [audioInput release];
        audioInput = nil;
        [audioDevice release];
        audioDevice = nil;

        [fileOutput setDelegate: callbackDelegate];
    }

    // Attaches the system's default audio device to the session (if one exists).
    void addDefaultAudioInput()
    {
        NSError* err = nil;
        audioDevice = [QTCaptureDevice defaultInputDeviceWithMediaType: QTMediaTypeSound];

        if ([audioDevice open: &err])
            [audioDevice retain];
        else
            audioDevice = nil;

        if (audioDevice != nil)
        {
            audioInput = [[QTCaptureDeviceInput alloc] initWithDevice: audioDevice];
            [session addInput: audioInput error: &err];
        }
    }

    void startRecordingToFile (const File& file, int quality)
    {
        stopRecording();

        firstPresentationTime = 0;
        file.deleteFile();

        // In some versions of QT (e.g. on 10.5), if you record video without audio, the speed comes
        // out wrong, so we'll put some audio in there too..,
        addDefaultAudioInput();

        [session addOutput: fileOutput error: nil];

        // Pick per-connection compression options based on the requested quality.
        NSEnumerator* connectionEnumerator = [[fileOutput connections] objectEnumerator];

        for (;;)
        {
            QTCaptureConnection* connection = [connectionEnumerator nextObject];
            if (connection == nil)
                break;

            QTCompressionOptions* options = nil;
            NSString* mediaType = [connection mediaType];

            if ([mediaType isEqualToString: QTMediaTypeVideo])
                options = [QTCompressionOptions compressionOptionsWithIdentifier:
                               quality >= 1 ? nsStringLiteral ("QTCompressionOptionsSD480SizeH264Video")
                                            : nsStringLiteral ("QTCompressionOptions240SizeH264Video")];
            else if ([mediaType isEqualToString: QTMediaTypeSound])
                options = [QTCompressionOptions compressionOptionsWithIdentifier: nsStringLiteral ("QTCompressionOptionsHighQualityAACAudio")];

            [fileOutput setCompressionOptions: options forConnection: connection];
        }

        [fileOutput recordToOutputFileURL: [NSURL fileURLWithPath: juceStringToNS (file.getFullPathName())]];
        isRecording = true;
    }

    void stopRecording()
    {
        if (isRecording)
        {
            resetFile();
            isRecording = false;
        }
    }

    Time getTimeOfFirstRecordedFrame() const
    {
        return firstPresentationTime != 0 ? Time (firstPresentationTime + averageTimeOffset)
                                          : Time();
    }

    void addListener (CameraDevice::Listener* listenerToAdd)
    {
        const ScopedLock sl (listenerLock);

        // The image output is only attached while somebody is listening.
        if (listeners.size() == 0)
            [session addOutput: imageOutput error: nil];

        listeners.addIfNotAlreadyThere (listenerToAdd);
    }

    void removeListener (CameraDevice::Listener* listenerToRemove)
    {
        const ScopedLock sl (listenerLock);
        listeners.removeFirstMatchingValue (listenerToRemove);

        if (listeners.size() == 0)
            [session removeOutput: imageOutput];
    }

    // Converts the CIImage to a juce::Image and hands it to every listener.
    void callListeners (CIImage* frame, int w, int h)
    {
        Image image (juce_createImageFromCIImage (frame, w, h));

        const ScopedLock sl (listenerLock);

        for (int i = listeners.size(); --i >= 0;)
        {
            CameraDevice::Listener* const l = listeners[i];

            if (l != nullptr)
                l->imageReceived (image);
        }
    }

    // Tracks the offset between the capture timestamps and wall-clock time, so
    // getTimeOfFirstRecordedFrame() can report a real-world time.
    void captureBuffer (QTSampleBuffer* sampleBuffer)
    {
        const Time now (Time::getCurrentTime());

       #if MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_5
        NSNumber* hosttime = (NSNumber*) [sampleBuffer attributeForKey: QTSampleBufferHostTimeAttribute];
       #else
        NSNumber* hosttime = (NSNumber*) [sampleBuffer attributeForKey: nsStringLiteral ("hostTime")];
       #endif

        int64 presentationTime = (hosttime != nil)
                ? ((int64) AudioConvertHostTimeToNanos ([hosttime unsignedLongLongValue]) / 1000000 + 40)
                : (([sampleBuffer presentationTime].timeValue * 1000) / [sampleBuffer presentationTime].timeScale + 50);

        const int64 timeDiff = now.toMilliseconds() - presentationTime;

        if (firstPresentationTime == 0)
        {
            firstPresentationTime = presentationTime;
            averageTimeOffset = timeDiff;
        }
        else
        {
            // Slowly-moving average of the clock offset.
            averageTimeOffset = (averageTimeOffset * 120 + timeDiff * 8) / 128;
        }
    }

    static StringArray getAvailableDevices()
    {
        StringArray results;
        NSArray* devs = [QTCaptureDevice inputDevicesWithMediaType: QTMediaTypeVideo];

        for (int i = 0; i < (int) [devs count]; ++i)
        {
            QTCaptureDevice* dev = (QTCaptureDevice*) [devs objectAtIndex: i];
            results.add (nsStringToJuce ([dev localizedDisplayName]));
        }

        return results;
    }

    QTCaptureDevice* device;
    QTCaptureDevice* audioDevice;
    QTCaptureDeviceInput* input;
    QTCaptureDeviceInput* audioInput;
    QTCaptureSession* session;
    QTCaptureMovieFileOutput* fileOutput;
    QTCaptureDecompressedVideoOutput* imageOutput;
    NSObject* callbackDelegate;
    String openingError;
    int64 firstPresentationTime, averageTimeOffset;
    bool isRecording;

    Array<CameraDevice::Listener*> listeners;
    CriticalSection listenerLock;

private:
    //==============================================================================
    // Obj-C delegate which receives both decompressed video frames and
    // file-output sample buffers, routing them back to the owning Pimpl.
    struct DelegateClass : public ObjCClass<NSObject>
    {
        DelegateClass() : ObjCClass<NSObject> ("JUCEAppDelegate_")
        {
            addIvar<Pimpl*> ("owner");

            addMethod (@selector (captureOutput:didOutputVideoFrame:withSampleBuffer:fromConnection:),
                       didOutputVideoFrame, "v@:@", @encode (CVImageBufferRef), "@@");

            // This selector must be bound to didOutputSampleBuffer (it was
            // previously bound to didOutputVideoFrame, whose signature doesn't
            // match, so captureBuffer() was never called and recorded-frame
            // timestamps were never tracked).
            addMethod (@selector (captureOutput:didOutputSampleBuffer:fromConnection:),
                       didOutputSampleBuffer, "v@:@@@");

            registerClass();
        }

        static void setOwner (id self, Pimpl* owner)   { object_setInstanceVariable (self, "owner", owner); }
        static Pimpl* getOwner (id self)               { return getIvar<Pimpl*> (self, "owner"); }

    private:
        static void didOutputVideoFrame (id self, SEL, QTCaptureOutput*, CVImageBufferRef videoFrame,
                                         QTSampleBuffer*, QTCaptureConnection*)
        {
            Pimpl* const internal = getOwner (self);

            if (internal->listeners.size() > 0)
            {
                JUCE_AUTORELEASEPOOL
                {
                    internal->callListeners ([CIImage imageWithCVImageBuffer: videoFrame],
                                             (int) CVPixelBufferGetWidth (videoFrame),
                                             (int) CVPixelBufferGetHeight (videoFrame));
                }
            }
        }

        static void didOutputSampleBuffer (id self, SEL, QTCaptureFileOutput*, QTSampleBuffer* sampleBuffer, QTCaptureConnection*)
        {
            getOwner (self)->captureBuffer (sampleBuffer);
        }
    };

    JUCE_DECLARE_NON_COPYABLE (Pimpl)
};
// Live camera preview: wraps a QTCaptureView attached to the device's capture session.
struct CameraDevice::ViewerComponent : public NSViewComponent
{
    ViewerComponent (CameraDevice& d)
    {
        JUCE_AUTORELEASEPOOL
        {
            captureView = [[QTCaptureView alloc] init];
            [captureView setCaptureSession: d.pimpl->session];

            setSize (640, 480);   // default preview size
            setView (captureView);
        }
    }

    ~ViewerComponent()
    {
        // Detach from the NSViewComponent and the session before releasing the view.
        setView (nil);
        [captureView setCaptureSession: nil];
        [captureView release];
    }

    QTCaptureView* captureView;

    JUCE_DECLARE_NON_COPYABLE (ViewerComponent)
};
// QTKit records into QuickTime movie containers.
String CameraDevice::getFileExtension()
{
    return ".mov";
}

View file

@ -0,0 +1,341 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#if JUCE_QUICKTIME
// A QTMovieView subclass which lets mouse events fall through to the JUCE
// component behind it, unless the movie's controller bar is visible.
struct NonInterceptingQTMovieViewClass : public ObjCClass <QTMovieView>
{
    NonInterceptingQTMovieViewClass() : ObjCClass <QTMovieView> ("JUCEQTMovieView_")
    {
        addMethod (@selector (hitTest:), hitTest, "@@:", @encode (NSPoint));
        addMethod (@selector (acceptsFirstMouse:), acceptsFirstMouse, "c@:@");

        registerClass();
    }

private:
    static NSView* hitTest (id self, SEL, NSPoint point)
    {
        // If the controller isn't showing, opt out of hit-testing entirely so
        // clicks reach whatever lies underneath this view.
        if (! [(QTMovieView*) self isControllerVisible])
            return nil;

        // Otherwise fall back to QTMovieView's normal hit-testing via the superclass.
        objc_super s = { self, [QTMovieView class] };
        return objc_msgSendSuper (&s, @selector (hitTest:), point);
    }

    static BOOL acceptsFirstMouse (id, SEL, NSEvent*)
    {
        // Respond to the click that also activates the window.
        return YES;
    }
};
//==============================================================================
#define theMovie (static_cast <QTMovie*> (movie))
//==============================================================================
// Creates the (initially empty) movie view and installs it as this component's NSView.
QuickTimeMovieComponent::QuickTimeMovieComponent()
    : movie (0)
{
    setOpaque (true);
    setVisible (true);

    static NonInterceptingQTMovieViewClass cls;
    QTMovieView* view = [cls.createInstance() initWithFrame: NSMakeRect (0, 0, 100.0f, 100.0f)];
    setView (view);

    // Pass unhandled events on up the responder chain.
    [view setNextResponder: [view superview]];
    [view setWantsLayer: YES]; // prevents the view failing to redraw correctly when paused.

    // setView() has taken its own reference, so drop ours.
    [view release];
}
QuickTimeMovieComponent::~QuickTimeMovieComponent()
{
    // Release the movie first, then detach (and release) the NSView.
    closeMovie();
    setView (nil);
}
// This translation unit is only compiled when JUCE_QUICKTIME is enabled,
// so QuickTime is always available here.
bool QuickTimeMovieComponent::isQuickTimeAvailable() noexcept
{
    return true;
}
// Creates a QTMovie from a JUCE InputStream. File streams are opened directly
// (and movieFile is set to the source file); other streams are buffered into
// memory and opened via a QTDataReference, guessing the format from a list of
// common suffixes. Returns nil on failure.
static QTMovie* openMovieFromStream (InputStream* movieStream, File& movieFile)
{
    // unfortunately, QTMovie objects can only be created on the main thread..
    jassert (MessageManager::getInstance()->isThisTheMessageThread());

    QTMovie* movie = nil;

    if (FileInputStream* const fin = dynamic_cast <FileInputStream*> (movieStream))
    {
        movieFile = fin->getFile();
        movie = [QTMovie movieWithFile: juceStringToNS (movieFile.getFullPathName())
                                 error: nil];
    }
    else
    {
        MemoryBlock temp;
        movieStream->readIntoMemoryBlock (temp);

        // QT needs a filename hint to pick a demuxer, so try each suffix in turn.
        static const char* const suffixesToTry[] = { ".mov", ".mp3", ".avi", ".m4a" };

        for (int i = 0; i < numElementsInArray (suffixesToTry); ++i)
        {
            movie = [QTMovie movieWithDataReference: [QTDataReference dataReferenceWithReferenceToData: [NSData dataWithBytes: temp.getData()
                                                                                                                      length: temp.getSize()]
                                                                                                  name: [NSString stringWithUTF8String: suffixesToTry[i]]
                                                                                              MIMEType: nsEmptyString()]
                                              error: nil];

            if (movie != 0)
                break;
        }
    }

    return movie;
}
// Convenience overload: opens a stream on the file and delegates to the
// stream-based loadMovie(), which takes ownership of the stream.
bool QuickTimeMovieComponent::loadMovie (const File& file, const bool showController)
{
    return loadMovie (file.createInputStream(), showController);
}
// Loads a movie from a stream (taking ownership of it), attaches it to the
// view and applies the controller/looping settings. Returns true on success.
bool QuickTimeMovieComponent::loadMovie (InputStream* movieStream, const bool showController)
{
    // Ensures the stream is deleted on every exit path.
    const ScopedPointer<InputStream> movieStreamDeleter (movieStream);

    closeMovie();

    if (getPeer() == nullptr)
    {
        // To open a movie, this component must be visible inside a functioning window, so that
        // the QT control can be assigned to the window.
        jassertfalse;
        return false;
    }

    if (movieStream == nullptr)
        return false;

    movie = openMovieFromStream (movieStream, movieFile);

    [theMovie retain];
    QTMovieView* view = (QTMovieView*) getView();
    [view setMovie: theMovie];

    controllerVisible = showController;
    [view setControllerVisible: controllerVisible];
    setLooping (looping);

    return movie != nil;
}
// Loads a movie from a URL, attaches it to the view and applies the
// controller/looping settings. Returns true on success.
bool QuickTimeMovieComponent::loadMovie (const URL& movieURL, const bool showController)
{
    // unfortunately, QTMovie objects can only be created on the main thread..
    jassert (MessageManager::getInstance()->isThisTheMessageThread());

    closeMovie();

    if (getPeer() == nullptr)
    {
        // To open a movie, this component must be visible inside a functioning window, so that
        // the QT control can be assigned to the window.
        jassertfalse;
        return false;
    }

    NSURL* url = [NSURL URLWithString: juceStringToNS (movieURL.toString (true))];
    NSError* err = nil;   // was uninitialized - must be nil'd before use

    if ([QTMovie canInitWithURL: url])
        movie = [QTMovie movieWithURL: url error: &err];

    [theMovie retain];
    QTMovieView* view = (QTMovieView*) getView();
    [view setMovie: theMovie];

    controllerVisible = showController;
    [view setControllerVisible: controllerVisible];
    setLooping (looping);

    return movie != nil;
}
// Stops playback and releases the current movie (safe to call when none is loaded).
void QuickTimeMovieComponent::closeMovie()
{
    stop();

    // Detach from the view before releasing our reference.
    QTMovieView* view = (QTMovieView*) getView();
    [view setMovie: nil];

    [theMovie release];
    movie = 0;
    movieFile = File::nonexistent;
}
// True if a movie is currently loaded.
bool QuickTimeMovieComponent::isMovieOpen() const
{
    return movie != nil;
}
// The file the current movie was loaded from, or File::nonexistent if it
// came from a non-file stream or no movie is open.
File QuickTimeMovieComponent::getCurrentMovieFile() const
{
    return movieFile;
}
void QuickTimeMovieComponent::play()
{
    // Messaging a nil movie is a harmless no-op.
    [theMovie play];
}
void QuickTimeMovieComponent::stop()
{
    // Messaging a nil movie is a harmless no-op.
    [theMovie stop];
}
// A movie counts as playing when it's loaded and its rate is non-zero.
bool QuickTimeMovieComponent::isPlaying() const
{
    if (movie == 0)
        return false;

    return [theMovie rate] != 0;
}
// Seeks to the given position (in seconds); ignored when no movie is loaded.
void QuickTimeMovieComponent::setPosition (const double seconds)
{
    if (movie == 0)
        return;

    // Use a 100kHz timescale for sub-millisecond seek resolution.
    QTTime newTime;
    newTime.timeValue = (uint64) (100000.0 * seconds);
    newTime.timeScale = 100000;
    newTime.flags = 0;

    [theMovie setCurrentTime: newTime];
}
// Current playback position in seconds (0 when no movie is loaded).
double QuickTimeMovieComponent::getPosition() const
{
    if (movie != 0)
    {
        const QTTime current = [theMovie currentTime];
        return current.timeValue / (double) current.timeScale;
    }

    return 0.0;
}
// Sets the playback rate (1.0 = normal speed); no-op if no movie is loaded.
void QuickTimeMovieComponent::setSpeed (const float newSpeed)
{
    [theMovie setRate: newSpeed];
}
// Total movie length in seconds (0 when no movie is loaded).
double QuickTimeMovieComponent::getMovieDuration() const
{
    if (movie != 0)
    {
        const QTTime total = [theMovie duration];
        return total.timeValue / (double) total.timeScale;
    }

    return 0.0;
}
// Stores the looping flag (so loadMovie() can re-apply it) and pushes it to
// the current movie, if there is one.
void QuickTimeMovieComponent::setLooping (const bool shouldLoop)
{
    looping = shouldLoop;

    [theMovie setAttribute: [NSNumber numberWithBool: shouldLoop]
                    forKey: QTMovieLoopsAttribute];
}
bool QuickTimeMovieComponent::isLooping() const
{
    return looping;
}
// Sets the movie's volume; no-op if no movie is loaded.
void QuickTimeMovieComponent::setMovieVolume (const float newVolume)
{
    [theMovie setVolume: newVolume];
}
// Current movie volume, or 0 when no movie is loaded.
float QuickTimeMovieComponent::getMovieVolume() const
{
    if (movie == 0)
        return 0.0f;

    return [theMovie volume];
}
// Returns the movie's natural pixel size via the out-parameters
// (both are set to 0 when no movie is loaded).
void QuickTimeMovieComponent::getMovieNormalSize (int& width, int& height) const
{
    width = 0;
    height = 0;

    if (movie == 0)
        return;

    const NSSize natural = [[theMovie attributeForKey: QTMovieNaturalSizeAttribute] sizeValue];
    width  = (int) natural.width;
    height = (int) natural.height;
}
// The QTMovieView covers this component while a movie is loaded, so a black
// background is only painted when there's nothing to show.
void QuickTimeMovieComponent::paint (Graphics& g)
{
    if (movie == 0)
        g.fillAll (Colours::black);
}
bool QuickTimeMovieComponent::isControllerVisible() const
{
    return controllerVisible;
}
//==============================================================================
// Rewinds to the beginning of the movie.
void QuickTimeMovieComponent::goToStart()
{
    setPosition (0.0);
}
// Resizes this component to fit within the given area while preserving the
// movie's natural aspect ratio. Falls back to filling the whole area when
// either the area or the movie size is empty.
void QuickTimeMovieComponent::setBoundsWithCorrectAspectRatio (const Rectangle<int>& spaceToFitWithin,
                                                               RectanglePlacement placement)
{
    int naturalWidth = 0, naturalHeight = 0;
    getMovieNormalSize (naturalWidth, naturalHeight);

    const Rectangle<int> naturalSize (naturalWidth, naturalHeight);

    if (spaceToFitWithin.isEmpty() || naturalSize.isEmpty())
        setBounds (spaceToFitWithin);
    else
        setBounds (placement.appliedTo (naturalSize, spaceToFitWithin));
}
//==============================================================================
#if ! (JUCE_MAC && JUCE_64BIT)
// Bridges a JUCE stream to a legacy QuickTime 'Movie' handle (32-bit only).
// Returns true and fills 'result' on success.
bool juce_OpenQuickTimeMovieFromStream (InputStream* movieStream, Movie& result, Handle&)
{
    if (movieStream == nullptr)
        return false;

    File file;
    QTMovie* movie = openMovieFromStream (movieStream, file);

    if (movie != nil)
        result = [movie quickTimeMovie];

    return movie != nil;
}
#endif
#endif

View file

@ -0,0 +1,792 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
// Local redeclaration of the DirectShow sample-grabber callback interface
// (qedit.h is not always available in newer Windows SDKs).
// NB: the method order defines the COM vtable layout - do not reorder.
interface ISampleGrabberCB : public IUnknown
{
    virtual STDMETHODIMP SampleCB (double, IMediaSample*) = 0;
    virtual STDMETHODIMP BufferCB (double, BYTE*, long) = 0;
};
// Local redeclaration of the DirectShow ISampleGrabber interface.
// NB: the method order defines the COM vtable layout - do not reorder.
interface ISampleGrabber : public IUnknown
{
    virtual HRESULT STDMETHODCALLTYPE SetOneShot (BOOL) = 0;
    virtual HRESULT STDMETHODCALLTYPE SetMediaType (const AM_MEDIA_TYPE*) = 0;
    virtual HRESULT STDMETHODCALLTYPE GetConnectedMediaType (AM_MEDIA_TYPE*) = 0;
    virtual HRESULT STDMETHODCALLTYPE SetBufferSamples (BOOL) = 0;
    virtual HRESULT STDMETHODCALLTYPE GetCurrentBuffer (long*, long*) = 0;
    virtual HRESULT STDMETHODCALLTYPE GetCurrentSample (IMediaSample**) = 0;
    virtual HRESULT STDMETHODCALLTYPE SetCallback (ISampleGrabberCB*, long) = 0;
};
// Standard DirectShow GUIDs for the sample-grabber and null-renderer filters
// (normally supplied by qedit.h).
static const IID IID_ISampleGrabberCB  = { 0x0579154A, 0x2B53, 0x4994, { 0xB0, 0xD0, 0xE7, 0x73, 0x14, 0x8E, 0xFF, 0x85 } };
static const IID IID_ISampleGrabber    = { 0x6B652FFF, 0x11FE, 0x4fce, { 0x92, 0xAD, 0x02, 0x66, 0xB5, 0xD7, 0xC7, 0x8F } };
static const CLSID CLSID_SampleGrabber = { 0xC1F400A0, 0x3F08, 0x11d3, { 0x9F, 0x0B, 0x00, 0x60, 0x08, 0x03, 0x9E, 0x37 } };
static const CLSID CLSID_NullRenderer  = { 0xC1F400A4, 0x3F08, 0x11d3, { 0x9F, 0x0B, 0x00, 0x60, 0x08, 0x03, 0x9E, 0x37 } };
struct CameraDevice::Pimpl : public ChangeBroadcaster
{
// Builds the DirectShow capture graph:
//   camera filter -> smart tee -> (preview pin) -> sample grabber -> null renderer
// The tee's capture pin is connected later, when recording starts.
// On any failure it simply returns, leaving openedSuccessfully == false.
Pimpl (const String&, int index,
       int minWidth, int minHeight,
       int maxWidth, int maxHeight)
    : isRecording (false),
      openedSuccessfully (false),
      imageNeedsFlipping (false),
      width (0), height (0),
      activeUsers (0),
      recordNextFrameTime (false),
      previewMaxFPS (60)
{
    HRESULT hr = captureGraphBuilder.CoCreateInstance (CLSID_CaptureGraphBuilder2);
    if (FAILED (hr))
        return;

    filter = enumerateCameras (nullptr, index);
    if (filter == nullptr)
        return;

    hr = graphBuilder.CoCreateInstance (CLSID_FilterGraph);
    if (FAILED (hr))
        return;

    hr = captureGraphBuilder->SetFiltergraph (graphBuilder);
    if (FAILED (hr))
        return;

    hr = graphBuilder.QueryInterface (mediaControl);
    if (FAILED (hr))
        return;

    {
        // Pick a capture resolution matching the requested min/max bounds.
        ComSmartPtr<IAMStreamConfig> streamConfig;

        hr = captureGraphBuilder->FindInterface (&PIN_CATEGORY_CAPTURE, 0, filter,
                                                 IID_IAMStreamConfig, (void**) streamConfig.resetAndGetPointerAddress());

        if (streamConfig != nullptr)
        {
            getVideoSizes (streamConfig);

            if (! selectVideoSize (streamConfig, minWidth, minHeight, maxWidth, maxHeight))
                return;
        }
    }

    hr = graphBuilder->AddFilter (filter, _T("Video Capture"));
    if (FAILED (hr))
        return;

    // The smart tee splits the stream into independent preview and capture legs.
    hr = smartTee.CoCreateInstance (CLSID_SmartTee);
    if (FAILED (hr))
        return;

    hr = graphBuilder->AddFilter (smartTee, _T("Smart Tee"));
    if (FAILED (hr))
        return;

    if (! connectFilters (filter, smartTee))
        return;

    ComSmartPtr<IBaseFilter> sampleGrabberBase;
    hr = sampleGrabberBase.CoCreateInstance (CLSID_SampleGrabber);
    if (FAILED (hr))
        return;

    hr = sampleGrabberBase.QueryInterface (IID_ISampleGrabber, sampleGrabber);
    if (FAILED (hr))
        return;

    {
        // Ask the grabber for uncompressed RGB24 frames.
        AM_MEDIA_TYPE mt = { 0 };
        mt.majortype = MEDIATYPE_Video;
        mt.subtype = MEDIASUBTYPE_RGB24;
        mt.formattype = FORMAT_VideoInfo;
        sampleGrabber->SetMediaType (&mt);
    }

    callback = new GrabberCallback (*this);
    hr = sampleGrabber->SetCallback (callback, 1);   // 1 = BufferCB mode

    hr = graphBuilder->AddFilter (sampleGrabberBase, _T("Sample Grabber"));
    if (FAILED (hr))
        return;

    ComSmartPtr<IPin> grabberInputPin;
    if (! (getPin (smartTee, PINDIR_OUTPUT, smartTeeCaptureOutputPin, "capture")
            && getPin (smartTee, PINDIR_OUTPUT, smartTeePreviewOutputPin, "preview")
            && getPin (sampleGrabberBase, PINDIR_INPUT, grabberInputPin)))
        return;

    hr = graphBuilder->Connect (smartTeePreviewOutputPin, grabberInputPin);
    if (FAILED (hr))
        return;

    // Read back the negotiated frame size.
    AM_MEDIA_TYPE mt = { 0 };
    hr = sampleGrabber->GetConnectedMediaType (&mt);
    VIDEOINFOHEADER* pVih = (VIDEOINFOHEADER*) (mt.pbFormat);
    width = pVih->bmiHeader.biWidth;
    height = pVih->bmiHeader.biHeight;

    // Terminate the preview leg with a null renderer (frames are consumed
    // via the grabber callback, not rendered).
    ComSmartPtr<IBaseFilter> nullFilter;
    hr = nullFilter.CoCreateInstance (CLSID_NullRenderer);
    hr = graphBuilder->AddFilter (nullFilter, _T("Null Renderer"));

    if (connectFilters (sampleGrabberBase, nullFilter)
         && addGraphToRot())
    {
        activeImage = Image (Image::RGB, width, height, true);
        loadingImage = Image (Image::RGB, width, height, true);

        openedSuccessfully = true;
    }
}
~Pimpl()
{
    // Stop the graph and deregister it before releasing the COM objects.
    if (mediaControl != nullptr)
        mediaControl->Stop();

    removeGraphFromRot();
    disconnectAnyViewers();

    // Unhook our callback before the grabber goes away.
    if (sampleGrabber != nullptr)
    {
        sampleGrabber->SetCallback (nullptr, 0);
        sampleGrabber = nullptr;
    }

    callback = nullptr;
    graphBuilder = nullptr;
    mediaControl = nullptr;
    filter = nullptr;
    captureGraphBuilder = nullptr;
    smartTee = nullptr;
    smartTeePreviewOutputPin = nullptr;
    smartTeeCaptureOutputPin = nullptr;
    asfWriter = nullptr;
}
// True if the constructor managed to build the whole capture graph.
bool openedOk() const noexcept { return openedSuccessfully; }
void startRecordingToFile (const File& file, int quality)
{
    // Keep the graph running while recording, then attach the file writer.
    addUser();
    isRecording = createFileCaptureFilter (file, quality);
}
// Tears down the file-writer leg of the graph if a recording is in progress.
void stopRecording()
{
    if (! isRecording)
        return;

    removeFileCaptureFilter();
    removeUser();
    isRecording = false;
}
// Wall-clock time of the first frame of the current/last recording
// (set in handleFrame(), adjusted for estimated camera latency).
Time getTimeOfFirstRecordedFrame() const
{
    return firstRecordedTime;
}
// Registers a frame listener; the first listener switches the graph on.
void addListener (CameraDevice::Listener* listenerToAdd)
{
    const ScopedLock lock (listenerLock);

    if (listeners.size() == 0)
        addUser();

    listeners.addIfNotAlreadyThere (listenerToAdd);
}
// Deregisters a frame listener; removing the last one releases the graph.
void removeListener (CameraDevice::Listener* listenerToRemove)
{
    const ScopedLock lock (listenerLock);

    listeners.removeAllInstancesOf (listenerToRemove);

    if (listeners.size() == 0)
        removeUser();
}
void callListeners (const Image& image)
{
const ScopedLock sl (listenerLock);
for (int i = listeners.size(); --i >= 0;)
if (CameraDevice::Listener* const l = listeners[i])
l->imageReceived (image);
}
// Ref-counts clients of the graph (viewers, listeners, recordings);
// the first one starts it running.
void addUser()
{
    if (openedSuccessfully && activeUsers++ == 0)
        mediaControl->Run();
}
// Counterpart to addUser(): stops the graph when the last client goes away.
void removeUser()
{
    if (openedSuccessfully && --activeUsers == 0)
        mediaControl->Stop();
}
// Called from GrabberCallback::BufferCB with one raw RGB24 frame.
// Copies (and vertically flips) the frame into loadingImage, timestamps the
// first frame of a recording, then notifies listeners and change-broadcasts.
// NB: runs on a DirectShow streaming thread, not the message thread.
void handleFrame (double /*time*/, BYTE* buffer, long /*bufferSize*/)
{
    if (recordNextFrameTime)
    {
        // Estimate when this first recorded frame actually happened, using a
        // default latency guess refined by the driver's reported latency.
        const double defaultCameraLatency = 0.1;

        firstRecordedTime = Time::getCurrentTime() - RelativeTime (defaultCameraLatency);
        recordNextFrameTime = false;

        ComSmartPtr<IPin> pin;
        if (getPin (filter, PINDIR_OUTPUT, pin))
        {
            ComSmartPtr<IAMPushSource> pushSource;
            HRESULT hr = pin.QueryInterface (pushSource);

            if (pushSource != nullptr)
            {
                REFERENCE_TIME latency = 0;
                hr = pushSource->GetLatency (&latency);

                firstRecordedTime = firstRecordedTime - RelativeTime ((double) latency);
            }
        }
    }

    {
        const int lineStride = width * 3;   // tightly-packed RGB24 rows
        const ScopedLock sl (imageSwapLock);

        {
            loadingImage.duplicateIfShared();
            const Image::BitmapData destData (loadingImage, 0, 0, width, height, Image::BitmapData::writeOnly);

            // DIBs are stored bottom-up, so copy the rows in reverse order.
            for (int i = 0; i < height; ++i)
                memcpy (destData.getLinePointer ((height - 1) - i),
                        buffer + lineStride * i,
                        lineStride);
        }

        imageNeedsFlipping = true;
    }

    // NOTE(review): loadingImage is read here outside imageSwapLock - looks
    // racy with drawCurrentImage()'s swap; confirm before changing.
    if (listeners.size() > 0)
        callListeners (loadingImage);

    sendChangeMessage();
}
// Paints the most recent frame, letter-boxed and centred within 'area'.
void drawCurrentImage (Graphics& g, Rectangle<int> area)
{
    // Swap in the freshly-captured frame if one has arrived since last paint.
    if (imageNeedsFlipping)
    {
        const ScopedLock sl (imageSwapLock);
        std::swap (loadingImage, activeImage);
        imageNeedsFlipping = false;
    }

    Rectangle<int> centred (RectanglePlacement (RectanglePlacement::centred)
                                .appliedTo (Rectangle<int> (width, height), area));

    // Black out the bars around the centred image.
    RectangleList<int> borders (area);
    borders.subtract (centred);
    g.setColour (Colours::black);
    g.fillRectList (borders);

    g.drawImage (activeImage, centred.getX(), centred.getY(),
                 centred.getWidth(), centred.getHeight(), 0, 0, width, height);
}
// Attaches a WM ASF writer to the smart tee's capture pin, configured via an
// XML profile for a video-only WMV file at the frame rate implied by
// 'quality'. Returns true and leaves the graph running on success; on any
// failure it cleans up and restarts the graph without the writer.
bool createFileCaptureFilter (const File& file, int quality)
{
    removeFileCaptureFilter();
    file.deleteFile();
    mediaControl->Stop();
    firstRecordedTime = Time();
    recordNextFrameTime = true;   // handleFrame() will stamp the first frame
    previewMaxFPS = 60;

    HRESULT hr = asfWriter.CoCreateInstance (CLSID_WMAsfWriter);

    if (SUCCEEDED (hr))
    {
        ComSmartPtr<IFileSinkFilter> fileSink;
        hr = asfWriter.QueryInterface (fileSink);

        if (SUCCEEDED (hr))
        {
            hr = fileSink->SetFileName (file.getFullPathName().toWideCharPointer(), 0);

            if (SUCCEEDED (hr))
            {
                hr = graphBuilder->AddFilter (asfWriter, _T("AsfWriter"));

                if (SUCCEEDED (hr))
                {
                    ComSmartPtr<IConfigAsfWriter> asfConfig;
                    hr = asfWriter.QueryInterface (asfConfig);
                    asfConfig->SetIndexMode (true);
                    ComSmartPtr<IWMProfileManager> profileManager;
                    hr = WMCreateProfileManager (profileManager.resetAndGetPointerAddress());

                    // This gibberish is the DirectShow profile for a video-only wmv file.
                    String prof ("<profile version=\"589824\" storageformat=\"1\" name=\"Quality\" description=\"Quality type for output.\">"
                                   "<streamconfig majortype=\"{73646976-0000-0010-8000-00AA00389B71}\" streamnumber=\"1\" "
                                                 "streamname=\"Video Stream\" inputname=\"Video409\" bitrate=\"894960\" "
                                                 "bufferwindow=\"0\" reliabletransport=\"1\" decodercomplexity=\"AU\" rfc1766langid=\"en-us\">"
                                     "<videomediaprops maxkeyframespacing=\"50000000\" quality=\"90\"/>"
                                     "<wmmediatype subtype=\"{33564D57-0000-0010-8000-00AA00389B71}\" bfixedsizesamples=\"0\" "
                                                  "btemporalcompression=\"1\" lsamplesize=\"0\">"
                                       "<videoinfoheader dwbitrate=\"894960\" dwbiterrorrate=\"0\" avgtimeperframe=\"$AVGTIMEPERFRAME\">"
                                         "<rcsource left=\"0\" top=\"0\" right=\"$WIDTH\" bottom=\"$HEIGHT\"/>"
                                         "<rctarget left=\"0\" top=\"0\" right=\"$WIDTH\" bottom=\"$HEIGHT\"/>"
                                         "<bitmapinfoheader biwidth=\"$WIDTH\" biheight=\"$HEIGHT\" biplanes=\"1\" bibitcount=\"24\" "
                                                           "bicompression=\"WMV3\" bisizeimage=\"0\" bixpelspermeter=\"0\" biypelspermeter=\"0\" "
                                                           "biclrused=\"0\" biclrimportant=\"0\"/>"
                                       "</videoinfoheader>"
                                     "</wmmediatype>"
                                   "</streamconfig>"
                                 "</profile>");

                    // quality 0..2 maps to 10/15/30 fps.
                    const int fps[] = { 10, 15, 30 };
                    int maxFramesPerSecond = fps [jlimit (0, numElementsInArray (fps) - 1, quality & 0xff)];

                    if ((quality & 0xff000000) != 0) // (internal hacky way to pass explicit frame rates for testing)
                        maxFramesPerSecond = (quality >> 24) & 0xff;

                    prof = prof.replace ("$WIDTH", String (width))
                               .replace ("$HEIGHT", String (height))
                               .replace ("$AVGTIMEPERFRAME", String (10000000 / maxFramesPerSecond));

                    ComSmartPtr<IWMProfile> currentProfile;
                    hr = profileManager->LoadProfileByData (prof.toWideCharPointer(), currentProfile.resetAndGetPointerAddress());
                    hr = asfConfig->ConfigureFilterUsingProfile (currentProfile);

                    if (SUCCEEDED (hr))
                    {
                        ComSmartPtr<IPin> asfWriterInputPin;

                        if (getPin (asfWriter, PINDIR_INPUT, asfWriterInputPin, "Video Input 01"))
                        {
                            hr = graphBuilder->Connect (smartTeeCaptureOutputPin, asfWriterInputPin);

                            if (SUCCEEDED (hr) && openedSuccessfully && activeUsers > 0
                                 && SUCCEEDED (mediaControl->Run()))
                            {
                                previewMaxFPS = (quality < 2) ? 15 : 25; // throttle back the preview comps to try to leave the cpu free for encoding

                                if ((quality & 0x00ff0000) != 0)  // (internal hacky way to pass explicit frame rates for testing)
                                    previewMaxFPS = (quality >> 16) & 0xff;

                                return true;
                            }
                        }
                    }
                }
            }
        }
    }

    // Failed somewhere above: drop the writer and restart the plain graph.
    removeFileCaptureFilter();

    if (openedSuccessfully && activeUsers > 0)
        mediaControl->Run();

    return false;
}
// Detaches the ASF writer (if any) and restarts the graph for any remaining users.
void removeFileCaptureFilter()
{
    mediaControl->Stop();

    if (asfWriter != nullptr)
    {
        graphBuilder->RemoveFilter (asfWriter);
        asfWriter = nullptr;
    }

    if (openedSuccessfully && activeUsers > 0)
        mediaControl->Run();

    previewMaxFPS = 60;   // no longer encoding, so the preview can run at full rate
}
// Enumerates the system's video-input devices. If 'names' is non-null, every
// device's friendly name is appended to it. If deviceIndexToOpen matches a
// device's index, that device's filter is bound and returned; otherwise
// returns nullptr (pass -1 to just collect names).
static ComSmartPtr<IBaseFilter> enumerateCameras (StringArray* names, const int deviceIndexToOpen)
{
    int index = 0;
    ComSmartPtr<ICreateDevEnum> pDevEnum;

    if (SUCCEEDED (pDevEnum.CoCreateInstance (CLSID_SystemDeviceEnum)))
    {
        ComSmartPtr<IEnumMoniker> enumerator;
        HRESULT hr = pDevEnum->CreateClassEnumerator (CLSID_VideoInputDeviceCategory, enumerator.resetAndGetPointerAddress(), 0);

        if (SUCCEEDED (hr) && enumerator != nullptr)
        {
            ComSmartPtr<IMoniker> moniker;
            ULONG fetched;

            while (enumerator->Next (1, moniker.resetAndGetPointerAddress(), &fetched) == S_OK)
            {
                ComSmartPtr<IBaseFilter> captureFilter;
                hr = moniker->BindToObject (0, 0, IID_IBaseFilter, (void**) captureFilter.resetAndGetPointerAddress());

                if (SUCCEEDED (hr))
                {
                    ComSmartPtr<IPropertyBag> propertyBag;
                    hr = moniker->BindToStorage (0, 0, IID_IPropertyBag, (void**) propertyBag.resetAndGetPointerAddress());

                    if (SUCCEEDED (hr))
                    {
                        VARIANT var;
                        var.vt = VT_BSTR;

                        hr = propertyBag->Read (_T("FriendlyName"), &var, 0);
                        propertyBag = nullptr;

                        if (SUCCEEDED (hr))
                        {
                            if (names != nullptr)
                                names->add (var.bstrVal);

                            const bool isWantedDevice = (index == deviceIndexToOpen);

                            // Free the BSTR allocated by Read() - previously
                            // this leaked one string per enumerated device.
                            VariantClear (&var);

                            if (isWantedDevice)
                                return captureFilter;

                            ++index;
                        }
                    }
                }
            }
        }
    }

    return nullptr;
}
// Returns the friendly names of all video-input devices on this machine.
static StringArray getAvailableDevices()
{
    StringArray deviceNames;
    enumerateCameras (&deviceNames, -1);   // -1: collect names without opening anything
    return deviceNames;
}
// COM callback object handed to the DirectShow sample grabber: forwards each
// captured video buffer to the owning Pimpl via handleFrame().
class GrabberCallback : public ComBaseClassHelperBase<ISampleGrabberCB>
{
public:
GrabberCallback (Pimpl& p)
: ComBaseClassHelperBase<ISampleGrabberCB> (0), owner (p) {}
JUCE_COMRESULT QueryInterface (REFIID refId, void** result)
{
if (refId == IID_ISampleGrabberCB)
return castToType<ISampleGrabberCB> (result);
return ComBaseClassHelperBase<ISampleGrabberCB>::QueryInterface (refId, result);
}
// Deliberately unimplemented: frames are delivered through BufferCB instead.
STDMETHODIMP SampleCB (double, IMediaSample*) { return E_FAIL; }
// Called by DirectShow for each grabbed frame.
STDMETHODIMP BufferCB (double time, BYTE* buffer, long bufferSize)
{
owner.handleFrame (time, buffer, bufferSize);
return S_OK;
}
private:
Pimpl& owner;
JUCE_DECLARE_NON_COPYABLE (GrabberCallback)
};
ComSmartPtr<GrabberCallback> callback;
// Frame listeners, guarded by listenerLock (callbacks arrive from DirectShow).
Array<CameraDevice::Listener*> listeners;
CriticalSection listenerLock;
bool isRecording, openedSuccessfully;
int width, height;
Time firstRecordedTime;
// Preview components currently attached to this device.
Array<ViewerComponent*> viewerComps;
// DirectShow graph objects making up the capture/preview/record pipeline.
ComSmartPtr<ICaptureGraphBuilder2> captureGraphBuilder;
ComSmartPtr<IBaseFilter> filter, smartTee, asfWriter;
ComSmartPtr<IGraphBuilder> graphBuilder;
ComSmartPtr<ISampleGrabber> sampleGrabber;
ComSmartPtr<IMediaControl> mediaControl;
ComSmartPtr<IPin> smartTeePreviewOutputPin, smartTeeCaptureOutputPin;
int activeUsers;
// Parallel arrays of the capture sizes reported by getVideoSizes().
Array<int> widths, heights;
// Running Object Table registration cookie (see addGraphToRot()).
DWORD graphRegistrationID;
// Guards the loading/active image swap between the grabber and paint threads.
CriticalSection imageSwapLock;
bool imageNeedsFlipping;
Image loadingImage, activeImage;
bool recordNextFrameTime;
// Upper repaint-rate bound handed to viewer components.
int previewMaxFPS;
private:
// Queries the capture stream for all supported frame sizes, storing the
// de-duplicated widths/heights in the parallel 'widths'/'heights' arrays.
void getVideoSizes (IAMStreamConfig* const streamConfig)
{
widths.clear();
heights.clear();
int count = 0, size = 0;
streamConfig->GetNumberOfCapabilities (&count, &size);
// Only proceed if the caps structure is the video type we expect.
if (size == sizeof (VIDEO_STREAM_CONFIG_CAPS))
{
for (int i = 0; i < count; ++i)
{
VIDEO_STREAM_CONFIG_CAPS scc;
AM_MEDIA_TYPE* config;
HRESULT hr = streamConfig->GetStreamCaps (i, &config, (BYTE*) &scc);
if (SUCCEEDED (hr))
{
const int w = scc.InputSize.cx;
const int h = scc.InputSize.cy;
// Several capabilities can report the same frame size; record it once.
bool duplicate = false;
for (int j = widths.size(); --j >= 0;)
{
if (w == widths.getUnchecked (j) && h == heights.getUnchecked (j))
{
duplicate = true;
break;
}
}
if (! duplicate)
{
DBG ("Camera capture size: " + String (w) + ", " + String (h));
widths.add (w);
heights.add (h);
}
deleteMediaType (config);
}
}
}
}
// Picks the largest capture resolution within [min, max] bounds and applies
// it to the stream. Returns true if a format was successfully set.
bool selectVideoSize (IAMStreamConfig* const streamConfig,
                      const int minWidth, const int minHeight,
                      const int maxWidth, const int maxHeight)
{
    int count = 0, size = 0, bestArea = 0, bestIndex = -1;
    streamConfig->GetNumberOfCapabilities (&count, &size);

    if (size == sizeof (VIDEO_STREAM_CONFIG_CAPS))
    {
        AM_MEDIA_TYPE* config;
        VIDEO_STREAM_CONFIG_CAPS scc;

        // Find the in-range capability with the largest pixel area.
        for (int i = 0; i < count; ++i)
        {
            HRESULT hr = streamConfig->GetStreamCaps (i, &config, (BYTE*) &scc);

            if (SUCCEEDED (hr))
            {
                if (scc.InputSize.cx >= minWidth
                     && scc.InputSize.cy >= minHeight
                     && scc.InputSize.cx <= maxWidth
                     && scc.InputSize.cy <= maxHeight)
                {
                    int area = scc.InputSize.cx * scc.InputSize.cy;

                    if (area > bestArea)
                    {
                        bestIndex = i;
                        bestArea = area;
                    }
                }

                deleteMediaType (config);
            }
        }

        if (bestIndex >= 0)
        {
            HRESULT hr = streamConfig->GetStreamCaps (bestIndex, &config, (BYTE*) &scc);

            // The original code ignored this result, passing a potentially
            // uninitialised 'config' on to SetFormat() and deleteMediaType().
            if (SUCCEEDED (hr))
            {
                hr = streamConfig->SetFormat (config);
                deleteMediaType (config);
                return SUCCEEDED (hr);
            }
        }
    }

    return false;
}
// Finds a filter's first pin with the wanted direction (and, optionally, a
// matching name), returning it in 'result'. Returns true if one was found.
static bool getPin (IBaseFilter* filter, const PIN_DIRECTION wantedDirection,
                    ComSmartPtr<IPin>& result, const char* pinName = nullptr)
{
    ComSmartPtr<IEnumPins> enumerator;
    ComSmartPtr<IPin> pin;

    filter->EnumPins (enumerator.resetAndGetPointerAddress());

    while (enumerator->Next (1, pin.resetAndGetPointerAddress(), 0) == S_OK)
    {
        PIN_DIRECTION dir;
        pin->QueryDirection (&dir);

        if (wantedDirection == dir)
        {
            PIN_INFO info = { 0 };
            pin->QueryPinInfo (&info);

            // QueryPinInfo() add-refs the pin's owning filter - the original
            // code leaked that reference; it must be released by the caller.
            if (info.pFilter != nullptr)
                info.pFilter->Release();

            if (pinName == nullptr || String (pinName).equalsIgnoreCase (String (info.achName)))
            {
                result = pin;
                return true;
            }
        }
    }

    return false;
}
// Wires the first filter's output pin into the second filter's input pin
// via the graph builder. Returns true if the connection succeeded.
bool connectFilters (IBaseFilter* const first, IBaseFilter* const second) const
{
    ComSmartPtr<IPin> sourcePin, destPin;

    if (! getPin (first, PINDIR_OUTPUT, sourcePin))
        return false;

    if (! getPin (second, PINDIR_INPUT, destPin))
        return false;

    return SUCCEEDED (graphBuilder->Connect (sourcePin, destPin));
}
// Registers the filter graph in the Running Object Table so debugging tools
// (e.g. GraphEdit) can attach to it. Returns true on success.
bool addGraphToRot()
{
    ComSmartPtr<IRunningObjectTable> rot;
    if (FAILED (GetRunningObjectTable (0, rot.resetAndGetPointerAddress())))
        return false;

    ComSmartPtr<IMoniker> moniker;

    // This buffer becomes the moniker's item name. The original code handed
    // it to CreateItemMoniker() completely uninitialised (random, possibly
    // unterminated stack bytes) - zero-initialise it instead.
    WCHAR buffer[128] = {};

    HRESULT hr = CreateItemMoniker (_T("!"), buffer, moniker.resetAndGetPointerAddress());
    if (FAILED (hr))
        return false;

    graphRegistrationID = 0;
    return SUCCEEDED (rot->Register (0, graphBuilder, moniker, &graphRegistrationID));
}
void removeGraphFromRot()
{
ComSmartPtr<IRunningObjectTable> rot;
if (SUCCEEDED (GetRunningObjectTable (0, rot.resetAndGetPointerAddress())))
rot->Revoke (graphRegistrationID);
}
void disconnectAnyViewers();
// Frees an AM_MEDIA_TYPE allocated by DirectShow, including its format block
// and any attached IUnknown. Now also safe to call with a null pointer.
static void deleteMediaType (AM_MEDIA_TYPE* const pmt)
{
    if (pmt == nullptr)
        return;

    if (pmt->cbFormat != 0)
        CoTaskMemFree ((PVOID) pmt->pbFormat);

    if (pmt->pUnk != nullptr)
        pmt->pUnk->Release();

    CoTaskMemFree (pmt);
}
JUCE_DECLARE_NON_COPYABLE (Pimpl)
};
//==============================================================================
// Component that paints the live camera preview. It registers itself with the
// device's Pimpl, which broadcasts a change message whenever a new frame is
// available; repaints are throttled to the device's preview frame rate.
struct CameraDevice::ViewerComponent : public Component,
public ChangeListener
{
ViewerComponent (CameraDevice& d)
: owner (d.pimpl), maxFPS (15), lastRepaintTime (0)
{
setOpaque (true);
owner->addChangeListener (this);
owner->addUser();
owner->viewerComps.add (this);
setSize (owner->width, owner->height);
}
~ViewerComponent()
{
// 'owner' is nulled by ownerDeleted() if the device was destroyed first.
if (owner != nullptr)
{
owner->viewerComps.removeFirstMatchingValue (this);
owner->removeUser();
owner->removeChangeListener (this);
}
}
// Called by the Pimpl when the device is destroyed while viewers remain alive.
void ownerDeleted()
{
owner = nullptr;
}
void paint (Graphics& g) override
{
g.setColour (Colours::black);
g.setImageResamplingQuality (Graphics::lowResamplingQuality);
// If the device has gone away, just paint black.
if (owner != nullptr)
owner->drawCurrentImage (g, getLocalBounds());
else
g.fillAll();
}
// A new frame has arrived: repaint, but no faster than maxFPS allows.
void changeListenerCallback (ChangeBroadcaster*) override
{
const int64 now = Time::currentTimeMillis();
if (now >= lastRepaintTime + (1000 / maxFPS))
{
lastRepaintTime = now;
repaint();
// Pick up any throttling change made while recording (see Pimpl::previewMaxFPS).
if (owner != nullptr)
maxFPS = owner->previewMaxFPS;
}
}
private:
Pimpl* owner;
int maxFPS;
int64 lastRepaintTime;
};
// Tells every attached viewer component that its device is going away, so
// they stop dereferencing the soon-to-be-deleted Pimpl.
void CameraDevice::Pimpl::disconnectAnyViewers()
{
    int i = viewerComps.size();

    while (--i >= 0)
        viewerComps.getUnchecked (i)->ownerDeleted();
}
// Recordings on Windows go through the ASF writer, so files are Windows Media.
String CameraDevice::getFileExtension()
{
    return String (".wmv");
}

View file

@ -0,0 +1,926 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
namespace DirectShowHelpers
{
bool checkDShowAvailability()
{
ComSmartPtr <IGraphBuilder> graph;
return SUCCEEDED (graph.CoCreateInstance (CLSID_FilterGraph));
}
//======================================================================
// Abstract interface hiding the differences between the VMR-7 and EVR video
// renderers, so DirectShowContext can drive either through a single API.
class VideoRenderer
{
public:
VideoRenderer() {}
virtual ~VideoRenderer() {}
// Creates the renderer filter, adds it to the graph and binds it to 'hwnd'.
virtual HRESULT create (ComSmartPtr <IGraphBuilder>& graphBuilder,
ComSmartPtr <IBaseFilter>& baseFilter, HWND hwnd) = 0;
// Points video output at a (possibly new) native window.
virtual void setVideoWindow (HWND hwnd) = 0;
// Updates the source/destination rectangles after a move or resize.
virtual void setVideoPosition (HWND hwnd, long videoWidth, long videoHeight) = 0;
// Redraws the current frame into the window.
virtual void repaintVideo (HWND hwnd, HDC hdc) = 0;
// Notifies the renderer of a display-mode change.
virtual void displayModeChanged() = 0;
// Reports the native size of the video stream.
virtual HRESULT getVideoSize (long& videoWidth, long& videoHeight) = 0;
};
//======================================================================
// Video Mixing Renderer 7 implementation, used in windowless mode. This is
// the fallback renderer when the EVR isn't available.
class VMR7 : public VideoRenderer
{
public:
VMR7() {}
// Builds the VMR-7 filter, switches it to windowless mode and clips its
// output to the given window. Each step only runs if the previous succeeded.
HRESULT create (ComSmartPtr <IGraphBuilder>& graphBuilder,
ComSmartPtr <IBaseFilter>& baseFilter, HWND hwnd)
{
ComSmartPtr <IVMRFilterConfig> filterConfig;
HRESULT hr = baseFilter.CoCreateInstance (CLSID_VideoMixingRenderer);
if (SUCCEEDED (hr)) hr = graphBuilder->AddFilter (baseFilter, L"VMR-7");
if (SUCCEEDED (hr)) hr = baseFilter.QueryInterface (filterConfig);
if (SUCCEEDED (hr)) hr = filterConfig->SetRenderingMode (VMRMode_Windowless);
if (SUCCEEDED (hr)) hr = baseFilter.QueryInterface (windowlessControl);
if (SUCCEEDED (hr)) hr = windowlessControl->SetVideoClippingWindow (hwnd);
if (SUCCEEDED (hr)) hr = windowlessControl->SetAspectRatioMode (VMR_ARMODE_LETTER_BOX);
return hr;
}
void setVideoWindow (HWND hwnd)
{
windowlessControl->SetVideoClippingWindow (hwnd);
}
// Maps the full video frame into the window's client area.
void setVideoPosition (HWND hwnd, long videoWidth, long videoHeight)
{
RECT src, dest;
SetRect (&src, 0, 0, videoWidth, videoHeight);
GetClientRect (hwnd, &dest);
windowlessControl->SetVideoPosition (&src, &dest);
}
void repaintVideo (HWND hwnd, HDC hdc)
{
windowlessControl->RepaintVideo (hwnd, hdc);
}
void displayModeChanged()
{
windowlessControl->DisplayModeChanged();
}
HRESULT getVideoSize (long& videoWidth, long& videoHeight)
{
return windowlessControl->GetNativeVideoSize (&videoWidth, &videoHeight, nullptr, nullptr);
}
private:
ComSmartPtr <IVMRWindowlessControl> windowlessControl;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (VMR7)
};
//======================================================================
#if JUCE_MEDIAFOUNDATION
// Enhanced Video Renderer implementation (Media Foundation). Preferred over
// the VMR-7 when available - see the DirectShowContext constructor.
class EVR : public VideoRenderer
{
public:
EVR() {}
// Builds the EVR filter and obtains its display control via IMFGetService.
// Each step only runs if the previous one succeeded.
HRESULT create (ComSmartPtr <IGraphBuilder>& graphBuilder,
ComSmartPtr <IBaseFilter>& baseFilter, HWND hwnd)
{
ComSmartPtr <IMFGetService> getService;
HRESULT hr = baseFilter.CoCreateInstance (CLSID_EnhancedVideoRenderer);
if (SUCCEEDED (hr)) hr = graphBuilder->AddFilter (baseFilter, L"EVR");
if (SUCCEEDED (hr)) hr = baseFilter.QueryInterface (getService);
if (SUCCEEDED (hr)) hr = getService->GetService (MR_VIDEO_RENDER_SERVICE, IID_IMFVideoDisplayControl,
(LPVOID*) videoDisplayControl.resetAndGetPointerAddress());
if (SUCCEEDED (hr)) hr = videoDisplayControl->SetVideoWindow (hwnd);
if (SUCCEEDED (hr)) hr = videoDisplayControl->SetAspectRatioMode (MFVideoARMode_PreservePicture);
return hr;
}
void setVideoWindow (HWND hwnd)
{
videoDisplayControl->SetVideoWindow (hwnd);
}
// The EVR takes a normalised source rectangle, so the video size args are unused.
void setVideoPosition (HWND hwnd, long /*videoWidth*/, long /*videoHeight*/)
{
const MFVideoNormalizedRect src = { 0.0f, 0.0f, 1.0f, 1.0f };
RECT dest;
GetClientRect (hwnd, &dest);
videoDisplayControl->SetVideoPosition (&src, &dest);
}
void repaintVideo (HWND /*hwnd*/, HDC /*hdc*/)
{
videoDisplayControl->RepaintVideo();
}
// Not needed for the EVR - it tracks display changes itself.
void displayModeChanged() {}
HRESULT getVideoSize (long& videoWidth, long& videoHeight)
{
SIZE sz;
HRESULT hr = videoDisplayControl->GetNativeVideoSize (&sz, nullptr);
videoWidth = sz.cx;
videoHeight = sz.cy;
return hr;
}
private:
ComSmartPtr <IMFVideoDisplayControl> videoDisplayControl;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (EVR)
};
#endif
}
//======================================================================
class DirectShowComponent::DirectShowContext : public AsyncUpdater
{
public:
// Creates the playback context and chooses a renderer: the EVR on Vista or
// later (when Media Foundation support is compiled in), otherwise the VMR-7.
DirectShowContext (DirectShowComponent& c, VideoRendererType renderType)
: component (c),
hwnd (0),
hdc (0),
state (uninitializedState),
hasVideo (false),
videoWidth (0),
videoHeight (0),
type (renderType),
needToUpdateViewport (true),
needToRecreateNativeWindow (false)
{
// COM must be initialised on this thread before any DirectShow calls.
CoInitialize (0);
if (type == dshowDefault)
{
type = dshowVMR7;
#if JUCE_MEDIAFOUNDATION
if (SystemStats::getOperatingSystemType() >= SystemStats::WinVista)
type = dshowEVR;
#endif
}
}
~DirectShowContext()
{
// Tear the graph down before dropping our COM initialisation.
release();
CoUninitialize();
}
//======================================================================
// Moves the native video window to the given bounds (peer coordinates).
void updateWindowPosition (const Rectangle<int>& newBounds)
{
nativeWindow->setWindowPosition (newBounds);
}
void showWindow (bool shouldBeVisible)
{
nativeWindow->showWindow (shouldBeVisible);
}
//======================================================================
// Asks the renderer to redraw the current frame into the native window.
void repaint()
{
if (hasVideo)
videoRenderer->repaintVideo (nativeWindow->getHandle(), nativeWindow->getContext());
}
// Re-applies the video source/destination rectangles after a move/resize.
void updateVideoPosition()
{
if (hasVideo)
videoRenderer->setVideoPosition (nativeWindow->getHandle(), videoWidth, videoHeight);
}
// Forwards display-mode changes to the renderer (needed by the VMR-7).
void displayResolutionChanged()
{
if (hasVideo)
videoRenderer->displayModeChanged();
}
//======================================================================
//======================================================================
// The component's top-level peer has changed: tear down the native video
// window and event routing, then rebuild them under the new peer's HWND.
void peerChanged()
{
deleteNativeWindow();
mediaEvent->SetNotifyWindow (0, 0, 0);
if (videoRenderer != nullptr)
videoRenderer->setVideoWindow (nullptr);
createNativeWindow();
mediaEvent->SetNotifyWindow ((OAHWND) hwnd, graphEventID, 0);
if (videoRenderer != nullptr)
videoRenderer->setVideoWindow (hwnd);
}
// Performs any deferred window-recreation/viewport work on the message thread.
void handleAsyncUpdate() override
{
if (hwnd != 0)
{
if (needToRecreateNativeWindow)
{
peerChanged();
needToRecreateNativeWindow = false;
}
if (needToUpdateViewport)
{
updateVideoPosition();
needToUpdateViewport = false;
}
repaint();
}
else
{
// No native window yet - retry on a later message-loop callback.
triggerAsyncUpdate();
}
}
// Schedules the native window to be rebuilt on the message thread.
void recreateNativeWindowAsync()
{
needToRecreateNativeWindow = true;
triggerAsyncUpdate();
}
// Schedules a viewport update on the message thread.
void updateContextPosition()
{
needToUpdateViewport = true;
triggerAsyncUpdate();
}
//======================================================================
// Builds a complete playback graph for the given file or URL: creates the
// filter graph and its playback interfaces, attaches a video renderer, lets
// DirectShow render the source, then pauses ready for playback.
// Returns true on success; on failure everything is released again.
bool loadFile (const String& fileOrURLPath)
{
jassert (state == uninitializedState);
if (! createNativeWindow())
return false;
HRESULT hr = graphBuilder.CoCreateInstance (CLSID_FilterGraph);
// basic playback interfaces
if (SUCCEEDED (hr)) hr = graphBuilder.QueryInterface (mediaControl);
if (SUCCEEDED (hr)) hr = graphBuilder.QueryInterface (mediaPosition);
if (SUCCEEDED (hr)) hr = graphBuilder.QueryInterface (mediaEvent);
if (SUCCEEDED (hr)) hr = graphBuilder.QueryInterface (basicAudio);
// video renderer interface
if (SUCCEEDED (hr))
{
#if JUCE_MEDIAFOUNDATION
if (type == dshowEVR)
videoRenderer = new DirectShowHelpers::EVR();
else
#endif
videoRenderer = new DirectShowHelpers::VMR7();
hr = videoRenderer->create (graphBuilder, baseFilter, hwnd);
}
// build filter graph
if (SUCCEEDED (hr))
{
hr = graphBuilder->RenderFile (fileOrURLPath.toWideCharPointer(), nullptr);
if (FAILED (hr))
{
// Annoyingly, if we don't run the msg loop between failing and deleting the window, the
// whole OS message-dispatch system gets itself into a state, and refuses to deliver any
// more messages for the whole app. (That's what happens in Win7, anyway)
MessageManager::getInstance()->runDispatchLoopUntil (200);
}
}
// remove video renderer if not connected (no video)
if (SUCCEEDED (hr))
{
if (isRendererConnected())
{
hasVideo = true;
hr = videoRenderer->getVideoSize (videoWidth, videoHeight);
}
else
{
hasVideo = false;
graphBuilder->RemoveFilter (baseFilter);
videoRenderer = nullptr;
baseFilter = nullptr;
}
}
// set window to receive events
if (SUCCEEDED (hr))
hr = mediaEvent->SetNotifyWindow ((OAHWND) hwnd, graphEventID, 0);
if (SUCCEEDED (hr))
{
state = stoppedState;
pause();
return true;
}
// Note that if you're trying to open a file and this method fails, you may
// just need to install a suitable codec. It seems that by default DirectShow
// doesn't support a very good range of formats.
release();
return false;
}
// Stops playback and releases the whole graph. COM objects are released in
// reverse order of dependency, with the graph builder last.
void release()
{
if (mediaControl != nullptr)
mediaControl->Stop();
if (mediaEvent != nullptr)
mediaEvent->SetNotifyWindow (0, 0, 0);
if (videoRenderer != nullptr)
videoRenderer->setVideoWindow (0);
hasVideo = false;
videoRenderer = nullptr;
baseFilter = nullptr;
basicAudio = nullptr;
mediaEvent = nullptr;
mediaPosition = nullptr;
mediaControl = nullptr;
graphBuilder = nullptr;
state = uninitializedState;
videoWidth = 0;
videoHeight = 0;
if (nativeWindow != nullptr)
deleteNativeWindow();
}
// Drains the graph's event queue (called when our window receives the
// graphEventID message) and reacts to repaint/completion/abort events.
void graphEventProc()
{
LONG ec;
LONG_PTR p1, p2;
jassert (mediaEvent != nullptr);
while (SUCCEEDED (mediaEvent->GetEvent (&ec, &p1, &p2, 0)))
{
switch (ec)
{
case EC_REPAINT:
component.repaint();
break;
case EC_COMPLETE:
// Either loop back to the start or stop, depending on the component's setting.
if (component.isLooping())
component.goToStart();
else
component.stop();
break;
case EC_USERABORT:
case EC_ERRORABORT:
case EC_ERRORABORTEX:
component.closeMovie();
break;
default:
break;
}
// Event parameters must always be freed, whatever the event type.
mediaEvent->FreeEventParams (ec, p1, p2);
}
}
//======================================================================
// Transport controls: each forwards to IMediaControl and tracks our state.
void run()
{
mediaControl->Run();
state = runningState;
}
void stop()
{
mediaControl->Stop();
state = stoppedState;
}
void pause()
{
mediaControl->Pause();
state = pausedState;
}
//======================================================================
// State/size queries.
bool isInitialised() const noexcept { return state != uninitializedState; }
bool isRunning() const noexcept { return state == runningState; }
bool isPaused() const noexcept { return state == pausedState; }
bool isStopped() const noexcept { return state == stoppedState; }
bool containsVideo() const noexcept { return hasVideo; }
int getVideoWidth() const noexcept { return (int) videoWidth; }
int getVideoHeight() const noexcept { return (int) videoHeight; }
//======================================================================
// Duration/position are in seconds (REFTIME).
double getDuration() const
{
REFTIME duration;
mediaPosition->get_Duration (&duration);
return duration;
}
double getPosition() const
{
REFTIME seconds;
mediaPosition->get_CurrentPosition (&seconds);
return seconds;
}
//======================================================================
void setSpeed (const float newSpeed) { mediaPosition->put_Rate (newSpeed); }
void setPosition (const double seconds) { mediaPosition->put_CurrentPosition (seconds); }
void setVolume (const float newVolume) { basicAudio->put_Volume (convertToDShowVolume (newVolume)); }
// in DirectShow, full volume is 0, silence is -10000
static long convertToDShowVolume (const float vol) noexcept
{
if (vol >= 1.0f) return 0;
if (vol <= 0.0f) return -10000;
return roundToInt ((vol * 10000.0f) - 10000.0f);
}
// Maps the DirectShow range back to 0.0 - 1.0.
float getVolume() const
{
long volume;
basicAudio->get_Volume (&volume);
return (volume + 10000) / 10000.0f;
}
private:
//======================================================================
// Private window message used by the graph to deliver events (see wndProc).
enum { graphEventID = WM_APP + 0x43f0 };
DirectShowComponent& component;
HWND hwnd;
HDC hdc;
enum State { uninitializedState, runningState, pausedState, stoppedState };
State state;
bool hasVideo;
// Native video size as reported by the renderer.
long videoWidth, videoHeight;
VideoRendererType type;
// DirectShow graph and its playback interfaces.
ComSmartPtr <IGraphBuilder> graphBuilder;
ComSmartPtr <IMediaControl> mediaControl;
ComSmartPtr <IMediaPosition> mediaPosition;
ComSmartPtr <IMediaEventEx> mediaEvent;
ComSmartPtr <IBasicAudio> basicAudio;
ComSmartPtr <IBaseFilter> baseFilter;
ScopedPointer <DirectShowHelpers::VideoRenderer> videoRenderer;
// Flags consumed by handleAsyncUpdate() on the message thread.
bool needToUpdateViewport, needToRecreateNativeWindow;
//======================================================================
// Registers (and unregisters at shutdown) the private window class used to
// host the DirectShow video output inside a JUCE component hierarchy.
class NativeWindowClass : private DeletedAtShutdown
{
public:
    bool isRegistered() const noexcept { return atom != 0; }
    LPCTSTR getWindowClassName() const noexcept { return (LPCTSTR) MAKELONG (atom, 0); }

    juce_DeclareSingleton_SingleThreaded_Minimal (NativeWindowClass);

private:
    NativeWindowClass()
        : atom (0)
    {
        // Use a time-based suffix so the class name can't collide with another
        // copy of the library loaded into the same process.
        String windowClassName ("JUCE_DIRECTSHOW_");
        windowClassName << (int) (Time::currentTimeMillis() & 0x7fffffff);

        HINSTANCE moduleHandle = (HINSTANCE) Process::getCurrentModuleInstanceHandle();

        // (The original code also called GetModuleFileName() here, but the
        // result was never used, so that dead code has been removed.)
        WNDCLASSEX wcex = { 0 };
        wcex.cbSize = sizeof (wcex);
        wcex.style = CS_OWNDC;
        wcex.lpfnWndProc = (WNDPROC) wndProc;
        wcex.lpszClassName = windowClassName.toWideCharPointer();
        wcex.hInstance = moduleHandle;

        atom = RegisterClassEx (&wcex);
        jassert (atom != 0);
    }

    ~NativeWindowClass()
    {
        if (atom != 0)
            UnregisterClass (getWindowClassName(), (HINSTANCE) Process::getCurrentModuleInstanceHandle());

        clearSingletonInstance();
    }

    // Routes messages for the hosted video window back to its DirectShowContext
    // (stored in the window's user data by NativeWindow).
    static LRESULT CALLBACK wndProc (HWND hwnd, UINT msg, WPARAM wParam, LPARAM lParam)
    {
        if (DirectShowContext* const c = (DirectShowContext*) GetWindowLongPtr (hwnd, GWLP_USERDATA))
        {
            switch (msg)
            {
                case WM_NCHITTEST:     return HTTRANSPARENT;
                case WM_ERASEBKGND:    return 1;
                case WM_DISPLAYCHANGE: c->displayResolutionChanged(); break;
                case graphEventID:     c->graphEventProc(); return 0;
                default:               break;
            }
        }

        return DefWindowProc (hwnd, msg, wParam, lParam);
    }

    ATOM atom;

    JUCE_DECLARE_NON_COPYABLE (NativeWindowClass)
};
//======================================================================
// RAII wrapper around the child HWND that the video renderer draws into.
// Stores 'userData' (the DirectShowContext) in the window's user data so
// NativeWindowClass::wndProc can route messages back to it.
class NativeWindow
{
public:
NativeWindow (HWND parentToAddTo, void* const userData)
: hwnd (0), hdc (0)
{
NativeWindowClass* const wc = NativeWindowClass::getInstance();
if (wc->isRegistered())
{
DWORD exstyle = 0;
DWORD type = WS_CHILD;
hwnd = CreateWindowEx (exstyle, wc->getWindowClassName(),
L"", type, 0, 0, 0, 0, parentToAddTo, 0,
(HINSTANCE) Process::getCurrentModuleInstanceHandle(), 0);
if (hwnd != 0)
{
hdc = GetDC (hwnd);
SetWindowLongPtr (hwnd, GWLP_USERDATA, (LONG_PTR) userData);
}
}
jassert (hwnd != 0);
}
~NativeWindow()
{
if (hwnd != 0)
{
// Clear the user data so no more messages get routed to a dead context.
SetWindowLongPtr (hwnd, GWLP_USERDATA, (LONG_PTR) 0);
DestroyWindow (hwnd);
}
}
HWND getHandle() const noexcept { return hwnd; }
HDC getContext() const noexcept { return hdc; }
void setWindowPosition (const Rectangle<int>& newBounds)
{
SetWindowPos (hwnd, 0, newBounds.getX(), newBounds.getY(),
newBounds.getWidth(), newBounds.getHeight(),
SWP_NOACTIVATE | SWP_NOZORDER | SWP_NOOWNERZORDER);
}
void showWindow (const bool shouldBeVisible)
{
ShowWindow (hwnd, shouldBeVisible ? SW_SHOWNA : SW_HIDE);
}
private:
HWND hwnd;
HDC hdc;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (NativeWindow)
};
// The context's current hosted window, or nullptr when none exists.
ScopedPointer<NativeWindow> nativeWindow;
//======================================================================
// Creates the hosted video window under the component's top-level peer.
// Returns false if the component has no peer yet.
bool createNativeWindow()
{
jassert (nativeWindow == nullptr);
if (ComponentPeer* const topLevelPeer = component.getTopLevelComponent()->getPeer())
{
nativeWindow = new NativeWindow ((HWND) topLevelPeer->getNativeHandle(), this);
hwnd = nativeWindow->getHandle();
if (hwnd != 0)
{
hdc = GetDC (hwnd);
component.updateContextPosition();
component.showContext (component.isShowing());
return true;
}
else
{
nativeWindow = nullptr;
}
}
else
{
// This should only be called when the component is visible and has a peer.
jassertfalse;
}
return false;
}
// Releases the DC and destroys the hosted window.
void deleteNativeWindow()
{
jassert (nativeWindow != nullptr);
ReleaseDC (hwnd, hdc);
hwnd = 0;
hdc = 0;
nativeWindow = nullptr;
}
// Returns true if the renderer filter has at least one connected input pin,
// i.e. the rendered media actually contains a video stream.
bool isRendererConnected()
{
ComSmartPtr <IEnumPins> enumPins;
HRESULT hr = baseFilter->EnumPins (enumPins.resetAndGetPointerAddress());
if (SUCCEEDED (hr))
hr = enumPins->Reset();
ComSmartPtr<IPin> pin;
while (SUCCEEDED (hr)
&& enumPins->Next (1, pin.resetAndGetPointerAddress(), nullptr) == S_OK)
{
ComSmartPtr<IPin> otherPin;
hr = pin->ConnectedTo (otherPin.resetAndGetPointerAddress());
if (SUCCEEDED (hr))
{
PIN_DIRECTION direction;
hr = pin->QueryDirection (&direction);
if (SUCCEEDED (hr) && direction == PINDIR_INPUT)
return true;
}
else if (hr == VFW_E_NOT_CONNECTED)
{
// An unconnected pin isn't an error - keep scanning the rest.
hr = S_OK;
}
}
return false;
}
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (DirectShowContext)
};
juce_ImplementSingleton_SingleThreaded (DirectShowComponent::DirectShowContext::NativeWindowClass);
//======================================================================
class DirectShowComponent::DirectShowComponentWatcher : public ComponentMovementWatcher
{
public:
DirectShowComponentWatcher (DirectShowComponent* const c)
: ComponentMovementWatcher (c),
owner (c)
{
}
void componentMovedOrResized (bool /*wasMoved*/, bool /*wasResized*/) override
{
if (owner->videoLoaded)
owner->updateContextPosition();
}
void componentPeerChanged() override
{
if (owner->videoLoaded)
owner->recreateNativeWindowAsync();
}
void componentVisibilityChanged() override
{
if (owner->videoLoaded)
owner->showContext (owner->isShowing());
}
private:
DirectShowComponent* const owner;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (DirectShowComponentWatcher)
};
//======================================================================
// Creates the component along with its playback context and the watcher that
// keeps the native video window in sync with component moves/peer changes.
DirectShowComponent::DirectShowComponent (VideoRendererType type)
: videoLoaded (false),
looping (false)
{
setOpaque (true);
context = new DirectShowContext (*this, type);
componentWatcher = new DirectShowComponentWatcher (this);
}
DirectShowComponent::~DirectShowComponent()
{
// Kill the watcher first so it can't fire during teardown.
componentWatcher = nullptr;
}
// Checks (once) whether DirectShow can be used on this machine.
bool DirectShowComponent::isDirectShowAvailable()
{
static bool isDSAvailable = DirectShowHelpers::checkDShowAvailability();
return isDSAvailable;
}
void DirectShowComponent::recreateNativeWindowAsync()
{
context->recreateNativeWindowAsync();
repaint();
}
// Repositions the native video window to cover this component's area.
void DirectShowComponent::updateContextPosition()
{
context->updateContextPosition();
if (getWidth() > 0 && getHeight() > 0)
if (ComponentPeer* peer = getTopLevelComponent()->getPeer())
context->updateWindowPosition (peer->getAreaCoveredBy (*this));
}
void DirectShowComponent::showContext (const bool shouldBeVisible)
{
context->showWindow (shouldBeVisible);
}
void DirectShowComponent::paint (Graphics& g)
{
// When a video is loaded the native window paints itself; otherwise show grey.
if (videoLoaded)
context->handleUpdateNowIfNeeded();
else
g.fillAll (Colours::grey);
}
//======================================================================
// Loads a movie from a path or URL string, closing any previous one first.
// Returns true if the media was opened successfully.
bool DirectShowComponent::loadMovie (const String& fileOrURLPath)
{
closeMovie();
videoLoaded = context->loadFile (fileOrURLPath);
if (videoLoaded)
{
videoPath = fileOrURLPath;
context->updateVideoPosition();
}
return videoLoaded;
}
bool DirectShowComponent::loadMovie (const File& videoFile)
{
return loadMovie (videoFile.getFullPathName());
}
bool DirectShowComponent::loadMovie (const URL& videoURL)
{
return loadMovie (videoURL.toString (false));
}
// Releases the playback graph and forgets the current path.
void DirectShowComponent::closeMovie()
{
if (videoLoaded)
context->release();
videoLoaded = false;
videoPath.clear();
}
//======================================================================
// Simple state accessors.
File DirectShowComponent::getCurrentMoviePath() const { return videoPath; }
bool DirectShowComponent::isMovieOpen() const { return videoLoaded; }
double DirectShowComponent::getMovieDuration() const { return videoLoaded ? context->getDuration() : 0.0; }
void DirectShowComponent::setLooping (const bool shouldLoop) { looping = shouldLoop; }
bool DirectShowComponent::isLooping() const { return looping; }
// Reports the video's native size (0x0 when there's no video stream).
void DirectShowComponent::getMovieNormalSize (int &width, int &height) const
{
width = context->getVideoWidth();
height = context->getVideoHeight();
}
//======================================================================
void DirectShowComponent::setBoundsWithCorrectAspectRatio (const Rectangle<int>& spaceToFitWithin,
RectanglePlacement placement)
{
int normalWidth, normalHeight;
getMovieNormalSize (normalWidth, normalHeight);
const Rectangle<int> normalSize (0, 0, normalWidth, normalHeight);
if (! (spaceToFitWithin.isEmpty() || normalSize.isEmpty()))
setBounds (placement.appliedTo (normalSize, spaceToFitWithin));
else
setBounds (spaceToFitWithin);
}
//======================================================================
// Transport controls: all are no-ops when no movie is loaded.
void DirectShowComponent::play()
{
if (videoLoaded)
context->run();
}
// Note: 'stop' pauses the graph rather than fully stopping it, so the
// current frame stays on screen and the position is preserved.
void DirectShowComponent::stop()
{
if (videoLoaded)
context->pause();
}
bool DirectShowComponent::isPlaying() const
{
return context->isRunning();
}
void DirectShowComponent::goToStart()
{
setPosition (0.0);
}
// Position is in seconds.
void DirectShowComponent::setPosition (const double seconds)
{
if (videoLoaded)
context->setPosition (seconds);
}
double DirectShowComponent::getPosition() const
{
return videoLoaded ? context->getPosition() : 0.0;
}
// Playback rate multiplier (1.0 = normal speed).
void DirectShowComponent::setSpeed (const float newSpeed)
{
if (videoLoaded)
context->setSpeed (newSpeed);
}
// Volume is 0.0 - 1.0 (converted to DirectShow's scale by the context).
void DirectShowComponent::setMovieVolume (const float newVolume)
{
if (videoLoaded)
context->setVolume (newVolume);
}
float DirectShowComponent::getMovieVolume() const
{
return videoLoaded ? context->getVolume() : 0.0f;
}

View file

@ -0,0 +1,483 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
using namespace QTOLibrary;
using namespace QTOControlLib;
bool juce_OpenQuickTimeMovieFromStream (InputStream* input, Movie& movie, Handle& dataHandle);
static bool isQTAvailable = false;
//==============================================================================
// Private state for the QuickTime component: the ActiveX control, its movie
// interface, and the data handle backing a movie opened from a stream.
class QuickTimeMovieComponent::Pimpl
{
public:
Pimpl() : dataHandle (0)
{
}
~Pimpl()
{
clearHandle();
}
// Frees the movie's backing data handle, if one is currently allocated.
void clearHandle()
{
if (dataHandle != 0)
{
DisposeHandle (dataHandle);
dataHandle = 0;
}
}
IQTControlPtr qtControl;
IQTMoviePtr qtMovie;
Handle dataHandle;
};
//==============================================================================
QuickTimeMovieComponent::QuickTimeMovieComponent()
: movieLoaded (false),
controllerVisible (true)
{
pimpl = new Pimpl();
setMouseEventsAllowed (false);
}
QuickTimeMovieComponent::~QuickTimeMovieComponent()
{
// Close the movie and release the control before destroying the pimpl.
closeMovie();
pimpl->qtControl = 0;
deleteControl();
pimpl = nullptr;
}
// Lazily initialises QuickTime; the result is cached in a static flag.
bool QuickTimeMovieComponent::isQuickTimeAvailable() noexcept
{
if (! isQTAvailable)
isQTAvailable = (InitializeQTML (0) == noErr) && (EnterMovies() == noErr);
return isQTAvailable;
}
//==============================================================================
// Creates the embedded QuickTime ActiveX control once the component is
// actually showing, and (re)loads any movie that was requested earlier.
void QuickTimeMovieComponent::createControlIfNeeded()
{
if (isShowing() && ! isControlCreated())
{
const IID qtIID = __uuidof (QTControl);
if (createControl (&qtIID))
{
const IID qtInterfaceIID = __uuidof (IQTControl);
pimpl->qtControl = (IQTControl*) queryInterface (&qtInterfaceIID);
if (pimpl->qtControl != nullptr)
{
pimpl->qtControl->Release(); // it has one ref too many at this point
pimpl->qtControl->QuickTimeInitialize();
pimpl->qtControl->PutSizing (qtMovieFitsControl);
if (movieFile != File::nonexistent)
loadMovie (movieFile, controllerVisible);
}
}
}
}
bool QuickTimeMovieComponent::isControlCreated() const
{
return isControlOpen();
}
// Loads a movie from a stream into the embedded QuickTime control. Takes
// ownership of (and deletes) movieStream. Returns true if the movie opened.
bool QuickTimeMovieComponent::loadMovie (InputStream* movieStream,
                                         const bool isControllerVisible)
{
    const ScopedPointer<InputStream> movieStreamDeleter (movieStream);

    movieFile = File::nonexistent;
    movieLoaded = false;
    pimpl->qtMovie = 0;
    controllerVisible = isControllerVisible;
    createControlIfNeeded();

    if (isControlCreated())
    {
        if (pimpl->qtControl != 0)
        {
            pimpl->qtControl->Put_MovieHandle (0);
            pimpl->clearHandle();

            // Initialise to null: the original left this uninitialised, so the
            // 'movie == 0' check below could read garbage if the open call
            // failed without assigning to it.
            Movie movie = 0;

            if (juce_OpenQuickTimeMovieFromStream (movieStream, movie, pimpl->dataHandle))
            {
                pimpl->qtControl->Put_MovieHandle ((long) (pointer_sized_int) movie);

                pimpl->qtMovie = pimpl->qtControl->GetMovie();

                if (pimpl->qtMovie != 0)
                    pimpl->qtMovie->PutMovieControllerType (isControllerVisible ? qtMovieControllerTypeStandard
                                                                                : qtMovieControllerTypeNone);
            }

            // If opening failed, don't keep a data handle for a dead movie.
            if (movie == 0)
                pimpl->clearHandle();
        }

        movieLoaded = (pimpl->qtMovie != 0);
    }
    else
    {
        // You're trying to open a movie when the control hasn't yet been created, probably because
        // you've not yet added this component to a Window and made the whole component hierarchy visible.
        jassertfalse;
    }

    return movieLoaded;
}
// Stops playback, detaches the movie from the control and frees its data.
void QuickTimeMovieComponent::closeMovie()
{
stop();
movieFile = File::nonexistent;
movieLoaded = false;
pimpl->qtMovie = 0;
if (pimpl->qtControl != 0)
pimpl->qtControl->Put_MovieHandle (0);
pimpl->clearHandle();
}
File QuickTimeMovieComponent::getCurrentMovieFile() const
{
return movieFile;
}
bool QuickTimeMovieComponent::isMovieOpen() const
{
return movieLoaded;
}
// Duration in seconds (QuickTime reports it in time-scale units).
double QuickTimeMovieComponent::getMovieDuration() const
{
if (pimpl->qtMovie != 0)
return pimpl->qtMovie->GetDuration() / (double) pimpl->qtMovie->GetTimeScale();
return 0.0;
}
// Reports the movie's natural size, or 0x0 if no movie is loaded.
void QuickTimeMovieComponent::getMovieNormalSize (int& width, int& height) const
{
if (pimpl->qtMovie != 0)
{
struct QTRECT r = pimpl->qtMovie->GetNaturalRect();
width = r.right - r.left;
height = r.bottom - r.top;
}
else
{
width = height = 0;
}
}
void QuickTimeMovieComponent::play()
{
    // Starts playback; does nothing if no movie is loaded.
    if (pimpl->qtMovie == 0)
        return;

    pimpl->qtMovie->Play();
}
void QuickTimeMovieComponent::stop()
{
    // Halts playback; does nothing if no movie is loaded.
    if (pimpl->qtMovie == 0)
        return;

    pimpl->qtMovie->Stop();
}
bool QuickTimeMovieComponent::isPlaying() const
{
    // Playing means a movie is loaded and its playback rate is non-zero.
    if (pimpl->qtMovie == 0)
        return false;

    return pimpl->qtMovie->GetRate() != 0.0f;
}
void QuickTimeMovieComponent::setPosition (const double seconds)
{
    // Seeks to the given time; does nothing if no movie is loaded.
    if (pimpl->qtMovie == 0)
        return;

    // Convert seconds into the movie's native time units via its time scale.
    const long newTime = (long) (seconds * pimpl->qtMovie->GetTimeScale());
    pimpl->qtMovie->PutTime (newTime);
}
double QuickTimeMovieComponent::getPosition() const
{
    // Returns the current playback position in seconds, or 0 if nothing is loaded.
    if (pimpl->qtMovie == 0)
        return 0.0;

    // The raw time is in native units; divide by the time scale to get seconds.
    const double timeScale = (double) pimpl->qtMovie->GetTimeScale();
    return pimpl->qtMovie->GetTime() / timeScale;
}
void QuickTimeMovieComponent::setSpeed (const float newSpeed)
{
    // Sets the playback rate (1.0 = normal); does nothing if no movie is loaded.
    if (pimpl->qtMovie == 0)
        return;

    pimpl->qtMovie->PutRate (newSpeed);
}
void QuickTimeMovieComponent::setMovieVolume (const float newVolume)
{
    // Applies the volume and mutes whenever it is zero or negative.
    // Does nothing if no movie is loaded.
    if (pimpl->qtMovie == 0)
        return;

    pimpl->qtMovie->PutAudioVolume (newVolume);
    pimpl->qtMovie->PutAudioMute (newVolume <= 0);
}
float QuickTimeMovieComponent::getMovieVolume() const
{
    // Returns the current audio volume, or 0 if no movie is loaded.
    if (pimpl->qtMovie == 0)
        return 0.0f;

    return pimpl->qtMovie->GetAudioVolume();
}
void QuickTimeMovieComponent::setLooping (const bool shouldLoop)
{
    // Enables or disables looped playback; does nothing if no movie is loaded.
    if (pimpl->qtMovie == 0)
        return;

    pimpl->qtMovie->PutLoop (shouldLoop);
}
bool QuickTimeMovieComponent::isLooping() const
{
    // Looping requires a loaded movie with its loop flag set.
    if (pimpl->qtMovie == 0)
        return false;

    return pimpl->qtMovie->GetLoop() != 0;
}
bool QuickTimeMovieComponent::isControllerVisible() const
{
    // Reflects the controller-visibility flag set by the last loadMovie() call.
    return controllerVisible;
}
void QuickTimeMovieComponent::parentHierarchyChanged()
{
    // The ActiveX control can only be created once the component is attached to
    // a visible window hierarchy, so retry creation on every hierarchy change
    // before forwarding to the base class.
    createControlIfNeeded();
    QTCompBaseClass::parentHierarchyChanged();
}
void QuickTimeMovieComponent::visibilityChanged()
{
    // Becoming visible may be the first opportunity to create the ActiveX
    // control, so attempt it before forwarding to the base class.
    createControlIfNeeded();
    QTCompBaseClass::visibilityChanged();
}
void QuickTimeMovieComponent::paint (Graphics& g)
{
    // Once the embedded control exists it paints itself, so there's nothing to do.
    if (isControlCreated())
        return;

    // Otherwise just blank the component's area.
    g.fillAll (Colours::black);
}
//==============================================================================
// Builds a QuickTime data reference that wraps an in-memory data handle.
// A Pascal-string copy of fileName is appended so QT can deduce the content
// type from the suffix, followed by a data-ref extension atom declaring the
// macOS movie file type. Returns 0 on failure; on success the caller owns the
// returned handle and must dispose of it.
static Handle createHandleDataRef (Handle dataHandle, const char* fileName)
{
    Handle dataRef = 0;
    // The data ref begins with the pointer to the data handle itself.
    OSStatus err = PtrToHand (&dataHandle, &dataRef, sizeof (Handle));
    if (err == noErr)
    {
        Str255 suffix;
#if JUCE_MSVC
#pragma warning (push)
#pragma warning (disable: 4244 4996)
#endif
        // Convert the C string into a Pascal string: length byte first, then
        // the characters. NB: the length is truncated to a byte, and at most
        // 128 characters are copied.
        suffix[0] = strlen (fileName);
        strncpy ((char*) suffix + 1, fileName, 128);
#if JUCE_MSVC
#pragma warning (pop)
#endif
        // Append the filename (length byte + characters) to the data ref.
        err = PtrAndHand (suffix, dataRef, suffix[0] + 1);
        if (err == noErr)
        {
            // Append an extension atom (big-endian: size, atom type, value)
            // telling QT to treat the data as a movie file.
            long atoms[3];
            atoms[0] = EndianU32_NtoB (3 * sizeof (long));
            atoms[1] = EndianU32_NtoB (kDataRefExtensionMacOSFileType);
            atoms[2] = EndianU32_NtoB (MovieFileType);
            err = PtrAndHand (atoms, dataRef, 3 * sizeof (long));
            if (err == noErr)
                return dataRef;
        }
        // Something failed after allocation: free the partially-built data ref.
        DisposeHandle (dataRef);
    }
    return 0;
}
// Creates a CFString from the juce String's UTF-8 data.
// The caller owns the returned CFStringRef and must CFRelease it.
static CFStringRef juceStringToCFString (const String& s)
{
    return CFStringCreateWithCString (kCFAllocatorDefault, s.toUTF8(), kCFStringEncodingUTF8);
}
// Appends the common instantiation properties to the caller's property array
// (starting at index 'prop') and asks QuickTime to create the movie from them.
// The caller has already filled in the data-location property/properties.
// NB: props must have room for at least prop + 3 entries (asserted below
// against the array size of 5 used by the caller). Returns true on success.
static bool openMovie (QTNewMoviePropertyElement* props, int prop, Movie& movie)
{
    Boolean trueBool = true;
    // Don't resolve external data references during instantiation.
    props[prop].propClass = kQTPropertyClass_MovieInstantiation;
    props[prop].propID = kQTMovieInstantiationPropertyID_DontResolveDataRefs;
    props[prop].propValueSize = sizeof (trueBool);
    props[prop].propValueAddress = &trueBool;
    ++prop;
    // Allow asynchronous loading.
    props[prop].propClass = kQTPropertyClass_MovieInstantiation;
    props[prop].propID = kQTMovieInstantiationPropertyID_AsyncOK;
    props[prop].propValueSize = sizeof (trueBool);
    props[prop].propValueAddress = &trueBool;
    ++prop;
    // Create the movie in the active state.
    Boolean isActive = true;
    props[prop].propClass = kQTPropertyClass_NewMovieProperty;
    props[prop].propID = kQTNewMoviePropertyID_Active;
    props[prop].propValueSize = sizeof (isActive);
    props[prop].propValueAddress = &isActive;
    ++prop;
    MacSetPort (0);
    // Guard against overrunning the caller's 5-element property array.
    jassert (prop <= 5);
    OSStatus err = NewMovieFromProperties (prop, props, 0, 0, &movie);
    return err == noErr;
}
// Tries to open a QuickTime movie from an arbitrary InputStream.
// For file streams, a file data-reference is used directly; for any other
// stream, the entire stream is copied into a memory handle (returned to the
// caller via dataHandle, who is responsible for it) and several file-type
// suffixes are tried in turn. Does NOT take ownership of the input stream.
// Returns true and sets 'movie' on success.
bool juce_OpenQuickTimeMovieFromStream (InputStream* input, Movie& movie, Handle& dataHandle)
{
    if (input == nullptr)
        return false;
    dataHandle = 0;
    bool ok = false;
    QTNewMoviePropertyElement props[5] = { 0 };
    int prop = 0;
    // First property: the data location; the remaining slots are filled by openMovie().
    DataReferenceRecord dr;
    props[prop].propClass = kQTPropertyClass_DataLocation;
    props[prop].propID = kQTDataLocationPropertyID_DataReference;
    props[prop].propValueSize = sizeof (dr);
    props[prop].propValueAddress = &dr;
    ++prop;
    FileInputStream* const fin = dynamic_cast <FileInputStream*> (input);
    if (fin != nullptr)
    {
        // File-backed stream: let QT read from the file directly via a path data ref.
        CFStringRef filePath = juceStringToCFString (fin->getFile().getFullPathName());
        QTNewDataReferenceFromFullPathCFString (filePath, (QTPathStyle) kQTNativeDefaultPathStyle, 0,
                                                &dr.dataRef, &dr.dataRefType);
        ok = openMovie (props, prop, movie);
        DisposeHandle (dr.dataRef);
        CFRelease (filePath);
    }
    else
    {
        // sanity-check because this currently needs to load the whole stream into memory..
        jassert (input->getTotalLength() < 50 * 1024 * 1024);
        dataHandle = NewHandle ((Size) input->getTotalLength());
        HLock (dataHandle);
        // read the entire stream into memory - this is a pain, but can't get it to work
        // properly using a custom callback to supply the data.
        input->read (*dataHandle, (int) input->getTotalLength());
        HUnlock (dataHandle);
        // different types to get QT to try. (We should really be a bit smarter here by
        // working out in advance which one the stream contains, rather than just trying
        // each one)
        // NB: each entry is a Pascal string - "\04" is the length byte.
        static const char* const suffixesToTry[] = { "\04.mov", "\04.mp3",
                                                     "\04.avi", "\04.m4a" };
        for (int i = 0; i < numElementsInArray (suffixesToTry) && ! ok; ++i)
        {
            /* // this fails for some bizarre reason - it can be bodged to work with
               // movies, but can't seem to do it for other file types..
            QTNewMovieUserProcRecord procInfo;
            procInfo.getMovieUserProc = NewGetMovieUPP (readMovieStreamProc);
            procInfo.getMovieUserProcRefcon = this;
            procInfo.defaultDataRef.dataRef = dataRef;
            procInfo.defaultDataRef.dataRefType = HandleDataHandlerSubType;
            props[prop].propClass = kQTPropertyClass_DataLocation;
            props[prop].propID = kQTDataLocationPropertyID_MovieUserProc;
            props[prop].propValueSize = sizeof (procInfo);
            props[prop].propValueAddress = (void*) &procInfo;
            ++prop; */
            // Wrap the in-memory handle in a data ref with this suffix, and see if QT accepts it.
            dr.dataRef = createHandleDataRef (dataHandle, suffixesToTry [i]);
            dr.dataRefType = HandleDataHandlerSubType;
            ok = openMovie (props, prop, movie);
            DisposeHandle (dr.dataRef);
        }
    }
    return ok;
}
bool QuickTimeMovieComponent::loadMovie (const File& movieFile_,
                                         const bool isControllerVisible)
{
    // NB: the stream-based loadMovie() resets movieFile to File::nonexistent,
    // so the member must be assigned *after* that call returns.
    const bool ok = loadMovie (static_cast <InputStream*> (movieFile_.createInputStream()), isControllerVisible);
    movieFile = movieFile_;
    return ok;
}
bool QuickTimeMovieComponent::loadMovie (const URL& movieURL,
const bool isControllerVisible)
{
return loadMovie (static_cast <InputStream*> (movieURL.createInputStream (false)), isControllerVisible);
}
void QuickTimeMovieComponent::goToStart()
{
    // Rewinds to the beginning of the movie.
    setPosition (0.0);
}
void QuickTimeMovieComponent::setBoundsWithCorrectAspectRatio (const Rectangle<int>& spaceToFitWithin,
                                                               RectanglePlacement placement)
{
    // Fits the component into the given area while preserving the movie's
    // natural aspect ratio, using the supplied placement rules.
    int normalWidth, normalHeight;
    getMovieNormalSize (normalWidth, normalHeight);

    const Rectangle<int> normalSize (0, 0, normalWidth, normalHeight);

    // If either rectangle is degenerate there's no ratio to preserve,
    // so just fill the whole target area.
    if (spaceToFitWithin.isEmpty() || normalSize.isEmpty())
        setBounds (spaceToFitWithin);
    else
        setBounds (placement.appliedTo (normalSize, spaceToFitWithin));
}