Mirror of https://github.com/juce-framework/JUCE.git

Added Animated App template and examples

Felix Faire 2014-10-29 15:55:23 +00:00
parent fefcf7aca6
commit ff6520a89a
1141 changed files with 438491 additions and 94 deletions


@@ -0,0 +1,89 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
CameraDevice::CameraDevice (const String& nm, int index, int minWidth, int minHeight, int maxWidth, int maxHeight)
: name (nm), pimpl (new Pimpl (name, index, minWidth, minHeight, maxWidth, maxHeight))
{
}
CameraDevice::~CameraDevice()
{
stopRecording();
pimpl = nullptr;
}
Component* CameraDevice::createViewerComponent()
{
return new ViewerComponent (*this);
}
void CameraDevice::startRecordingToFile (const File& file, int quality)
{
stopRecording();
pimpl->startRecordingToFile (file, quality);
}
Time CameraDevice::getTimeOfFirstRecordedFrame() const
{
return pimpl->getTimeOfFirstRecordedFrame();
}
void CameraDevice::stopRecording()
{
pimpl->stopRecording();
}
void CameraDevice::addListener (Listener* listenerToAdd)
{
if (listenerToAdd != nullptr)
pimpl->addListener (listenerToAdd);
}
void CameraDevice::removeListener (Listener* listenerToRemove)
{
if (listenerToRemove != nullptr)
pimpl->removeListener (listenerToRemove);
}
//==============================================================================
StringArray CameraDevice::getAvailableDevices()
{
JUCE_AUTORELEASEPOOL
{
return Pimpl::getAvailableDevices();
}
}
CameraDevice* CameraDevice::openDevice (int index,
int minWidth, int minHeight,
int maxWidth, int maxHeight)
{
ScopedPointer<CameraDevice> d (new CameraDevice (getAvailableDevices() [index], index,
minWidth, minHeight, maxWidth, maxHeight));
if (d->pimpl->openedOk())
return d.release();
return nullptr;
}


@@ -0,0 +1,161 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#ifndef JUCE_CAMERADEVICE_H_INCLUDED
#define JUCE_CAMERADEVICE_H_INCLUDED
#if JUCE_USE_CAMERA || DOXYGEN
//==============================================================================
/**
Controls any video capture devices that might be available.
Use getAvailableDevices() to list the devices that are attached to the
system, then call openDevice to open one for use. Once you have a CameraDevice
object, you can get a viewer component from it, and use its methods to
stream to a file or capture still-frames.
*/
class JUCE_API CameraDevice
{
public:
/** Destructor. */
virtual ~CameraDevice();
//==============================================================================
/** Returns a list of the available cameras on this machine.
You can open one of these devices by calling openDevice().
*/
static StringArray getAvailableDevices();
/** Opens a camera device.
The index parameter indicates which of the items returned by getAvailableDevices()
to open.
The size constraints allow the method to choose between different resolutions if
the camera supports this. If the resolution can't be specified (e.g. on the Mac)
then these will be ignored.
*/
static CameraDevice* openDevice (int deviceIndex,
int minWidth = 128, int minHeight = 64,
int maxWidth = 1024, int maxHeight = 768);
//==============================================================================
/** Returns the name of this device */
const String& getName() const noexcept { return name; }
/** Creates a component that can be used to display a preview of the
video from this camera.
*/
Component* createViewerComponent();
//==============================================================================
/** Starts recording video to the specified file.
You should use getFileExtension() to find out the correct extension to
use for your filename.
If the file exists, it will be deleted before the recording starts.
This method may not start recording instantly, so if you need to know the
exact time at which the file begins, you can call getTimeOfFirstRecordedFrame()
after the recording has finished.
The quality parameter can be 0, 1, or 2, to indicate low, medium, or high. It may
or may not be used, depending on the driver.
*/
void startRecordingToFile (const File& file, int quality = 2);
/** Stops recording, after a call to startRecordingToFile(). */
void stopRecording();
/** Returns the file extension that should be used for the files
that you pass to startRecordingToFile().
This may be platform-specific, e.g. ".mov" or ".avi".
*/
static String getFileExtension();
/** After calling stopRecording(), this method can be called to return the timestamp
of the first frame that was written to the file.
*/
Time getTimeOfFirstRecordedFrame() const;
//==============================================================================
/**
Receives callbacks with images from a CameraDevice.
@see CameraDevice::addListener
*/
class JUCE_API Listener
{
public:
Listener() {}
virtual ~Listener() {}
/** This method is called when a new image arrives.
This may be called by any thread, so be careful about thread-safety,
and make sure that you process the data as quickly as possible to
avoid glitching!
*/
virtual void imageReceived (const Image& image) = 0;
};
/** Adds a listener to receive images from the camera.
Be very careful not to delete the listener without first removing it by calling
removeListener().
*/
void addListener (Listener* listenerToAdd);
/** Removes a listener that was previously added with addListener(). */
void removeListener (Listener* listenerToRemove);
private:
String name;
struct Pimpl;
friend struct Pimpl;
friend struct ContainerDeletePolicy<Pimpl>;
ScopedPointer<Pimpl> pimpl;
struct ViewerComponent;
friend struct ViewerComponent;
CameraDevice (const String& name, int index,
int minWidth, int minHeight, int maxWidth, int maxHeight);
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (CameraDevice)
};
#ifndef DOXYGEN
/** This typedef is just for compatibility with VC6 - newer code should use the CameraDevice::Listener class directly. */
typedef CameraDevice::Listener CameraImageListener;
#endif
#endif
#endif // JUCE_CAMERADEVICE_H_INCLUDED
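
The class comments above describe the intended call sequence: list the attached cameras, open one, attach a listener, and record to a file. The sketch below is not part of this commit; it uses only the API declared in this header, and the names (SnapshotListener, recordShortClip, the output path) are illustrative.

// Hypothetical usage sketch for the CameraDevice API declared above.
// Assumes JUCE_USE_CAMERA is enabled and at least one camera is attached.
struct SnapshotListener  : public CameraDevice::Listener
{
    // Called from an arbitrary thread; Image is reference-counted, so taking
    // a copy here is cheap and keeps the callback fast.
    void imageReceived (const Image& image) override    { lastFrame = image; }

    Image lastFrame;
};

static void recordShortClip()
{
    StringArray devices (CameraDevice::getAvailableDevices());
    DBG ("Cameras: " + devices.joinIntoString (", "));

    if (devices.size() == 0)
        return;

    ScopedPointer<CameraDevice> camera (CameraDevice::openDevice (0));

    if (camera == nullptr)
        return;

    SnapshotListener listener;
    camera->addListener (&listener);

    // A live preview could be shown by adding camera->createViewerComponent()
    // to a visible parent component (the caller owns the returned component).

    File output (File::getSpecialLocation (File::userDesktopDirectory)
                   .getChildFile ("capture" + CameraDevice::getFileExtension()));

    camera->startRecordingToFile (output, 2);   // quality 2 == high
    Thread::sleep (5000);                       // (blocking only to keep the sketch short)
    camera->stopRecording();

    DBG ("First frame written at: " + camera->getTimeOfFirstRecordedFrame().toString (true, true));

    camera->removeListener (&listener);         // always remove listeners before the device is deleted
}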


@@ -0,0 +1,21 @@
{
"id": "juce_video",
"name": "JUCE video playback and capture classes",
"version": "3.0.8",
"description": "Classes for playing video and capturing camera input.",
"website": "http://www.juce.com/juce",
"license": "GPL/Commercial",
"dependencies": [ { "id": "juce_gui_extra", "version": "matching" } ],
"include": "juce_video.h",
"compile": [ { "file": "juce_video.cpp", "target": "! xcode" },
{ "file": "juce_video.mm", "target": "xcode" } ],
"browse": [ "playback/*",
"capture/*",
"native/*" ],
"OSXFrameworks": "QTKit QuickTime"
}


@@ -0,0 +1,146 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#if defined (JUCE_VIDEO_H_INCLUDED) && ! JUCE_AMALGAMATED_INCLUDE
/* When you add this cpp file to your project, you mustn't include it in a file where you've
already included any other headers - just put it inside a file on its own, possibly with your config
flags preceding it, but don't include anything else. That also includes avoiding any automatic prefix
header files that the compiler may be using.
*/
#error "Incorrect use of JUCE cpp file"
#endif
// Your project must contain an AppConfig.h file with your project-specific settings in it,
// and your header search path must make it accessible to the module's files.
#include "AppConfig.h"
#include "../juce_core/native/juce_BasicNativeHeaders.h"
#include "../juce_gui_extra/juce_gui_extra.h"
#include "juce_video.h"
#if JUCE_MAC
#if JUCE_QUICKTIME
#define Point CarbonDummyPointName
#define Component CarbonDummyCompName
#import <QTKit/QTKit.h>
#undef Point
#undef Component
#endif
//==============================================================================
#elif JUCE_WINDOWS
#if JUCE_QUICKTIME
/* If you've got an include error here, you probably need to install the QuickTime SDK and
add its header directory to your include path.
Alternatively, if you don't need any QuickTime services, just set the JUCE_QUICKTIME flag to 0.
*/
#include <Movies.h>
#include <QTML.h>
#include <QuickTimeComponents.h>
#include <MediaHandlers.h>
#include <ImageCodec.h>
/* If you've got QuickTime 7 installed, then these COM objects should be found in
the "\Program Files\Quicktime" directory. You'll need to add this directory to
your include search path to make these import statements work.
*/
#import <QTOLibrary.dll>
#import <QTOControl.dll>
#if JUCE_MSVC && ! JUCE_DONT_AUTOLINK_TO_WIN32_LIBRARIES
#pragma comment (lib, "QTMLClient.lib")
#endif
#endif
#if JUCE_USE_CAMERA || JUCE_DIRECTSHOW
/* If you're using the camera classes, you'll need access to a few DirectShow headers.
These files are provided in the normal Windows SDK. */
#include <dshow.h>
#include <dshowasf.h>
#endif
#if JUCE_DIRECTSHOW && JUCE_MEDIAFOUNDATION
#include <evr.h>
#endif
#if JUCE_USE_CAMERA && JUCE_MSVC && ! JUCE_DONT_AUTOLINK_TO_WIN32_LIBRARIES
#pragma comment (lib, "Strmiids.lib")
#pragma comment (lib, "wmvcore.lib")
#endif
#if JUCE_MEDIAFOUNDATION && JUCE_MSVC && ! JUCE_DONT_AUTOLINK_TO_WIN32_LIBRARIES
#pragma comment (lib, "mfuuid.lib")
#endif
#if JUCE_DIRECTSHOW && JUCE_MSVC && ! JUCE_DONT_AUTOLINK_TO_WIN32_LIBRARIES
#pragma comment (lib, "strmiids.lib")
#endif
#endif
//==============================================================================
using namespace juce;
namespace juce
{
#if JUCE_MAC || JUCE_IOS
#include "../juce_core/native/juce_osx_ObjCHelpers.h"
#if JUCE_USE_CAMERA
#include "native/juce_mac_CameraDevice.mm"
#endif
#if JUCE_QUICKTIME
#include "native/juce_mac_QuickTimeMovieComponent.mm"
#endif
#elif JUCE_WINDOWS
#include "../juce_core/native/juce_win32_ComSmartPtr.h"
#if JUCE_USE_CAMERA
#include "native/juce_win32_CameraDevice.cpp"
#endif
#if JUCE_DIRECTSHOW
#include "native/juce_win32_DirectShowComponent.cpp"
#endif
#if JUCE_QUICKTIME
#include "native/juce_win32_QuickTimeMovieComponent.cpp"
#endif
#elif JUCE_LINUX
#elif JUCE_ANDROID
#if JUCE_USE_CAMERA
#include "native/juce_android_CameraDevice.cpp"
#endif
#endif
#if JUCE_USE_CAMERA
#include "capture/juce_CameraDevice.cpp"
#endif
}


@@ -0,0 +1,82 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#ifndef JUCE_VIDEO_H_INCLUDED
#define JUCE_VIDEO_H_INCLUDED
//=============================================================================
#include "../juce_gui_extra/juce_gui_extra.h"
//=============================================================================
/** Config: JUCE_DIRECTSHOW
Enables DirectShow media-streaming architecture (MS Windows only).
*/
#ifndef JUCE_DIRECTSHOW
#define JUCE_DIRECTSHOW 0
#endif
/** Config: JUCE_MEDIAFOUNDATION
Enables Media Foundation multimedia platform (Windows Vista and above).
*/
#ifndef JUCE_MEDIAFOUNDATION
#define JUCE_MEDIAFOUNDATION 0
#endif
#if ! JUCE_WINDOWS
#undef JUCE_DIRECTSHOW
#undef JUCE_MEDIAFOUNDATION
#endif
/** Config: JUCE_QUICKTIME
Enables the QuickTimeMovieComponent class (Mac and Windows).
If you're building on Windows, you'll need to have the Apple QuickTime SDK
installed, and its header files will need to be on your include path.
*/
#if ! (defined (JUCE_QUICKTIME) || JUCE_LINUX || JUCE_IOS || JUCE_ANDROID || (JUCE_WINDOWS && ! JUCE_MSVC))
#define JUCE_QUICKTIME 0
#endif
/** Config: JUCE_USE_CAMERA
Enables web-cam support using the CameraDevice class (Mac and Windows).
*/
#if (JUCE_QUICKTIME || JUCE_WINDOWS) && ! defined (JUCE_USE_CAMERA)
#define JUCE_USE_CAMERA 0
#endif
#if ! (JUCE_MAC || JUCE_WINDOWS)
#undef JUCE_QUICKTIME
#undef JUCE_USE_CAMERA
#endif
//=============================================================================
namespace juce
{
#include "playback/juce_DirectShowComponent.h"
#include "playback/juce_QuickTimeMovieComponent.h"
#include "capture/juce_CameraDevice.h"
}
#endif // JUCE_VIDEO_H_INCLUDED
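
juce_video.cpp expects these flags to be defined in the project's AppConfig.h before the module is compiled. A sketch of the relevant AppConfig.h section (not part of this commit; a generated project normally writes this for you):

// Hypothetical AppConfig.h fragment enabling the options documented above.
#define JUCE_QUICKTIME        1   // QuickTimeMovieComponent (Mac, or Windows with the QuickTime SDK)
#define JUCE_USE_CAMERA       1   // CameraDevice / webcam support
#define JUCE_DIRECTSHOW       1   // DirectShowComponent (Windows only; ignored elsewhere)
#define JUCE_MEDIAFOUNDATION  1   // EVR rendering for DirectShowComponent (Vista and later)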


@@ -0,0 +1,25 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#include "juce_video.cpp"


@@ -0,0 +1,83 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
struct CameraDevice::Pimpl
{
Pimpl (const String&, int /*index*/, int /*minWidth*/, int /*minHeight*/, int /*maxWidth*/, int /*maxHeight*/)
{
}
~Pimpl()
{
}
void startRecordingToFile (const File&, int /*quality*/)
{
}
void stopRecording()
{
}
Time getTimeOfFirstRecordedFrame() const
{
return Time();
}
void addListener (CameraDevice::Listener* listenerToAdd)
{
const ScopedLock sl (listenerLock);
listeners.addIfNotAlreadyThere (listenerToAdd);
}
void removeListener (CameraDevice::Listener* listenerToRemove)
{
const ScopedLock sl (listenerLock);
listeners.removeFirstMatchingValue (listenerToRemove);
}
static StringArray getAvailableDevices()
{
StringArray results;
return results;
}
private:
// These members are referenced by addListener()/removeListener() above, so they
// must be declared for this stub to compile.
Array<CameraDevice::Listener*> listeners;
CriticalSection listenerLock;
JUCE_DECLARE_NON_COPYABLE (Pimpl)
};
struct CameraDevice::ViewerComponent : public Component
{
ViewerComponent (CameraDevice&)
{
}
JUCE_DECLARE_NON_COPYABLE (ViewerComponent)
};
String CameraDevice::getFileExtension()
{
return ".mov";
}


@@ -0,0 +1,353 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#if ! JUCE_QUICKTIME
#error "To support cameras in OSX you'll need to enable the JUCE_QUICKTIME flag"
#endif
extern Image juce_createImageFromCIImage (CIImage*, int w, int h);
struct CameraDevice::Pimpl
{
Pimpl (const String&, const int index, int /*minWidth*/, int /*minHeight*/, int /*maxWidth*/, int /*maxHeight*/)
: input (nil),
audioDevice (nil),
audioInput (nil),
session (nil),
fileOutput (nil),
imageOutput (nil),
firstPresentationTime (0),
averageTimeOffset (0),
isRecording (false)
{
JUCE_AUTORELEASEPOOL
{
session = [[QTCaptureSession alloc] init];
NSArray* devs = [QTCaptureDevice inputDevicesWithMediaType: QTMediaTypeVideo];
device = (QTCaptureDevice*) [devs objectAtIndex: index];
static DelegateClass cls;
callbackDelegate = [cls.createInstance() init];
DelegateClass::setOwner (callbackDelegate, this);
NSError* err = nil;
[device retain];
[device open: &err];
if (err == nil)
{
input = [[QTCaptureDeviceInput alloc] initWithDevice: device];
audioInput = [[QTCaptureDeviceInput alloc] initWithDevice: device];
[session addInput: input error: &err];
if (err == nil)
{
resetFile();
imageOutput = [[QTCaptureDecompressedVideoOutput alloc] init];
[imageOutput setDelegate: callbackDelegate];
if (err == nil)
{
[session startRunning];
return;
}
}
}
openingError = nsStringToJuce ([err description]);
DBG (openingError);
}
}
~Pimpl()
{
[session stopRunning];
[session removeOutput: imageOutput];
[session release];
[input release];
[device release];
[audioDevice release];
[audioInput release];
[fileOutput release];
[imageOutput release];
[callbackDelegate release];
}
bool openedOk() const noexcept { return openingError.isEmpty(); }
void resetFile()
{
[fileOutput recordToOutputFileURL: nil];
[session removeOutput: fileOutput];
[fileOutput release];
fileOutput = [[QTCaptureMovieFileOutput alloc] init];
[session removeInput: audioInput];
[audioInput release];
audioInput = nil;
[audioDevice release];
audioDevice = nil;
[fileOutput setDelegate: callbackDelegate];
}
void addDefaultAudioInput()
{
NSError* err = nil;
audioDevice = [QTCaptureDevice defaultInputDeviceWithMediaType: QTMediaTypeSound];
if ([audioDevice open: &err])
[audioDevice retain];
else
audioDevice = nil;
if (audioDevice != nil)
{
audioInput = [[QTCaptureDeviceInput alloc] initWithDevice: audioDevice];
[session addInput: audioInput error: &err];
}
}
void startRecordingToFile (const File& file, int quality)
{
stopRecording();
firstPresentationTime = 0;
file.deleteFile();
// In some versions of QT (e.g. on 10.5), if you record video without audio, the speed comes
// out wrong, so we'll put some audio in there too..
addDefaultAudioInput();
[session addOutput: fileOutput error: nil];
NSEnumerator* connectionEnumerator = [[fileOutput connections] objectEnumerator];
for (;;)
{
QTCaptureConnection* connection = [connectionEnumerator nextObject];
if (connection == nil)
break;
QTCompressionOptions* options = nil;
NSString* mediaType = [connection mediaType];
if ([mediaType isEqualToString: QTMediaTypeVideo])
options = [QTCompressionOptions compressionOptionsWithIdentifier:
quality >= 1 ? nsStringLiteral ("QTCompressionOptionsSD480SizeH264Video")
: nsStringLiteral ("QTCompressionOptions240SizeH264Video")];
else if ([mediaType isEqualToString: QTMediaTypeSound])
options = [QTCompressionOptions compressionOptionsWithIdentifier: nsStringLiteral ("QTCompressionOptionsHighQualityAACAudio")];
[fileOutput setCompressionOptions: options forConnection: connection];
}
[fileOutput recordToOutputFileURL: [NSURL fileURLWithPath: juceStringToNS (file.getFullPathName())]];
isRecording = true;
}
void stopRecording()
{
if (isRecording)
{
resetFile();
isRecording = false;
}
}
Time getTimeOfFirstRecordedFrame() const
{
return firstPresentationTime != 0 ? Time (firstPresentationTime + averageTimeOffset)
: Time();
}
void addListener (CameraDevice::Listener* listenerToAdd)
{
const ScopedLock sl (listenerLock);
if (listeners.size() == 0)
[session addOutput: imageOutput error: nil];
listeners.addIfNotAlreadyThere (listenerToAdd);
}
void removeListener (CameraDevice::Listener* listenerToRemove)
{
const ScopedLock sl (listenerLock);
listeners.removeFirstMatchingValue (listenerToRemove);
if (listeners.size() == 0)
[session removeOutput: imageOutput];
}
void callListeners (CIImage* frame, int w, int h)
{
Image image (juce_createImageFromCIImage (frame, w, h));
const ScopedLock sl (listenerLock);
for (int i = listeners.size(); --i >= 0;)
{
CameraDevice::Listener* const l = listeners[i];
if (l != nullptr)
l->imageReceived (image);
}
}
void captureBuffer (QTSampleBuffer* sampleBuffer)
{
const Time now (Time::getCurrentTime());
#if MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_5
NSNumber* hosttime = (NSNumber*) [sampleBuffer attributeForKey: QTSampleBufferHostTimeAttribute];
#else
NSNumber* hosttime = (NSNumber*) [sampleBuffer attributeForKey: nsStringLiteral ("hostTime")];
#endif
int64 presentationTime = (hosttime != nil)
? ((int64) AudioConvertHostTimeToNanos ([hosttime unsignedLongLongValue]) / 1000000 + 40)
: (([sampleBuffer presentationTime].timeValue * 1000) / [sampleBuffer presentationTime].timeScale + 50);
const int64 timeDiff = now.toMilliseconds() - presentationTime;
if (firstPresentationTime == 0)
{
firstPresentationTime = presentationTime;
averageTimeOffset = timeDiff;
}
else
{
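// smooth the offset with a weighted running average (new samples get a weight of 8/128)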
averageTimeOffset = (averageTimeOffset * 120 + timeDiff * 8) / 128;
}
}
static StringArray getAvailableDevices()
{
StringArray results;
NSArray* devs = [QTCaptureDevice inputDevicesWithMediaType: QTMediaTypeVideo];
for (int i = 0; i < (int) [devs count]; ++i)
{
QTCaptureDevice* dev = (QTCaptureDevice*) [devs objectAtIndex: i];
results.add (nsStringToJuce ([dev localizedDisplayName]));
}
return results;
}
QTCaptureDevice* device;
QTCaptureDevice* audioDevice;
QTCaptureDeviceInput* input;
QTCaptureDeviceInput* audioInput;
QTCaptureSession* session;
QTCaptureMovieFileOutput* fileOutput;
QTCaptureDecompressedVideoOutput* imageOutput;
NSObject* callbackDelegate;
String openingError;
int64 firstPresentationTime, averageTimeOffset;
bool isRecording;
Array<CameraDevice::Listener*> listeners;
CriticalSection listenerLock;
private:
//==============================================================================
struct DelegateClass : public ObjCClass<NSObject>
{
DelegateClass() : ObjCClass<NSObject> ("JUCEAppDelegate_")
{
addIvar<Pimpl*> ("owner");
addMethod (@selector (captureOutput:didOutputVideoFrame:withSampleBuffer:fromConnection:),
didOutputVideoFrame, "v@:@", @encode (CVImageBufferRef), "@@");
addMethod (@selector (captureOutput:didOutputSampleBuffer:fromConnection:),
didOutputVideoFrame, "v@:@@@");
registerClass();
}
static void setOwner (id self, Pimpl* owner) { object_setInstanceVariable (self, "owner", owner); }
static Pimpl* getOwner (id self) { return getIvar<Pimpl*> (self, "owner"); }
private:
static void didOutputVideoFrame (id self, SEL, QTCaptureOutput*, CVImageBufferRef videoFrame,
QTSampleBuffer*, QTCaptureConnection*)
{
Pimpl* const internal = getOwner (self);
if (internal->listeners.size() > 0)
{
JUCE_AUTORELEASEPOOL
{
internal->callListeners ([CIImage imageWithCVImageBuffer: videoFrame],
(int) CVPixelBufferGetWidth (videoFrame),
(int) CVPixelBufferGetHeight (videoFrame));
}
}
}
static void didOutputSampleBuffer (id self, SEL, QTCaptureFileOutput*, QTSampleBuffer* sampleBuffer, QTCaptureConnection*)
{
getOwner (self)->captureBuffer (sampleBuffer);
}
};
JUCE_DECLARE_NON_COPYABLE (Pimpl)
};
struct CameraDevice::ViewerComponent : public NSViewComponent
{
ViewerComponent (CameraDevice& d)
{
JUCE_AUTORELEASEPOOL
{
captureView = [[QTCaptureView alloc] init];
[captureView setCaptureSession: d.pimpl->session];
setSize (640, 480);
setView (captureView);
}
}
~ViewerComponent()
{
setView (nil);
[captureView setCaptureSession: nil];
[captureView release];
}
QTCaptureView* captureView;
JUCE_DECLARE_NON_COPYABLE (ViewerComponent)
};
String CameraDevice::getFileExtension()
{
return ".mov";
}


@@ -0,0 +1,341 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#if JUCE_QUICKTIME
struct NonInterceptingQTMovieViewClass : public ObjCClass <QTMovieView>
{
NonInterceptingQTMovieViewClass() : ObjCClass <QTMovieView> ("JUCEQTMovieView_")
{
addMethod (@selector (hitTest:), hitTest, "@@:", @encode (NSPoint));
addMethod (@selector (acceptsFirstMouse:), acceptsFirstMouse, "c@:@");
registerClass();
}
private:
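// Only claim the hit when the QT controller bar is visible, so that mouse
// events otherwise fall through to the JUCE component underneath.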
static NSView* hitTest (id self, SEL, NSPoint point)
{
if (! [(QTMovieView*) self isControllerVisible])
return nil;
objc_super s = { self, [QTMovieView class] };
return objc_msgSendSuper (&s, @selector (hitTest:), point);
}
static BOOL acceptsFirstMouse (id, SEL, NSEvent*)
{
return YES;
}
};
//==============================================================================
#define theMovie (static_cast <QTMovie*> (movie))
//==============================================================================
QuickTimeMovieComponent::QuickTimeMovieComponent()
: movie (0)
{
setOpaque (true);
setVisible (true);
static NonInterceptingQTMovieViewClass cls;
QTMovieView* view = [cls.createInstance() initWithFrame: NSMakeRect (0, 0, 100.0f, 100.0f)];
setView (view);
[view setNextResponder: [view superview]];
[view setWantsLayer: YES]; // prevents the view failing to redraw correctly when paused.
[view release];
}
QuickTimeMovieComponent::~QuickTimeMovieComponent()
{
closeMovie();
setView (nil);
}
bool QuickTimeMovieComponent::isQuickTimeAvailable() noexcept
{
return true;
}
static QTMovie* openMovieFromStream (InputStream* movieStream, File& movieFile)
{
// unfortunately, QTMovie objects can only be created on the main thread..
jassert (MessageManager::getInstance()->isThisTheMessageThread());
QTMovie* movie = nil;
if (FileInputStream* const fin = dynamic_cast <FileInputStream*> (movieStream))
{
movieFile = fin->getFile();
movie = [QTMovie movieWithFile: juceStringToNS (movieFile.getFullPathName())
error: nil];
}
else
{
MemoryBlock temp;
movieStream->readIntoMemoryBlock (temp);
static const char* const suffixesToTry[] = { ".mov", ".mp3", ".avi", ".m4a" };
for (int i = 0; i < numElementsInArray (suffixesToTry); ++i)
{
movie = [QTMovie movieWithDataReference: [QTDataReference dataReferenceWithReferenceToData: [NSData dataWithBytes: temp.getData()
length: temp.getSize()]
name: [NSString stringWithUTF8String: suffixesToTry[i]]
MIMEType: nsEmptyString()]
error: nil];
if (movie != 0)
break;
}
}
return movie;
}
bool QuickTimeMovieComponent::loadMovie (const File& file, const bool showController)
{
return loadMovie (file.createInputStream(), showController);
}
bool QuickTimeMovieComponent::loadMovie (InputStream* movieStream, const bool showController)
{
const ScopedPointer<InputStream> movieStreamDeleter (movieStream);
closeMovie();
if (getPeer() == nullptr)
{
// To open a movie, this component must be visible inside a functioning window, so that
// the QT control can be assigned to the window.
jassertfalse;
return false;
}
if (movieStream == nullptr)
return false;
movie = openMovieFromStream (movieStream, movieFile);
[theMovie retain];
QTMovieView* view = (QTMovieView*) getView();
[view setMovie: theMovie];
controllerVisible = showController;
[view setControllerVisible: controllerVisible];
setLooping (looping);
return movie != nil;
}
bool QuickTimeMovieComponent::loadMovie (const URL& movieURL, const bool showController)
{
// unfortunately, QTMovie objects can only be created on the main thread..
jassert (MessageManager::getInstance()->isThisTheMessageThread());
closeMovie();
if (getPeer() == nullptr)
{
// To open a movie, this component must be visible inside a functioning window, so that
// the QT control can be assigned to the window.
jassertfalse;
return false;
}
NSURL* url = [NSURL URLWithString: juceStringToNS (movieURL.toString (true))];
NSError* err;
if ([QTMovie canInitWithURL: url])
movie = [QTMovie movieWithURL: url error: &err];
[theMovie retain];
QTMovieView* view = (QTMovieView*) getView();
[view setMovie: theMovie];
controllerVisible = showController;
[view setControllerVisible: controllerVisible];
setLooping (looping);
return movie != nil;
}
void QuickTimeMovieComponent::closeMovie()
{
stop();
QTMovieView* view = (QTMovieView*) getView();
[view setMovie: nil];
[theMovie release];
movie = 0;
movieFile = File::nonexistent;
}
bool QuickTimeMovieComponent::isMovieOpen() const
{
return movie != nil;
}
File QuickTimeMovieComponent::getCurrentMovieFile() const
{
return movieFile;
}
void QuickTimeMovieComponent::play()
{
[theMovie play];
}
void QuickTimeMovieComponent::stop()
{
[theMovie stop];
}
bool QuickTimeMovieComponent::isPlaying() const
{
return movie != 0 && [theMovie rate] != 0;
}
void QuickTimeMovieComponent::setPosition (const double seconds)
{
if (movie != 0)
{
QTTime t;
t.timeValue = (uint64) (100000.0 * seconds);
t.timeScale = 100000;
t.flags = 0;
[theMovie setCurrentTime: t];
}
}
double QuickTimeMovieComponent::getPosition() const
{
if (movie == 0)
return 0.0;
QTTime t = [theMovie currentTime];
return t.timeValue / (double) t.timeScale;
}
void QuickTimeMovieComponent::setSpeed (const float newSpeed)
{
[theMovie setRate: newSpeed];
}
double QuickTimeMovieComponent::getMovieDuration() const
{
if (movie == 0)
return 0.0;
QTTime t = [theMovie duration];
return t.timeValue / (double) t.timeScale;
}
void QuickTimeMovieComponent::setLooping (const bool shouldLoop)
{
looping = shouldLoop;
[theMovie setAttribute: [NSNumber numberWithBool: shouldLoop]
forKey: QTMovieLoopsAttribute];
}
bool QuickTimeMovieComponent::isLooping() const
{
return looping;
}
void QuickTimeMovieComponent::setMovieVolume (const float newVolume)
{
[theMovie setVolume: newVolume];
}
float QuickTimeMovieComponent::getMovieVolume() const
{
return movie != 0 ? [theMovie volume] : 0.0f;
}
void QuickTimeMovieComponent::getMovieNormalSize (int& width, int& height) const
{
width = 0;
height = 0;
if (movie != 0)
{
NSSize s = [[theMovie attributeForKey: QTMovieNaturalSizeAttribute] sizeValue];
width = (int) s.width;
height = (int) s.height;
}
}
void QuickTimeMovieComponent::paint (Graphics& g)
{
if (movie == 0)
g.fillAll (Colours::black);
}
bool QuickTimeMovieComponent::isControllerVisible() const
{
return controllerVisible;
}
//==============================================================================
void QuickTimeMovieComponent::goToStart()
{
setPosition (0.0);
}
void QuickTimeMovieComponent::setBoundsWithCorrectAspectRatio (const Rectangle<int>& spaceToFitWithin,
RectanglePlacement placement)
{
int normalWidth, normalHeight;
getMovieNormalSize (normalWidth, normalHeight);
const Rectangle<int> normalSize (normalWidth, normalHeight);
if (! (spaceToFitWithin.isEmpty() || normalSize.isEmpty()))
setBounds (placement.appliedTo (normalSize, spaceToFitWithin));
else
setBounds (spaceToFitWithin);
}
//==============================================================================
#if ! (JUCE_MAC && JUCE_64BIT)
bool juce_OpenQuickTimeMovieFromStream (InputStream* movieStream, Movie& result, Handle&)
{
if (movieStream == nullptr)
return false;
File file;
QTMovie* movie = openMovieFromStream (movieStream, file);
if (movie != nil)
result = [movie quickTimeMovie];
return movie != nil;
}
#endif
#endif
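
A hypothetical usage sketch for QuickTimeMovieComponent, built only from the methods implemented above and respecting the assertion in loadMovie() that the component must already be on-screen; the window class name is illustrative.

// Hypothetical usage sketch (requires JUCE_QUICKTIME).
struct MoviePlayerWindow  : public DocumentWindow
{
    explicit MoviePlayerWindow (const File& movieFile)
        : DocumentWindow ("Movie", Colours::black, DocumentWindow::allButtons)
    {
        setContentNonOwned (&movieComp, false);
        centreWithSize (640, 480);
        setVisible (true);                          // loadMovie() asserts if there's no peer yet

        if (movieComp.loadMovie (movieFile, true))  // true == show the QT controller bar
        {
            movieComp.setLooping (false);
            movieComp.setMovieVolume (0.8f);
            movieComp.play();
        }
    }

    QuickTimeMovieComponent movieComp;
};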


@@ -0,0 +1,792 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
interface ISampleGrabberCB : public IUnknown
{
virtual STDMETHODIMP SampleCB (double, IMediaSample*) = 0;
virtual STDMETHODIMP BufferCB (double, BYTE*, long) = 0;
};
interface ISampleGrabber : public IUnknown
{
virtual HRESULT STDMETHODCALLTYPE SetOneShot (BOOL) = 0;
virtual HRESULT STDMETHODCALLTYPE SetMediaType (const AM_MEDIA_TYPE*) = 0;
virtual HRESULT STDMETHODCALLTYPE GetConnectedMediaType (AM_MEDIA_TYPE*) = 0;
virtual HRESULT STDMETHODCALLTYPE SetBufferSamples (BOOL) = 0;
virtual HRESULT STDMETHODCALLTYPE GetCurrentBuffer (long*, long*) = 0;
virtual HRESULT STDMETHODCALLTYPE GetCurrentSample (IMediaSample**) = 0;
virtual HRESULT STDMETHODCALLTYPE SetCallback (ISampleGrabberCB*, long) = 0;
};
static const IID IID_ISampleGrabberCB = { 0x0579154A, 0x2B53, 0x4994, { 0xB0, 0xD0, 0xE7, 0x73, 0x14, 0x8E, 0xFF, 0x85 } };
static const IID IID_ISampleGrabber = { 0x6B652FFF, 0x11FE, 0x4fce, { 0x92, 0xAD, 0x02, 0x66, 0xB5, 0xD7, 0xC7, 0x8F } };
static const CLSID CLSID_SampleGrabber = { 0xC1F400A0, 0x3F08, 0x11d3, { 0x9F, 0x0B, 0x00, 0x60, 0x08, 0x03, 0x9E, 0x37 } };
static const CLSID CLSID_NullRenderer = { 0xC1F400A4, 0x3F08, 0x11d3, { 0x9F, 0x0B, 0x00, 0x60, 0x08, 0x03, 0x9E, 0x37 } };
struct CameraDevice::Pimpl : public ChangeBroadcaster
{
Pimpl (const String&, int index,
int minWidth, int minHeight,
int maxWidth, int maxHeight)
: isRecording (false),
openedSuccessfully (false),
imageNeedsFlipping (false),
width (0), height (0),
activeUsers (0),
recordNextFrameTime (false),
previewMaxFPS (60)
{
HRESULT hr = captureGraphBuilder.CoCreateInstance (CLSID_CaptureGraphBuilder2);
if (FAILED (hr))
return;
filter = enumerateCameras (nullptr, index);
if (filter == nullptr)
return;
hr = graphBuilder.CoCreateInstance (CLSID_FilterGraph);
if (FAILED (hr))
return;
hr = captureGraphBuilder->SetFiltergraph (graphBuilder);
if (FAILED (hr))
return;
hr = graphBuilder.QueryInterface (mediaControl);
if (FAILED (hr))
return;
{
ComSmartPtr<IAMStreamConfig> streamConfig;
hr = captureGraphBuilder->FindInterface (&PIN_CATEGORY_CAPTURE, 0, filter,
IID_IAMStreamConfig, (void**) streamConfig.resetAndGetPointerAddress());
if (streamConfig != nullptr)
{
getVideoSizes (streamConfig);
if (! selectVideoSize (streamConfig, minWidth, minHeight, maxWidth, maxHeight))
return;
}
}
hr = graphBuilder->AddFilter (filter, _T("Video Capture"));
if (FAILED (hr))
return;
hr = smartTee.CoCreateInstance (CLSID_SmartTee);
if (FAILED (hr))
return;
hr = graphBuilder->AddFilter (smartTee, _T("Smart Tee"));
if (FAILED (hr))
return;
if (! connectFilters (filter, smartTee))
return;
ComSmartPtr<IBaseFilter> sampleGrabberBase;
hr = sampleGrabberBase.CoCreateInstance (CLSID_SampleGrabber);
if (FAILED (hr))
return;
hr = sampleGrabberBase.QueryInterface (IID_ISampleGrabber, sampleGrabber);
if (FAILED (hr))
return;
{
AM_MEDIA_TYPE mt = { 0 };
mt.majortype = MEDIATYPE_Video;
mt.subtype = MEDIASUBTYPE_RGB24;
mt.formattype = FORMAT_VideoInfo;
sampleGrabber->SetMediaType (&mt);
}
callback = new GrabberCallback (*this);
hr = sampleGrabber->SetCallback (callback, 1);
hr = graphBuilder->AddFilter (sampleGrabberBase, _T("Sample Grabber"));
if (FAILED (hr))
return;
ComSmartPtr<IPin> grabberInputPin;
if (! (getPin (smartTee, PINDIR_OUTPUT, smartTeeCaptureOutputPin, "capture")
&& getPin (smartTee, PINDIR_OUTPUT, smartTeePreviewOutputPin, "preview")
&& getPin (sampleGrabberBase, PINDIR_INPUT, grabberInputPin)))
return;
hr = graphBuilder->Connect (smartTeePreviewOutputPin, grabberInputPin);
if (FAILED (hr))
return;
AM_MEDIA_TYPE mt = { 0 };
hr = sampleGrabber->GetConnectedMediaType (&mt);
VIDEOINFOHEADER* pVih = (VIDEOINFOHEADER*) (mt.pbFormat);
width = pVih->bmiHeader.biWidth;
height = pVih->bmiHeader.biHeight;
ComSmartPtr<IBaseFilter> nullFilter;
hr = nullFilter.CoCreateInstance (CLSID_NullRenderer);
hr = graphBuilder->AddFilter (nullFilter, _T("Null Renderer"));
if (connectFilters (sampleGrabberBase, nullFilter)
&& addGraphToRot())
{
activeImage = Image (Image::RGB, width, height, true);
loadingImage = Image (Image::RGB, width, height, true);
openedSuccessfully = true;
}
}
~Pimpl()
{
if (mediaControl != nullptr)
mediaControl->Stop();
removeGraphFromRot();
disconnectAnyViewers();
if (sampleGrabber != nullptr)
{
sampleGrabber->SetCallback (nullptr, 0);
sampleGrabber = nullptr;
}
callback = nullptr;
graphBuilder = nullptr;
mediaControl = nullptr;
filter = nullptr;
captureGraphBuilder = nullptr;
smartTee = nullptr;
smartTeePreviewOutputPin = nullptr;
smartTeeCaptureOutputPin = nullptr;
asfWriter = nullptr;
}
bool openedOk() const noexcept { return openedSuccessfully; }
void startRecordingToFile (const File& file, int quality)
{
addUser();
isRecording = createFileCaptureFilter (file, quality);
}
void stopRecording()
{
if (isRecording)
{
removeFileCaptureFilter();
removeUser();
isRecording = false;
}
}
Time getTimeOfFirstRecordedFrame() const
{
return firstRecordedTime;
}
void addListener (CameraDevice::Listener* listenerToAdd)
{
const ScopedLock sl (listenerLock);
if (listeners.size() == 0)
addUser();
listeners.addIfNotAlreadyThere (listenerToAdd);
}
void removeListener (CameraDevice::Listener* listenerToRemove)
{
const ScopedLock sl (listenerLock);
listeners.removeAllInstancesOf (listenerToRemove);
if (listeners.size() == 0)
removeUser();
}
void callListeners (const Image& image)
{
const ScopedLock sl (listenerLock);
for (int i = listeners.size(); --i >= 0;)
if (CameraDevice::Listener* const l = listeners[i])
l->imageReceived (image);
}
void addUser()
{
if (openedSuccessfully && activeUsers++ == 0)
mediaControl->Run();
}
void removeUser()
{
if (openedSuccessfully && --activeUsers == 0)
mediaControl->Stop();
}
void handleFrame (double /*time*/, BYTE* buffer, long /*bufferSize*/)
{
if (recordNextFrameTime)
{
const double defaultCameraLatency = 0.1;
firstRecordedTime = Time::getCurrentTime() - RelativeTime (defaultCameraLatency);
recordNextFrameTime = false;
ComSmartPtr<IPin> pin;
if (getPin (filter, PINDIR_OUTPUT, pin))
{
ComSmartPtr<IAMPushSource> pushSource;
HRESULT hr = pin.QueryInterface (pushSource);
if (pushSource != nullptr)
{
REFERENCE_TIME latency = 0;
hr = pushSource->GetLatency (&latency);
firstRecordedTime = firstRecordedTime - RelativeTime ((double) latency);
}
}
}
{
const int lineStride = width * 3;
const ScopedLock sl (imageSwapLock);
{
loadingImage.duplicateIfShared();
const Image::BitmapData destData (loadingImage, 0, 0, width, height, Image::BitmapData::writeOnly);
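// DirectShow delivers the RGB24 frame bottom-up, so copy the rows in reverse order to flip it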
for (int i = 0; i < height; ++i)
memcpy (destData.getLinePointer ((height - 1) - i),
buffer + lineStride * i,
lineStride);
}
imageNeedsFlipping = true;
}
if (listeners.size() > 0)
callListeners (loadingImage);
sendChangeMessage();
}
void drawCurrentImage (Graphics& g, Rectangle<int> area)
{
if (imageNeedsFlipping)
{
const ScopedLock sl (imageSwapLock);
std::swap (loadingImage, activeImage);
imageNeedsFlipping = false;
}
Rectangle<int> centred (RectanglePlacement (RectanglePlacement::centred)
.appliedTo (Rectangle<int> (width, height), area));
RectangleList<int> borders (area);
borders.subtract (centred);
g.setColour (Colours::black);
g.fillRectList (borders);
g.drawImage (activeImage, centred.getX(), centred.getY(),
centred.getWidth(), centred.getHeight(), 0, 0, width, height);
}
bool createFileCaptureFilter (const File& file, int quality)
{
removeFileCaptureFilter();
file.deleteFile();
mediaControl->Stop();
firstRecordedTime = Time();
recordNextFrameTime = true;
previewMaxFPS = 60;
HRESULT hr = asfWriter.CoCreateInstance (CLSID_WMAsfWriter);
if (SUCCEEDED (hr))
{
ComSmartPtr<IFileSinkFilter> fileSink;
hr = asfWriter.QueryInterface (fileSink);
if (SUCCEEDED (hr))
{
hr = fileSink->SetFileName (file.getFullPathName().toWideCharPointer(), 0);
if (SUCCEEDED (hr))
{
hr = graphBuilder->AddFilter (asfWriter, _T("AsfWriter"));
if (SUCCEEDED (hr))
{
ComSmartPtr<IConfigAsfWriter> asfConfig;
hr = asfWriter.QueryInterface (asfConfig);
asfConfig->SetIndexMode (true);
ComSmartPtr<IWMProfileManager> profileManager;
hr = WMCreateProfileManager (profileManager.resetAndGetPointerAddress());
// This gibberish is the DirectShow profile for a video-only wmv file.
String prof ("<profile version=\"589824\" storageformat=\"1\" name=\"Quality\" description=\"Quality type for output.\">"
"<streamconfig majortype=\"{73646976-0000-0010-8000-00AA00389B71}\" streamnumber=\"1\" "
"streamname=\"Video Stream\" inputname=\"Video409\" bitrate=\"894960\" "
"bufferwindow=\"0\" reliabletransport=\"1\" decodercomplexity=\"AU\" rfc1766langid=\"en-us\">"
"<videomediaprops maxkeyframespacing=\"50000000\" quality=\"90\"/>"
"<wmmediatype subtype=\"{33564D57-0000-0010-8000-00AA00389B71}\" bfixedsizesamples=\"0\" "
"btemporalcompression=\"1\" lsamplesize=\"0\">"
"<videoinfoheader dwbitrate=\"894960\" dwbiterrorrate=\"0\" avgtimeperframe=\"$AVGTIMEPERFRAME\">"
"<rcsource left=\"0\" top=\"0\" right=\"$WIDTH\" bottom=\"$HEIGHT\"/>"
"<rctarget left=\"0\" top=\"0\" right=\"$WIDTH\" bottom=\"$HEIGHT\"/>"
"<bitmapinfoheader biwidth=\"$WIDTH\" biheight=\"$HEIGHT\" biplanes=\"1\" bibitcount=\"24\" "
"bicompression=\"WMV3\" bisizeimage=\"0\" bixpelspermeter=\"0\" biypelspermeter=\"0\" "
"biclrused=\"0\" biclrimportant=\"0\"/>"
"</videoinfoheader>"
"</wmmediatype>"
"</streamconfig>"
"</profile>");
const int fps[] = { 10, 15, 30 };
int maxFramesPerSecond = fps [jlimit (0, numElementsInArray (fps) - 1, quality & 0xff)];
if ((quality & 0xff000000) != 0) // (internal hacky way to pass explicit frame rates for testing)
maxFramesPerSecond = (quality >> 24) & 0xff;
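// e.g. a (hypothetical) quality value of 2 | (25 << 16) | (30 << 24) selects the
// high-quality profile, records at 30fps and caps the preview at 25fps.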
prof = prof.replace ("$WIDTH", String (width))
.replace ("$HEIGHT", String (height))
.replace ("$AVGTIMEPERFRAME", String (10000000 / maxFramesPerSecond));
ComSmartPtr<IWMProfile> currentProfile;
hr = profileManager->LoadProfileByData (prof.toWideCharPointer(), currentProfile.resetAndGetPointerAddress());
hr = asfConfig->ConfigureFilterUsingProfile (currentProfile);
if (SUCCEEDED (hr))
{
ComSmartPtr<IPin> asfWriterInputPin;
if (getPin (asfWriter, PINDIR_INPUT, asfWriterInputPin, "Video Input 01"))
{
hr = graphBuilder->Connect (smartTeeCaptureOutputPin, asfWriterInputPin);
if (SUCCEEDED (hr) && openedSuccessfully && activeUsers > 0
&& SUCCEEDED (mediaControl->Run()))
{
previewMaxFPS = (quality < 2) ? 15 : 25; // throttle back the preview comps to try to leave the cpu free for encoding
if ((quality & 0x00ff0000) != 0) // (internal hacky way to pass explicit frame rates for testing)
previewMaxFPS = (quality >> 16) & 0xff;
return true;
}
}
}
}
}
}
}
removeFileCaptureFilter();
if (openedSuccessfully && activeUsers > 0)
mediaControl->Run();
return false;
}
void removeFileCaptureFilter()
{
mediaControl->Stop();
if (asfWriter != nullptr)
{
graphBuilder->RemoveFilter (asfWriter);
asfWriter = nullptr;
}
if (openedSuccessfully && activeUsers > 0)
mediaControl->Run();
previewMaxFPS = 60;
}
static ComSmartPtr<IBaseFilter> enumerateCameras (StringArray* names, const int deviceIndexToOpen)
{
int index = 0;
ComSmartPtr<ICreateDevEnum> pDevEnum;
if (SUCCEEDED (pDevEnum.CoCreateInstance (CLSID_SystemDeviceEnum)))
{
ComSmartPtr<IEnumMoniker> enumerator;
HRESULT hr = pDevEnum->CreateClassEnumerator (CLSID_VideoInputDeviceCategory, enumerator.resetAndGetPointerAddress(), 0);
if (SUCCEEDED (hr) && enumerator != nullptr)
{
ComSmartPtr<IMoniker> moniker;
ULONG fetched;
while (enumerator->Next (1, moniker.resetAndGetPointerAddress(), &fetched) == S_OK)
{
ComSmartPtr<IBaseFilter> captureFilter;
hr = moniker->BindToObject (0, 0, IID_IBaseFilter, (void**) captureFilter.resetAndGetPointerAddress());
if (SUCCEEDED (hr))
{
ComSmartPtr<IPropertyBag> propertyBag;
hr = moniker->BindToStorage (0, 0, IID_IPropertyBag, (void**) propertyBag.resetAndGetPointerAddress());
if (SUCCEEDED (hr))
{
VARIANT var;
var.vt = VT_BSTR;
hr = propertyBag->Read (_T("FriendlyName"), &var, 0);
propertyBag = nullptr;
if (SUCCEEDED (hr))
{
if (names != nullptr)
names->add (var.bstrVal);
if (index == deviceIndexToOpen)
return captureFilter;
++index;
}
}
}
}
}
}
return nullptr;
}
static StringArray getAvailableDevices()
{
StringArray devs;
enumerateCameras (&devs, -1);
return devs;
}
class GrabberCallback : public ComBaseClassHelperBase<ISampleGrabberCB>
{
public:
GrabberCallback (Pimpl& p)
: ComBaseClassHelperBase<ISampleGrabberCB> (0), owner (p) {}
JUCE_COMRESULT QueryInterface (REFIID refId, void** result)
{
if (refId == IID_ISampleGrabberCB)
return castToType<ISampleGrabberCB> (result);
return ComBaseClassHelperBase<ISampleGrabberCB>::QueryInterface (refId, result);
}
STDMETHODIMP SampleCB (double, IMediaSample*) { return E_FAIL; }
STDMETHODIMP BufferCB (double time, BYTE* buffer, long bufferSize)
{
owner.handleFrame (time, buffer, bufferSize);
return S_OK;
}
private:
Pimpl& owner;
JUCE_DECLARE_NON_COPYABLE (GrabberCallback)
};
ComSmartPtr<GrabberCallback> callback;
Array<CameraDevice::Listener*> listeners;
CriticalSection listenerLock;
bool isRecording, openedSuccessfully;
int width, height;
Time firstRecordedTime;
Array<ViewerComponent*> viewerComps;
ComSmartPtr<ICaptureGraphBuilder2> captureGraphBuilder;
ComSmartPtr<IBaseFilter> filter, smartTee, asfWriter;
ComSmartPtr<IGraphBuilder> graphBuilder;
ComSmartPtr<ISampleGrabber> sampleGrabber;
ComSmartPtr<IMediaControl> mediaControl;
ComSmartPtr<IPin> smartTeePreviewOutputPin, smartTeeCaptureOutputPin;
int activeUsers;
Array<int> widths, heights;
DWORD graphRegistrationID;
CriticalSection imageSwapLock;
bool imageNeedsFlipping;
Image loadingImage, activeImage;
bool recordNextFrameTime;
int previewMaxFPS;
private:
void getVideoSizes (IAMStreamConfig* const streamConfig)
{
widths.clear();
heights.clear();
int count = 0, size = 0;
streamConfig->GetNumberOfCapabilities (&count, &size);
if (size == sizeof (VIDEO_STREAM_CONFIG_CAPS))
{
for (int i = 0; i < count; ++i)
{
VIDEO_STREAM_CONFIG_CAPS scc;
AM_MEDIA_TYPE* config;
HRESULT hr = streamConfig->GetStreamCaps (i, &config, (BYTE*) &scc);
if (SUCCEEDED (hr))
{
const int w = scc.InputSize.cx;
const int h = scc.InputSize.cy;
bool duplicate = false;
for (int j = widths.size(); --j >= 0;)
{
if (w == widths.getUnchecked (j) && h == heights.getUnchecked (j))
{
duplicate = true;
break;
}
}
if (! duplicate)
{
DBG ("Camera capture size: " + String (w) + ", " + String (h));
widths.add (w);
heights.add (h);
}
deleteMediaType (config);
}
}
}
}
bool selectVideoSize (IAMStreamConfig* const streamConfig,
const int minWidth, const int minHeight,
const int maxWidth, const int maxHeight)
{
int count = 0, size = 0, bestArea = 0, bestIndex = -1;
streamConfig->GetNumberOfCapabilities (&count, &size);
if (size == sizeof (VIDEO_STREAM_CONFIG_CAPS))
{
AM_MEDIA_TYPE* config;
VIDEO_STREAM_CONFIG_CAPS scc;
for (int i = 0; i < count; ++i)
{
HRESULT hr = streamConfig->GetStreamCaps (i, &config, (BYTE*) &scc);
if (SUCCEEDED (hr))
{
if (scc.InputSize.cx >= minWidth
&& scc.InputSize.cy >= minHeight
&& scc.InputSize.cx <= maxWidth
&& scc.InputSize.cy <= maxHeight)
{
int area = scc.InputSize.cx * scc.InputSize.cy;
if (area > bestArea)
{
bestIndex = i;
bestArea = area;
}
}
deleteMediaType (config);
}
}
if (bestIndex >= 0)
{
HRESULT hr = streamConfig->GetStreamCaps (bestIndex, &config, (BYTE*) &scc);
hr = streamConfig->SetFormat (config);
deleteMediaType (config);
return SUCCEEDED (hr);
}
}
return false;
}
static bool getPin (IBaseFilter* filter, const PIN_DIRECTION wantedDirection,
ComSmartPtr<IPin>& result, const char* pinName = nullptr)
{
ComSmartPtr<IEnumPins> enumerator;
ComSmartPtr<IPin> pin;
filter->EnumPins (enumerator.resetAndGetPointerAddress());
while (enumerator->Next (1, pin.resetAndGetPointerAddress(), 0) == S_OK)
{
PIN_DIRECTION dir;
pin->QueryDirection (&dir);
if (wantedDirection == dir)
{
PIN_INFO info = { 0 };
pin->QueryPinInfo (&info);
if (pinName == nullptr || String (pinName).equalsIgnoreCase (String (info.achName)))
{
result = pin;
return true;
}
}
}
return false;
}
bool connectFilters (IBaseFilter* const first, IBaseFilter* const second) const
{
ComSmartPtr<IPin> in, out;
return getPin (first, PINDIR_OUTPUT, out)
&& getPin (second, PINDIR_INPUT, in)
&& SUCCEEDED (graphBuilder->Connect (out, in));
}
bool addGraphToRot()
{
ComSmartPtr<IRunningObjectTable> rot;
if (FAILED (GetRunningObjectTable (0, rot.resetAndGetPointerAddress())))
return false;
ComSmartPtr<IMoniker> moniker;
WCHAR buffer[128];
HRESULT hr = CreateItemMoniker (_T("!"), buffer, moniker.resetAndGetPointerAddress());
if (FAILED (hr))
return false;
graphRegistrationID = 0;
return SUCCEEDED (rot->Register (0, graphBuilder, moniker, &graphRegistrationID));
}
void removeGraphFromRot()
{
ComSmartPtr<IRunningObjectTable> rot;
if (SUCCEEDED (GetRunningObjectTable (0, rot.resetAndGetPointerAddress())))
rot->Revoke (graphRegistrationID);
}
void disconnectAnyViewers();
static void deleteMediaType (AM_MEDIA_TYPE* const pmt)
{
if (pmt->cbFormat != 0)
CoTaskMemFree ((PVOID) pmt->pbFormat);
if (pmt->pUnk != nullptr)
pmt->pUnk->Release();
CoTaskMemFree (pmt);
}
JUCE_DECLARE_NON_COPYABLE (Pimpl)
};
//==============================================================================
struct CameraDevice::ViewerComponent : public Component,
public ChangeListener
{
ViewerComponent (CameraDevice& d)
: owner (d.pimpl), maxFPS (15), lastRepaintTime (0)
{
setOpaque (true);
owner->addChangeListener (this);
owner->addUser();
owner->viewerComps.add (this);
setSize (owner->width, owner->height);
}
~ViewerComponent()
{
if (owner != nullptr)
{
owner->viewerComps.removeFirstMatchingValue (this);
owner->removeUser();
owner->removeChangeListener (this);
}
}
void ownerDeleted()
{
owner = nullptr;
}
void paint (Graphics& g) override
{
g.setColour (Colours::black);
g.setImageResamplingQuality (Graphics::lowResamplingQuality);
if (owner != nullptr)
owner->drawCurrentImage (g, getLocalBounds());
else
g.fillAll();
}
void changeListenerCallback (ChangeBroadcaster*) override
{
const int64 now = Time::currentTimeMillis();
if (now >= lastRepaintTime + (1000 / maxFPS))
{
lastRepaintTime = now;
repaint();
if (owner != nullptr)
maxFPS = owner->previewMaxFPS;
}
}
private:
Pimpl* owner;
int maxFPS;
int64 lastRepaintTime;
};
void CameraDevice::Pimpl::disconnectAnyViewers()
{
for (int i = viewerComps.size(); --i >= 0;)
viewerComps.getUnchecked(i)->ownerDeleted();
}
String CameraDevice::getFileExtension()
{
return ".wmv";
}


@@ -0,0 +1,926 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
namespace DirectShowHelpers
{
bool checkDShowAvailability()
{
ComSmartPtr <IGraphBuilder> graph;
return SUCCEEDED (graph.CoCreateInstance (CLSID_FilterGraph));
}
//======================================================================
class VideoRenderer
{
public:
VideoRenderer() {}
virtual ~VideoRenderer() {}
virtual HRESULT create (ComSmartPtr <IGraphBuilder>& graphBuilder,
ComSmartPtr <IBaseFilter>& baseFilter, HWND hwnd) = 0;
virtual void setVideoWindow (HWND hwnd) = 0;
virtual void setVideoPosition (HWND hwnd, long videoWidth, long videoHeight) = 0;
virtual void repaintVideo (HWND hwnd, HDC hdc) = 0;
virtual void displayModeChanged() = 0;
virtual HRESULT getVideoSize (long& videoWidth, long& videoHeight) = 0;
};
//======================================================================
class VMR7 : public VideoRenderer
{
public:
VMR7() {}
HRESULT create (ComSmartPtr <IGraphBuilder>& graphBuilder,
ComSmartPtr <IBaseFilter>& baseFilter, HWND hwnd)
{
ComSmartPtr <IVMRFilterConfig> filterConfig;
HRESULT hr = baseFilter.CoCreateInstance (CLSID_VideoMixingRenderer);
if (SUCCEEDED (hr)) hr = graphBuilder->AddFilter (baseFilter, L"VMR-7");
if (SUCCEEDED (hr)) hr = baseFilter.QueryInterface (filterConfig);
if (SUCCEEDED (hr)) hr = filterConfig->SetRenderingMode (VMRMode_Windowless);
if (SUCCEEDED (hr)) hr = baseFilter.QueryInterface (windowlessControl);
if (SUCCEEDED (hr)) hr = windowlessControl->SetVideoClippingWindow (hwnd);
if (SUCCEEDED (hr)) hr = windowlessControl->SetAspectRatioMode (VMR_ARMODE_LETTER_BOX);
return hr;
}
void setVideoWindow (HWND hwnd)
{
windowlessControl->SetVideoClippingWindow (hwnd);
}
void setVideoPosition (HWND hwnd, long videoWidth, long videoHeight)
{
RECT src, dest;
SetRect (&src, 0, 0, videoWidth, videoHeight);
GetClientRect (hwnd, &dest);
windowlessControl->SetVideoPosition (&src, &dest);
}
void repaintVideo (HWND hwnd, HDC hdc)
{
windowlessControl->RepaintVideo (hwnd, hdc);
}
void displayModeChanged()
{
windowlessControl->DisplayModeChanged();
}
HRESULT getVideoSize (long& videoWidth, long& videoHeight)
{
return windowlessControl->GetNativeVideoSize (&videoWidth, &videoHeight, nullptr, nullptr);
}
private:
ComSmartPtr <IVMRWindowlessControl> windowlessControl;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (VMR7)
};
//======================================================================
#if JUCE_MEDIAFOUNDATION
class EVR : public VideoRenderer
{
public:
EVR() {}
HRESULT create (ComSmartPtr <IGraphBuilder>& graphBuilder,
ComSmartPtr <IBaseFilter>& baseFilter, HWND hwnd)
{
ComSmartPtr <IMFGetService> getService;
HRESULT hr = baseFilter.CoCreateInstance (CLSID_EnhancedVideoRenderer);
if (SUCCEEDED (hr)) hr = graphBuilder->AddFilter (baseFilter, L"EVR");
if (SUCCEEDED (hr)) hr = baseFilter.QueryInterface (getService);
if (SUCCEEDED (hr)) hr = getService->GetService (MR_VIDEO_RENDER_SERVICE, IID_IMFVideoDisplayControl,
(LPVOID*) videoDisplayControl.resetAndGetPointerAddress());
if (SUCCEEDED (hr)) hr = videoDisplayControl->SetVideoWindow (hwnd);
if (SUCCEEDED (hr)) hr = videoDisplayControl->SetAspectRatioMode (MFVideoARMode_PreservePicture);
return hr;
}
void setVideoWindow (HWND hwnd)
{
videoDisplayControl->SetVideoWindow (hwnd);
}
void setVideoPosition (HWND hwnd, long /*videoWidth*/, long /*videoHeight*/)
{
const MFVideoNormalizedRect src = { 0.0f, 0.0f, 1.0f, 1.0f };
RECT dest;
GetClientRect (hwnd, &dest);
videoDisplayControl->SetVideoPosition (&src, &dest);
}
void repaintVideo (HWND /*hwnd*/, HDC /*hdc*/)
{
videoDisplayControl->RepaintVideo();
}
void displayModeChanged() {}
HRESULT getVideoSize (long& videoWidth, long& videoHeight)
{
SIZE sz;
HRESULT hr = videoDisplayControl->GetNativeVideoSize (&sz, nullptr);
videoWidth = sz.cx;
videoHeight = sz.cy;
return hr;
}
private:
ComSmartPtr <IMFVideoDisplayControl> videoDisplayControl;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (EVR)
};
#endif
}
//======================================================================
class DirectShowComponent::DirectShowContext : public AsyncUpdater
{
public:
DirectShowContext (DirectShowComponent& c, VideoRendererType renderType)
: component (c),
hwnd (0),
hdc (0),
state (uninitializedState),
hasVideo (false),
videoWidth (0),
videoHeight (0),
type (renderType),
needToUpdateViewport (true),
needToRecreateNativeWindow (false)
{
CoInitialize (0);
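// If no renderer type was specified explicitly, prefer the Enhanced Video Renderer
// on Windows Vista and later (when Media Foundation support is compiled in), and
// fall back to the VMR-7 otherwise.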
if (type == dshowDefault)
{
type = dshowVMR7;
#if JUCE_MEDIAFOUNDATION
if (SystemStats::getOperatingSystemType() >= SystemStats::WinVista)
type = dshowEVR;
#endif
}
}
~DirectShowContext()
{
release();
CoUninitialize();
}
//======================================================================
void updateWindowPosition (const Rectangle<int>& newBounds)
{
nativeWindow->setWindowPosition (newBounds);
}
void showWindow (bool shouldBeVisible)
{
nativeWindow->showWindow (shouldBeVisible);
}
//======================================================================
void repaint()
{
if (hasVideo)
videoRenderer->repaintVideo (nativeWindow->getHandle(), nativeWindow->getContext());
}
void updateVideoPosition()
{
if (hasVideo)
videoRenderer->setVideoPosition (nativeWindow->getHandle(), videoWidth, videoHeight);
}
void displayResolutionChanged()
{
if (hasVideo)
videoRenderer->displayModeChanged();
}
//======================================================================
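// Called when the component has moved to a different peer: the native child window
// has to be destroyed and re-created under the new peer, and the graph's event
// notifications and the renderer's target window re-attached to the new handle.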
void peerChanged()
{
deleteNativeWindow();
mediaEvent->SetNotifyWindow (0, 0, 0);
if (videoRenderer != nullptr)
videoRenderer->setVideoWindow (nullptr);
createNativeWindow();
mediaEvent->SetNotifyWindow ((OAHWND) hwnd, graphEventID, 0);
if (videoRenderer != nullptr)
videoRenderer->setVideoWindow (hwnd);
}
void handleAsyncUpdate() override
{
if (hwnd != 0)
{
if (needToRecreateNativeWindow)
{
peerChanged();
needToRecreateNativeWindow = false;
}
if (needToUpdateViewport)
{
updateVideoPosition();
needToUpdateViewport = false;
}
repaint();
}
else
{
triggerAsyncUpdate();
}
}
void recreateNativeWindowAsync()
{
needToRecreateNativeWindow = true;
triggerAsyncUpdate();
}
void updateContextPosition()
{
needToUpdateViewport = true;
triggerAsyncUpdate();
}
//======================================================================
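// Builds the DirectShow playback graph for a file or URL: creates the filter graph
// and its control/position/event/audio interfaces, sets up a windowless video
// renderer (EVR or VMR-7), asks the graph to render the source, removes the renderer
// again if the media turns out to contain no video stream, then registers the native
// window for graph events and leaves the graph paused.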
bool loadFile (const String& fileOrURLPath)
{
jassert (state == uninitializedState);
if (! createNativeWindow())
return false;
HRESULT hr = graphBuilder.CoCreateInstance (CLSID_FilterGraph);
// basic playback interfaces
if (SUCCEEDED (hr)) hr = graphBuilder.QueryInterface (mediaControl);
if (SUCCEEDED (hr)) hr = graphBuilder.QueryInterface (mediaPosition);
if (SUCCEEDED (hr)) hr = graphBuilder.QueryInterface (mediaEvent);
if (SUCCEEDED (hr)) hr = graphBuilder.QueryInterface (basicAudio);
// video renderer interface
if (SUCCEEDED (hr))
{
#if JUCE_MEDIAFOUNDATION
if (type == dshowEVR)
videoRenderer = new DirectShowHelpers::EVR();
else
#endif
videoRenderer = new DirectShowHelpers::VMR7();
hr = videoRenderer->create (graphBuilder, baseFilter, hwnd);
}
// build filter graph
if (SUCCEEDED (hr))
{
hr = graphBuilder->RenderFile (fileOrURLPath.toWideCharPointer(), nullptr);
if (FAILED (hr))
{
// Annoyingly, if we don't run the msg loop between failing and deleting the window, the
// whole OS message-dispatch system gets itself into a state, and refuses to deliver any
// more messages for the whole app. (That's what happens in Win7, anyway)
MessageManager::getInstance()->runDispatchLoopUntil (200);
}
}
// remove video renderer if not connected (no video)
if (SUCCEEDED (hr))
{
if (isRendererConnected())
{
hasVideo = true;
hr = videoRenderer->getVideoSize (videoWidth, videoHeight);
}
else
{
hasVideo = false;
graphBuilder->RemoveFilter (baseFilter);
videoRenderer = nullptr;
baseFilter = nullptr;
}
}
// set window to receive events
if (SUCCEEDED (hr))
hr = mediaEvent->SetNotifyWindow ((OAHWND) hwnd, graphEventID, 0);
if (SUCCEEDED (hr))
{
state = stoppedState;
pause();
return true;
}
// Note that if you're trying to open a file and this method fails, you may
// just need to install a suitable codec. It seems that by default DirectShow
// doesn't support a very good range of formats.
release();
return false;
}
void release()
{
if (mediaControl != nullptr)
mediaControl->Stop();
if (mediaEvent != nullptr)
mediaEvent->SetNotifyWindow (0, 0, 0);
if (videoRenderer != nullptr)
videoRenderer->setVideoWindow (0);
hasVideo = false;
videoRenderer = nullptr;
baseFilter = nullptr;
basicAudio = nullptr;
mediaEvent = nullptr;
mediaPosition = nullptr;
mediaControl = nullptr;
graphBuilder = nullptr;
state = uninitializedState;
videoWidth = 0;
videoHeight = 0;
if (nativeWindow != nullptr)
deleteNativeWindow();
}
void graphEventProc()
{
LONG ec;
LONG_PTR p1, p2;
jassert (mediaEvent != nullptr);
while (SUCCEEDED (mediaEvent->GetEvent (&ec, &p1, &p2, 0)))
{
switch (ec)
{
case EC_REPAINT:
component.repaint();
break;
case EC_COMPLETE:
if (component.isLooping())
component.goToStart();
else
component.stop();
break;
case EC_USERABORT:
case EC_ERRORABORT:
case EC_ERRORABORTEX:
component.closeMovie();
break;
default:
break;
}
mediaEvent->FreeEventParams (ec, p1, p2);
}
}
//======================================================================
void run()
{
mediaControl->Run();
state = runningState;
}
void stop()
{
mediaControl->Stop();
state = stoppedState;
}
void pause()
{
mediaControl->Pause();
state = pausedState;
}
//======================================================================
bool isInitialised() const noexcept { return state != uninitializedState; }
bool isRunning() const noexcept { return state == runningState; }
bool isPaused() const noexcept { return state == pausedState; }
bool isStopped() const noexcept { return state == stoppedState; }
bool containsVideo() const noexcept { return hasVideo; }
int getVideoWidth() const noexcept { return (int) videoWidth; }
int getVideoHeight() const noexcept { return (int) videoHeight; }
//======================================================================
double getDuration() const
{
REFTIME duration;
mediaPosition->get_Duration (&duration);
return duration;
}
double getPosition() const
{
REFTIME seconds;
mediaPosition->get_CurrentPosition (&seconds);
return seconds;
}
//======================================================================
void setSpeed (const float newSpeed) { mediaPosition->put_Rate (newSpeed); }
void setPosition (const double seconds) { mediaPosition->put_CurrentPosition (seconds); }
void setVolume (const float newVolume) { basicAudio->put_Volume (convertToDShowVolume (newVolume)); }
// in DirectShow, full volume is 0, silence is -10000
static long convertToDShowVolume (const float vol) noexcept
{
if (vol >= 1.0f) return 0;
if (vol <= 0.0f) return -10000;
return roundToInt ((vol * 10000.0f) - 10000.0f);
}
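// e.g. a JUCE volume of 0.25f maps to roundToInt ((0.25f * 10000.0f) - 10000.0f) == -7500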
float getVolume() const
{
long volume;
basicAudio->get_Volume (&volume);
return (volume + 10000) / 10000.0f;
}
private:
//======================================================================
enum { graphEventID = WM_APP + 0x43f0 };
DirectShowComponent& component;
HWND hwnd;
HDC hdc;
enum State { uninitializedState, runningState, pausedState, stoppedState };
State state;
bool hasVideo;
long videoWidth, videoHeight;
VideoRendererType type;
ComSmartPtr <IGraphBuilder> graphBuilder;
ComSmartPtr <IMediaControl> mediaControl;
ComSmartPtr <IMediaPosition> mediaPosition;
ComSmartPtr <IMediaEventEx> mediaEvent;
ComSmartPtr <IBasicAudio> basicAudio;
ComSmartPtr <IBaseFilter> baseFilter;
ScopedPointer <DirectShowHelpers::VideoRenderer> videoRenderer;
bool needToUpdateViewport, needToRecreateNativeWindow;
//======================================================================
class NativeWindowClass : private DeletedAtShutdown
{
public:
bool isRegistered() const noexcept { return atom != 0; }
LPCTSTR getWindowClassName() const noexcept { return (LPCTSTR) MAKELONG (atom, 0); }
juce_DeclareSingleton_SingleThreaded_Minimal (NativeWindowClass);
private:
NativeWindowClass()
: atom (0)
{
String windowClassName ("JUCE_DIRECTSHOW_");
windowClassName << (int) (Time::currentTimeMillis() & 0x7fffffff);
HINSTANCE moduleHandle = (HINSTANCE) Process::getCurrentModuleInstanceHandle();
TCHAR moduleFile [1024] = { 0 };
GetModuleFileName (moduleHandle, moduleFile, 1024);
WNDCLASSEX wcex = { 0 };
wcex.cbSize = sizeof (wcex);
wcex.style = CS_OWNDC;
wcex.lpfnWndProc = (WNDPROC) wndProc;
wcex.lpszClassName = windowClassName.toWideCharPointer();
wcex.hInstance = moduleHandle;
atom = RegisterClassEx (&wcex);
jassert (atom != 0);
}
~NativeWindowClass()
{
if (atom != 0)
UnregisterClass (getWindowClassName(), (HINSTANCE) Process::getCurrentModuleInstanceHandle());
clearSingletonInstance();
}
static LRESULT CALLBACK wndProc (HWND hwnd, UINT msg, WPARAM wParam, LPARAM lParam)
{
if (DirectShowContext* const c = (DirectShowContext*) GetWindowLongPtr (hwnd, GWLP_USERDATA))
{
switch (msg)
{
case WM_NCHITTEST: return HTTRANSPARENT;
case WM_ERASEBKGND: return 1;
case WM_DISPLAYCHANGE: c->displayResolutionChanged(); break;
case graphEventID: c->graphEventProc(); return 0;
default: break;
}
}
return DefWindowProc (hwnd, msg, wParam, lParam);
}
ATOM atom;
JUCE_DECLARE_NON_COPYABLE (NativeWindowClass)
};
//======================================================================
class NativeWindow
{
public:
NativeWindow (HWND parentToAddTo, void* const userData)
: hwnd (0), hdc (0)
{
NativeWindowClass* const wc = NativeWindowClass::getInstance();
if (wc->isRegistered())
{
DWORD exstyle = 0;
DWORD type = WS_CHILD;
hwnd = CreateWindowEx (exstyle, wc->getWindowClassName(),
L"", type, 0, 0, 0, 0, parentToAddTo, 0,
(HINSTANCE) Process::getCurrentModuleInstanceHandle(), 0);
if (hwnd != 0)
{
hdc = GetDC (hwnd);
SetWindowLongPtr (hwnd, GWLP_USERDATA, (LONG_PTR) userData);
}
}
jassert (hwnd != 0);
}
~NativeWindow()
{
if (hwnd != 0)
{
SetWindowLongPtr (hwnd, GWLP_USERDATA, (LONG_PTR) 0);
DestroyWindow (hwnd);
}
}
HWND getHandle() const noexcept { return hwnd; }
HDC getContext() const noexcept { return hdc; }
void setWindowPosition (const Rectangle<int>& newBounds)
{
SetWindowPos (hwnd, 0, newBounds.getX(), newBounds.getY(),
newBounds.getWidth(), newBounds.getHeight(),
SWP_NOACTIVATE | SWP_NOZORDER | SWP_NOOWNERZORDER);
}
void showWindow (const bool shouldBeVisible)
{
ShowWindow (hwnd, shouldBeVisible ? SW_SHOWNA : SW_HIDE);
}
private:
HWND hwnd;
HDC hdc;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (NativeWindow)
};
ScopedPointer<NativeWindow> nativeWindow;
//======================================================================
bool createNativeWindow()
{
jassert (nativeWindow == nullptr);
if (ComponentPeer* const topLevelPeer = component.getTopLevelComponent()->getPeer())
{
nativeWindow = new NativeWindow ((HWND) topLevelPeer->getNativeHandle(), this);
hwnd = nativeWindow->getHandle();
if (hwnd != 0)
{
hdc = GetDC (hwnd);
component.updateContextPosition();
component.showContext (component.isShowing());
return true;
}
else
{
nativeWindow = nullptr;
}
}
else
{
jassertfalse;
}
return false;
}
void deleteNativeWindow()
{
jassert (nativeWindow != nullptr);
ReleaseDC (hwnd, hdc);
hwnd = 0;
hdc = 0;
nativeWindow = nullptr;
}
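// Returns true if the video renderer's input pin ended up connected when the graph
// was rendered, i.e. the source actually contains a video stream.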
bool isRendererConnected()
{
ComSmartPtr <IEnumPins> enumPins;
HRESULT hr = baseFilter->EnumPins (enumPins.resetAndGetPointerAddress());
if (SUCCEEDED (hr))
hr = enumPins->Reset();
ComSmartPtr<IPin> pin;
while (SUCCEEDED (hr)
&& enumPins->Next (1, pin.resetAndGetPointerAddress(), nullptr) == S_OK)
{
ComSmartPtr<IPin> otherPin;
hr = pin->ConnectedTo (otherPin.resetAndGetPointerAddress());
if (SUCCEEDED (hr))
{
PIN_DIRECTION direction;
hr = pin->QueryDirection (&direction);
if (SUCCEEDED (hr) && direction == PINDIR_INPUT)
return true;
}
else if (hr == VFW_E_NOT_CONNECTED)
{
hr = S_OK;
}
}
return false;
}
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (DirectShowContext)
};
juce_ImplementSingleton_SingleThreaded (DirectShowComponent::DirectShowContext::NativeWindowClass);
//======================================================================
class DirectShowComponent::DirectShowComponentWatcher : public ComponentMovementWatcher
{
public:
DirectShowComponentWatcher (DirectShowComponent* const c)
: ComponentMovementWatcher (c),
owner (c)
{
}
void componentMovedOrResized (bool /*wasMoved*/, bool /*wasResized*/) override
{
if (owner->videoLoaded)
owner->updateContextPosition();
}
void componentPeerChanged() override
{
if (owner->videoLoaded)
owner->recreateNativeWindowAsync();
}
void componentVisibilityChanged() override
{
if (owner->videoLoaded)
owner->showContext (owner->isShowing());
}
private:
DirectShowComponent* const owner;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (DirectShowComponentWatcher)
};
//======================================================================
DirectShowComponent::DirectShowComponent (VideoRendererType type)
: videoLoaded (false),
looping (false)
{
setOpaque (true);
context = new DirectShowContext (*this, type);
componentWatcher = new DirectShowComponentWatcher (this);
}
DirectShowComponent::~DirectShowComponent()
{
componentWatcher = nullptr;
}
bool DirectShowComponent::isDirectShowAvailable()
{
static bool isDSAvailable = DirectShowHelpers::checkDShowAvailability();
return isDSAvailable;
}
void DirectShowComponent::recreateNativeWindowAsync()
{
context->recreateNativeWindowAsync();
repaint();
}
void DirectShowComponent::updateContextPosition()
{
context->updateContextPosition();
if (getWidth() > 0 && getHeight() > 0)
if (ComponentPeer* peer = getTopLevelComponent()->getPeer())
context->updateWindowPosition (peer->getAreaCoveredBy (*this));
}
void DirectShowComponent::showContext (const bool shouldBeVisible)
{
context->showWindow (shouldBeVisible);
}
void DirectShowComponent::paint (Graphics& g)
{
if (videoLoaded)
context->handleUpdateNowIfNeeded();
else
g.fillAll (Colours::grey);
}
//======================================================================
bool DirectShowComponent::loadMovie (const String& fileOrURLPath)
{
closeMovie();
videoLoaded = context->loadFile (fileOrURLPath);
if (videoLoaded)
{
videoPath = fileOrURLPath;
context->updateVideoPosition();
}
return videoLoaded;
}
bool DirectShowComponent::loadMovie (const File& videoFile)
{
return loadMovie (videoFile.getFullPathName());
}
bool DirectShowComponent::loadMovie (const URL& videoURL)
{
return loadMovie (videoURL.toString (false));
}
void DirectShowComponent::closeMovie()
{
if (videoLoaded)
context->release();
videoLoaded = false;
videoPath.clear();
}
//======================================================================
File DirectShowComponent::getCurrentMoviePath() const { return videoPath; }
bool DirectShowComponent::isMovieOpen() const { return videoLoaded; }
double DirectShowComponent::getMovieDuration() const { return videoLoaded ? context->getDuration() : 0.0; }
void DirectShowComponent::setLooping (const bool shouldLoop) { looping = shouldLoop; }
bool DirectShowComponent::isLooping() const { return looping; }
void DirectShowComponent::getMovieNormalSize (int &width, int &height) const
{
width = context->getVideoWidth();
height = context->getVideoHeight();
}
//======================================================================
void DirectShowComponent::setBoundsWithCorrectAspectRatio (const Rectangle<int>& spaceToFitWithin,
RectanglePlacement placement)
{
int normalWidth, normalHeight;
getMovieNormalSize (normalWidth, normalHeight);
const Rectangle<int> normalSize (0, 0, normalWidth, normalHeight);
if (! (spaceToFitWithin.isEmpty() || normalSize.isEmpty()))
setBounds (placement.appliedTo (normalSize, spaceToFitWithin));
else
setBounds (spaceToFitWithin);
}
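// e.g. a 640x360 video placed with RectanglePlacement::centred into a spaceToFitWithin
// of (0, 0, 800, 800) should end up with bounds of (0, 175, 800, 450).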
//======================================================================
void DirectShowComponent::play()
{
if (videoLoaded)
context->run();
}
void DirectShowComponent::stop()
{
if (videoLoaded)
context->pause();
}
bool DirectShowComponent::isPlaying() const
{
return context->isRunning();
}
void DirectShowComponent::goToStart()
{
setPosition (0.0);
}
void DirectShowComponent::setPosition (const double seconds)
{
if (videoLoaded)
context->setPosition (seconds);
}
double DirectShowComponent::getPosition() const
{
return videoLoaded ? context->getPosition() : 0.0;
}
void DirectShowComponent::setSpeed (const float newSpeed)
{
if (videoLoaded)
context->setSpeed (newSpeed);
}
void DirectShowComponent::setMovieVolume (const float newVolume)
{
if (videoLoaded)
context->setVolume (newVolume);
}
float DirectShowComponent::getMovieVolume() const
{
return videoLoaded ? context->getVolume() : 0.0f;
}

View file

@ -0,0 +1,483 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
using namespace QTOLibrary;
using namespace QTOControlLib;
bool juce_OpenQuickTimeMovieFromStream (InputStream* input, Movie& movie, Handle& dataHandle);
static bool isQTAvailable = false;
//==============================================================================
class QuickTimeMovieComponent::Pimpl
{
public:
Pimpl() : dataHandle (0)
{
}
~Pimpl()
{
clearHandle();
}
void clearHandle()
{
if (dataHandle != 0)
{
DisposeHandle (dataHandle);
dataHandle = 0;
}
}
IQTControlPtr qtControl;
IQTMoviePtr qtMovie;
Handle dataHandle;
};
//==============================================================================
QuickTimeMovieComponent::QuickTimeMovieComponent()
: movieLoaded (false),
controllerVisible (true)
{
pimpl = new Pimpl();
setMouseEventsAllowed (false);
}
QuickTimeMovieComponent::~QuickTimeMovieComponent()
{
closeMovie();
pimpl->qtControl = 0;
deleteControl();
pimpl = nullptr;
}
bool QuickTimeMovieComponent::isQuickTimeAvailable() noexcept
{
if (! isQTAvailable)
isQTAvailable = (InitializeQTML (0) == noErr) && (EnterMovies() == noErr);
return isQTAvailable;
}
//==============================================================================
void QuickTimeMovieComponent::createControlIfNeeded()
{
if (isShowing() && ! isControlCreated())
{
const IID qtIID = __uuidof (QTControl);
if (createControl (&qtIID))
{
const IID qtInterfaceIID = __uuidof (IQTControl);
pimpl->qtControl = (IQTControl*) queryInterface (&qtInterfaceIID);
if (pimpl->qtControl != nullptr)
{
pimpl->qtControl->Release(); // it has one ref too many at this point
pimpl->qtControl->QuickTimeInitialize();
pimpl->qtControl->PutSizing (qtMovieFitsControl);
if (movieFile != File::nonexistent)
loadMovie (movieFile, controllerVisible);
}
}
}
}
bool QuickTimeMovieComponent::isControlCreated() const
{
return isControlOpen();
}
bool QuickTimeMovieComponent::loadMovie (InputStream* movieStream,
const bool isControllerVisible)
{
const ScopedPointer<InputStream> movieStreamDeleter (movieStream);
movieFile = File::nonexistent;
movieLoaded = false;
pimpl->qtMovie = 0;
controllerVisible = isControllerVisible;
createControlIfNeeded();
if (isControlCreated())
{
if (pimpl->qtControl != 0)
{
pimpl->qtControl->Put_MovieHandle (0);
pimpl->clearHandle();
Movie movie = 0;   // initialised so the check below doesn't read an uninitialised handle if opening fails
if (juce_OpenQuickTimeMovieFromStream (movieStream, movie, pimpl->dataHandle))
{
pimpl->qtControl->Put_MovieHandle ((long) (pointer_sized_int) movie);
pimpl->qtMovie = pimpl->qtControl->GetMovie();
if (pimpl->qtMovie != 0)
pimpl->qtMovie->PutMovieControllerType (isControllerVisible ? qtMovieControllerTypeStandard
: qtMovieControllerTypeNone);
}
if (movie == 0)
pimpl->clearHandle();
}
movieLoaded = (pimpl->qtMovie != 0);
}
else
{
// You're trying to open a movie when the control hasn't yet been created, probably because
// you've not yet added this component to a Window and made the whole component hierarchy visible.
jassertfalse;
}
return movieLoaded;
}
void QuickTimeMovieComponent::closeMovie()
{
stop();
movieFile = File::nonexistent;
movieLoaded = false;
pimpl->qtMovie = 0;
if (pimpl->qtControl != 0)
pimpl->qtControl->Put_MovieHandle (0);
pimpl->clearHandle();
}
File QuickTimeMovieComponent::getCurrentMovieFile() const
{
return movieFile;
}
bool QuickTimeMovieComponent::isMovieOpen() const
{
return movieLoaded;
}
double QuickTimeMovieComponent::getMovieDuration() const
{
if (pimpl->qtMovie != 0)
return pimpl->qtMovie->GetDuration() / (double) pimpl->qtMovie->GetTimeScale();
return 0.0;
}
void QuickTimeMovieComponent::getMovieNormalSize (int& width, int& height) const
{
if (pimpl->qtMovie != 0)
{
struct QTRECT r = pimpl->qtMovie->GetNaturalRect();
width = r.right - r.left;
height = r.bottom - r.top;
}
else
{
width = height = 0;
}
}
void QuickTimeMovieComponent::play()
{
if (pimpl->qtMovie != 0)
pimpl->qtMovie->Play();
}
void QuickTimeMovieComponent::stop()
{
if (pimpl->qtMovie != 0)
pimpl->qtMovie->Stop();
}
bool QuickTimeMovieComponent::isPlaying() const
{
return pimpl->qtMovie != 0 && pimpl->qtMovie->GetRate() != 0.0f;
}
void QuickTimeMovieComponent::setPosition (const double seconds)
{
if (pimpl->qtMovie != 0)
pimpl->qtMovie->PutTime ((long) (seconds * pimpl->qtMovie->GetTimeScale()));
}
double QuickTimeMovieComponent::getPosition() const
{
if (pimpl->qtMovie != 0)
return pimpl->qtMovie->GetTime() / (double) pimpl->qtMovie->GetTimeScale();
return 0.0;
}
void QuickTimeMovieComponent::setSpeed (const float newSpeed)
{
if (pimpl->qtMovie != 0)
pimpl->qtMovie->PutRate (newSpeed);
}
void QuickTimeMovieComponent::setMovieVolume (const float newVolume)
{
if (pimpl->qtMovie != 0)
{
pimpl->qtMovie->PutAudioVolume (newVolume);
pimpl->qtMovie->PutAudioMute (newVolume <= 0);
}
}
float QuickTimeMovieComponent::getMovieVolume() const
{
if (pimpl->qtMovie != 0)
return pimpl->qtMovie->GetAudioVolume();
return 0.0f;
}
void QuickTimeMovieComponent::setLooping (const bool shouldLoop)
{
if (pimpl->qtMovie != 0)
pimpl->qtMovie->PutLoop (shouldLoop);
}
bool QuickTimeMovieComponent::isLooping() const
{
return pimpl->qtMovie != 0 && pimpl->qtMovie->GetLoop();
}
bool QuickTimeMovieComponent::isControllerVisible() const
{
return controllerVisible;
}
void QuickTimeMovieComponent::parentHierarchyChanged()
{
createControlIfNeeded();
QTCompBaseClass::parentHierarchyChanged();
}
void QuickTimeMovieComponent::visibilityChanged()
{
createControlIfNeeded();
QTCompBaseClass::visibilityChanged();
}
void QuickTimeMovieComponent::paint (Graphics& g)
{
if (! isControlCreated())
g.fillAll (Colours::black);
}
//==============================================================================
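// Wraps an in-memory Handle in a QuickTime data reference, appending a dummy file-name
// suffix and data-ref extension atoms so that QuickTime can work out the container type.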
static Handle createHandleDataRef (Handle dataHandle, const char* fileName)
{
Handle dataRef = 0;
OSStatus err = PtrToHand (&dataHandle, &dataRef, sizeof (Handle));
if (err == noErr)
{
Str255 suffix;
#if JUCE_MSVC
#pragma warning (push)
#pragma warning (disable: 4244 4996)
#endif
suffix[0] = strlen (fileName);
strncpy ((char*) suffix + 1, fileName, 128);
#if JUCE_MSVC
#pragma warning (pop)
#endif
err = PtrAndHand (suffix, dataRef, suffix[0] + 1);
if (err == noErr)
{
long atoms[3];
atoms[0] = EndianU32_NtoB (3 * sizeof (long));
atoms[1] = EndianU32_NtoB (kDataRefExtensionMacOSFileType);
atoms[2] = EndianU32_NtoB (MovieFileType);
err = PtrAndHand (atoms, dataRef, 3 * sizeof (long));
if (err == noErr)
return dataRef;
}
DisposeHandle (dataRef);
}
return 0;
}
static CFStringRef juceStringToCFString (const String& s)
{
return CFStringCreateWithCString (kCFAllocatorDefault, s.toUTF8(), kCFStringEncodingUTF8);
}
static bool openMovie (QTNewMoviePropertyElement* props, int prop, Movie& movie)
{
Boolean trueBool = true;
props[prop].propClass = kQTPropertyClass_MovieInstantiation;
props[prop].propID = kQTMovieInstantiationPropertyID_DontResolveDataRefs;
props[prop].propValueSize = sizeof (trueBool);
props[prop].propValueAddress = &trueBool;
++prop;
props[prop].propClass = kQTPropertyClass_MovieInstantiation;
props[prop].propID = kQTMovieInstantiationPropertyID_AsyncOK;
props[prop].propValueSize = sizeof (trueBool);
props[prop].propValueAddress = &trueBool;
++prop;
Boolean isActive = true;
props[prop].propClass = kQTPropertyClass_NewMovieProperty;
props[prop].propID = kQTNewMoviePropertyID_Active;
props[prop].propValueSize = sizeof (isActive);
props[prop].propValueAddress = &isActive;
++prop;
MacSetPort (0);
jassert (prop <= 5);
OSStatus err = NewMovieFromProperties (prop, props, 0, 0, &movie);
return err == noErr;
}
bool juce_OpenQuickTimeMovieFromStream (InputStream* input, Movie& movie, Handle& dataHandle)
{
if (input == nullptr)
return false;
dataHandle = 0;
bool ok = false;
QTNewMoviePropertyElement props[5] = { 0 };
int prop = 0;
DataReferenceRecord dr;
props[prop].propClass = kQTPropertyClass_DataLocation;
props[prop].propID = kQTDataLocationPropertyID_DataReference;
props[prop].propValueSize = sizeof (dr);
props[prop].propValueAddress = &dr;
++prop;
FileInputStream* const fin = dynamic_cast <FileInputStream*> (input);
if (fin != nullptr)
{
CFStringRef filePath = juceStringToCFString (fin->getFile().getFullPathName());
QTNewDataReferenceFromFullPathCFString (filePath, (QTPathStyle) kQTNativeDefaultPathStyle, 0,
&dr.dataRef, &dr.dataRefType);
ok = openMovie (props, prop, movie);
DisposeHandle (dr.dataRef);
CFRelease (filePath);
}
else
{
// sanity-check because this currently needs to load the whole stream into memory..
jassert (input->getTotalLength() < 50 * 1024 * 1024);
dataHandle = NewHandle ((Size) input->getTotalLength());
HLock (dataHandle);
// read the entire stream into memory - this is a pain, but can't get it to work
// properly using a custom callback to supply the data.
input->read (*dataHandle, (int) input->getTotalLength());
HUnlock (dataHandle);
// different types to get QT to try. (We should really be a bit smarter here by
// working out in advance which one the stream contains, rather than just trying
// each one)
static const char* const suffixesToTry[] = { "\04.mov", "\04.mp3",
"\04.avi", "\04.m4a" };
for (int i = 0; i < numElementsInArray (suffixesToTry) && ! ok; ++i)
{
/* // this fails for some bizarre reason - it can be bodged to work with
// movies, but can't seem to do it for other file types..
QTNewMovieUserProcRecord procInfo;
procInfo.getMovieUserProc = NewGetMovieUPP (readMovieStreamProc);
procInfo.getMovieUserProcRefcon = this;
procInfo.defaultDataRef.dataRef = dataRef;
procInfo.defaultDataRef.dataRefType = HandleDataHandlerSubType;
props[prop].propClass = kQTPropertyClass_DataLocation;
props[prop].propID = kQTDataLocationPropertyID_MovieUserProc;
props[prop].propValueSize = sizeof (procInfo);
props[prop].propValueAddress = (void*) &procInfo;
++prop; */
dr.dataRef = createHandleDataRef (dataHandle, suffixesToTry [i]);
dr.dataRefType = HandleDataHandlerSubType;
ok = openMovie (props, prop, movie);
DisposeHandle (dr.dataRef);
}
}
return ok;
}
bool QuickTimeMovieComponent::loadMovie (const File& movieFile_,
const bool isControllerVisible)
{
const bool ok = loadMovie (static_cast <InputStream*> (movieFile_.createInputStream()), isControllerVisible);
movieFile = movieFile_;
return ok;
}
bool QuickTimeMovieComponent::loadMovie (const URL& movieURL,
const bool isControllerVisible)
{
return loadMovie (static_cast <InputStream*> (movieURL.createInputStream (false)), isControllerVisible);
}
void QuickTimeMovieComponent::goToStart()
{
setPosition (0.0);
}
void QuickTimeMovieComponent::setBoundsWithCorrectAspectRatio (const Rectangle<int>& spaceToFitWithin,
RectanglePlacement placement)
{
int normalWidth, normalHeight;
getMovieNormalSize (normalWidth, normalHeight);
const Rectangle<int> normalSize (0, 0, normalWidth, normalHeight);
if (! (spaceToFitWithin.isEmpty() || normalSize.isEmpty()))
setBounds (placement.appliedTo (normalSize, spaceToFitWithin));
else
setBounds (spaceToFitWithin);
}

View file

@ -0,0 +1,216 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#ifndef JUCE_DIRECTSHOWCOMPONENT_H_INCLUDED
#define JUCE_DIRECTSHOWCOMPONENT_H_INCLUDED
#if JUCE_DIRECTSHOW || DOXYGEN
//==============================================================================
/**
A window that can play back a DirectShow video.
@note A playback controller bar is not implemented for this component.
*/
class JUCE_API DirectShowComponent : public Component
{
public:
//==============================================================================
/** DirectShow video renderer type.
See MSDN for advice about choosing the right renderer.
*/
enum VideoRendererType
{
dshowDefault, /**< VMR7 for Windows XP, EVR for Windows Vista and later */
dshowVMR7, /**< Video Mixing Renderer 7 */
dshowEVR /**< Enhanced Video Renderer */
};
/** Creates a DirectShowComponent, initially blank.
Use the loadMovie() method to load a video once you've added the
component to a window, (or put it on the desktop as a heavyweight window).
Loading a video when the component isn't visible can cause problems, as
DirectShow needs a window handle to initialise properly.
@see VideoRendererType
*/
DirectShowComponent (VideoRendererType type = dshowDefault);
/** Destructor. */
~DirectShowComponent();
/** Returns true if DirectShow is installed and working on this machine. */
static bool isDirectShowAvailable();
//==============================================================================
/** Tries to load a DirectShow video from a file or URL into the player.
It's best to call this function once you've added the component to a window,
(or put it on the desktop as a heavyweight window). Loading a video when the
component isn't visible can cause problems, because DirectShow needs a window
handle to do its stuff.
@param fileOrURLPath the file or URL path to open
@returns true if the video opens successfully
*/
bool loadMovie (const String& fileOrURLPath);
/** Tries to load a DirectShow video from a file into the player.
It's best to call this function once you've added the component to a window,
(or put it on the desktop as a heavyweight window). Loading a video when the
component isn't visible can cause problems, because DirectShow needs a window
handle to do its stuff.
@param videoFile the video file to open
@returns true if the video opens successfully
*/
bool loadMovie (const File& videoFile);
/** Tries to load a DirectShow video from a URL into the player.
It's best to call this function once you've added the component to a window,
(or put it on the desktop as a heavyweight window). Loading a video when the
component isn't visible can cause problems, because DirectShow needs a window
handle to do its stuff.
@param videoURL the video URL to open
@returns true if the video opens successfully
*/
bool loadMovie (const URL& videoURL);
/** Closes the video, if one is open. */
void closeMovie();
/** Returns the file path or URL from which the video file was loaded.
If there isn't one, this returns File::nonexistent.
*/
File getCurrentMoviePath() const;
/** Returns true if there's currently a video open. */
bool isMovieOpen() const;
/** Returns the length of the video, in seconds. */
double getMovieDuration() const;
/** Returns the video's natural size, in pixels.
You can use this to resize the component to show the video at its preferred
scale.
If no video is loaded, the size returned will be 0 x 0.
*/
void getMovieNormalSize (int& width, int& height) const;
/** This will position the component within a given area, keeping its aspect
ratio correct according to the video's normal size.
The component will be made as large as it can go within the space, and will
be aligned according to the justification value if this means there are gaps at
the top or sides.
*/
void setBoundsWithCorrectAspectRatio (const Rectangle<int>& spaceToFitWithin,
RectanglePlacement placement);
/** Starts the video playing. */
void play();
/** Stops the video playing. */
void stop();
/** Returns true if the video is currently playing. */
bool isPlaying() const;
/** Moves the video's position back to the start. */
void goToStart();
/** Sets the video's position to a given time. */
void setPosition (double seconds);
/** Returns the current play position of the video. */
double getPosition() const;
/** Changes the video playback rate.
A value of 1 is normal speed, greater values play it proportionately faster,
smaller values play it slower.
*/
void setSpeed (float newSpeed);
/** Changes the video's playback volume.
@param newVolume the volume in the range 0 (silent) to 1.0 (full)
*/
void setMovieVolume (float newVolume);
/** Returns the video's playback volume.
@returns the volume in the range 0 (silent) to 1.0 (full)
*/
float getMovieVolume() const;
/** Tells the video whether it should loop. */
void setLooping (bool shouldLoop);
/** Returns true if the video is currently looping.
@see setLooping
*/
bool isLooping() const;
//==============================================================================
/** @internal */
void paint (Graphics&) override;
private:
//==============================================================================
String videoPath;
bool videoLoaded, looping;
class DirectShowContext;
friend class DirectShowContext;
friend struct ContainerDeletePolicy<DirectShowContext>;
ScopedPointer<DirectShowContext> context;
class DirectShowComponentWatcher;
friend class DirectShowComponentWatcher;
friend struct ContainerDeletePolicy<DirectShowComponentWatcher>;
ScopedPointer<DirectShowComponentWatcher> componentWatcher;
//==============================================================================
void updateContextPosition();
void showContext (bool shouldBeVisible);
void recreateNativeWindowAsync();
//==============================================================================
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (DirectShowComponent)
};
#endif
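#if JUCE_DIRECTSHOW
// A minimal usage sketch following the documentation above (illustrative only: the
// wrapper component and the media path are assumptions, and the component must be
// visible inside a window before loadMovie() is called).
struct ExampleVideoPlayer  : public Component
{
    ExampleVideoPlayer()
    {
        addAndMakeVisible (video);
        setSize (640, 480);
    }

    void resized() override     { video.setBounds (getLocalBounds()); }

    void startPlayback()
    {
        if (video.loadMovie (File ("C:\\media\\example.wmv")))   // assumed example path
            video.play();
    }

    DirectShowComponent video;
};
#endif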
#endif // JUCE_DIRECTSHOWCOMPONENT_H_INCLUDED

View file

@ -0,0 +1,220 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#ifndef JUCE_QUICKTIMEMOVIECOMPONENT_H_INCLUDED
#define JUCE_QUICKTIMEMOVIECOMPONENT_H_INCLUDED
// (NB: This stuff mustn't go inside the "#if QUICKTIME" block, or it'll break the
// amalgamated build)
#ifndef DOXYGEN
#if JUCE_WINDOWS
typedef ActiveXControlComponent QTCompBaseClass;
#elif JUCE_MAC
typedef NSViewComponent QTCompBaseClass;
#endif
#endif
#if JUCE_QUICKTIME || DOXYGEN
//==============================================================================
/**
A window that can play back a QuickTime movie.
*/
class JUCE_API QuickTimeMovieComponent : public QTCompBaseClass
{
public:
//==============================================================================
/** Creates a QuickTimeMovieComponent, initially blank.
Use the loadMovie() method to load a movie once you've added the
component to a window, (or put it on the desktop as a heavyweight window).
Loading a movie when the component isn't visible can cause problems, as
QuickTime needs a window handle to initialise properly.
*/
QuickTimeMovieComponent();
/** Destructor. */
~QuickTimeMovieComponent();
/** Returns true if QT is installed and working on this machine.
*/
static bool isQuickTimeAvailable() noexcept;
//==============================================================================
/** Tries to load a QuickTime movie from a file into the player.
It's best to call this function once you've added the component to a window,
(or put it on the desktop as a heavyweight window). Loading a movie when the
component isn't visible can cause problems, because QuickTime needs a window
handle to do its stuff.
@param movieFile the .mov file to open
@param isControllerVisible whether to show a controller bar at the bottom
@returns true if the movie opens successfully
*/
bool loadMovie (const File& movieFile,
bool isControllerVisible);
/** Tries to load a QuickTime movie from a URL into the player.
It's best to call this function once you've added the component to a window,
(or put it on the desktop as a heavyweight window). Loading a movie when the
component isn't visible can cause problems, because QuickTime needs a window
handle to do its stuff.
@param movieURL the URL of the .mov file to open
@param isControllerVisible whether to show a controller bar at the bottom
@returns true if the movie opens successfully
*/
bool loadMovie (const URL& movieURL,
bool isControllerVisible);
/** Tries to load a QuickTime movie from a stream into the player.
It's best to call this function once you've added the component to a window,
(or put it on the desktop as a heavyweight window). Loading a movie when the
component isn't visible can cause problems, because QuickTime needs a window
handle to do its stuff.
@param movieStream a stream containing a .mov file. The component may try
to read the whole stream before playing, rather than
streaming from it.
@param isControllerVisible whether to show a controller bar at the bottom
@returns true if the movie opens successfully
*/
bool loadMovie (InputStream* movieStream,
bool isControllerVisible);
/** Closes the movie, if one is open. */
void closeMovie();
/** Returns the movie file that is currently open.
If there isn't one, this returns File::nonexistent
*/
File getCurrentMovieFile() const;
/** Returns true if there's currently a movie open. */
bool isMovieOpen() const;
/** Returns the length of the movie, in seconds. */
double getMovieDuration() const;
/** Returns the movie's natural size, in pixels.
You can use this to resize the component to show the movie at its preferred
scale.
If no movie is loaded, the size returned will be 0 x 0.
*/
void getMovieNormalSize (int& width, int& height) const;
/** This will position the component within a given area, keeping its aspect
ratio correct according to the movie's normal size.
The component will be made as large as it can go within the space, and will
be aligned according to the justification value if this means there are gaps at
the top or sides.
*/
void setBoundsWithCorrectAspectRatio (const Rectangle<int>& spaceToFitWithin,
RectanglePlacement placement);
/** Starts the movie playing. */
void play();
/** Stops the movie playing. */
void stop();
/** Returns true if the movie is currently playing. */
bool isPlaying() const;
/** Moves the movie's position back to the start. */
void goToStart();
/** Sets the movie's position to a given time. */
void setPosition (double seconds);
/** Returns the current play position of the movie. */
double getPosition() const;
/** Changes the movie playback rate.
A value of 1 is normal speed, greater values play it proportionately faster,
smaller values play it slower.
*/
void setSpeed (float newSpeed);
/** Changes the movie's playback volume.
@param newVolume the volume in the range 0 (silent) to 1.0 (full)
*/
void setMovieVolume (float newVolume);
/** Returns the movie's playback volume.
@returns the volume in the range 0 (silent) to 1.0 (full)
*/
float getMovieVolume() const;
/** Tells the movie whether it should loop. */
void setLooping (bool shouldLoop);
/** Returns true if the movie is currently looping.
@see setLooping
*/
bool isLooping() const;
/** True if the native QuickTime controller bar is shown in the window.
@see loadMovie
*/
bool isControllerVisible() const;
//==============================================================================
/** @internal */
void paint (Graphics&) override;
private:
//==============================================================================
File movieFile;
bool movieLoaded, controllerVisible, looping;
#if JUCE_WINDOWS
void parentHierarchyChanged() override;
void visibilityChanged() override;
void createControlIfNeeded();
bool isControlCreated() const;
class Pimpl;
friend struct ContainerDeletePolicy<Pimpl>;
ScopedPointer<Pimpl> pimpl;
#else
void* movie;
#endif
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (QuickTimeMovieComponent)
};
#endif
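#if JUCE_QUICKTIME
// A minimal usage sketch following the documentation above (illustrative only: the
// wrapper component is an assumption). The component needs to be visible inside a
// window before loadMovie() is called so that QuickTime gets a window handle.
struct ExampleMoviePlayer  : public Component
{
    ExampleMoviePlayer()
    {
        addAndMakeVisible (movieComp);
        setSize (640, 480);
    }

    void resized() override     { movieComp.setBounds (getLocalBounds()); }

    void startPlayback (const File& movieFile)
    {
        if (movieComp.loadMovie (movieFile, true))   // true = show the controller bar
        {
            movieComp.setLooping (false);
            movieComp.play();
        }
    }

    QuickTimeMovieComponent movieComp;
};
#endif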
#endif // JUCE_QUICKTIMEMOVIECOMPONENT_H_INCLUDED