mirror of https://github.com/juce-framework/JUCE.git (synced 2026-02-06 04:00:08 +00:00)

Tweaks to the win32 camera capture code to reduce CPU load.

parent b80bb4bf38
commit ea16741b3d

8 changed files with 308 additions and 342 deletions
@@ -172,11 +172,12 @@ inline void swapVariables (Type& variable1, Type& variable2)
     int numElements = numElementsInArray (myArray) // returns 3
     @endcode
 */
-template <typename Type>
-inline int numElementsInArray (Type& array)
+template <typename Type, int N>
+inline int numElementsInArray (Type (&array)[N])
 {
     (void) array; // (required to avoid a spurious warning in MS compilers)
-    return static_cast<int> (sizeof (array) / sizeof (0[array]));
+    sizeof (0[array]); // This line should cause an error if you pass an object with a user-defined subscript operator
+    return N;
 }
 
 //==============================================================================
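The switch above from a plain reference to a sized array reference is what makes the "should cause an error" comment work: the element count N is deduced by the compiler from the array's declared size, so passing a pointer (or anything with a user-defined subscript operator) now fails to compile instead of silently returning a wrong count. A minimal standalone sketch of the same technique (numElements below is a stand-in name, not the JUCE function itself):

    #include <iostream>

    // The element count is a template parameter deduced from the array type itself.
    template <typename Type, int N>
    inline int numElements (Type (&)[N])
    {
        return N;
    }

    int main()
    {
        int myArray[] = { 1, 2, 3 };
        std::cout << numElements (myArray) << '\n';   // prints 3

        // int* p = myArray;
        // numElements (p);   // would not compile: a pointer carries no size information
        return 0;
    }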
@@ -184,7 +185,7 @@ inline int numElementsInArray (Type& array)
 
 /** Using juce_hypot and juce_hypotf is easier than dealing with all the different
     versions of these functions of various platforms and compilers. */
-inline double juce_hypot (double a, double b)
+inline double juce_hypot (double a, double b) throw()
 {
   #if JUCE_WINDOWS
     return _hypot (a, b);
@@ -69,22 +69,12 @@ void Drawable::drawAt (Graphics& g, const float x, const float y, const float op
 }
 
 void Drawable::drawWithin (Graphics& g,
-                           const int destX,
-                           const int destY,
-                           const int destW,
-                           const int destH,
+                           const Rectangle<float>& destArea,
                            const RectanglePlacement& placement,
                            const float opacity) const
 {
-    if (destW > 0 && destH > 0)
-    {
-        Rectangle<float> bounds (getBounds());
-
-        draw (g, opacity,
-              placement.getTransformToFit (bounds.getX(), bounds.getY(), bounds.getWidth(), bounds.getHeight(),
-                                           (float) destX, (float) destY,
-                                           (float) destW, (float) destH));
-    }
+    if (! destArea.isEmpty())
+        draw (g, opacity, placement.getTransformToFit (getBounds(), destArea));
 }
 
 //==============================================================================
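For callers, the Drawable::drawWithin() change above replaces the four separate ints with a single Rectangle<float> destination area. A hedged sketch of how a call site adapts, assuming the JUCE headers are already included (the helper name and coordinates are illustrative only):

    // Hypothetical call site. Before: drawable.drawWithin (g, 10, 10, 200, 150, placement, 1.0f);
    void paintThumbnail (Graphics& g, const Drawable& drawable)
    {
        const Rectangle<float> target (10.0f, 10.0f, 200.0f, 150.0f);

        // After this commit the destination is a single rectangle;
        // an empty rectangle simply draws nothing.
        drawable.drawWithin (g, target,
                             RectanglePlacement (RectanglePlacement::centred),
                             1.0f);   // opacity
    }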
@@ -38,8 +38,7 @@
 #define log(a) {}
 #endif
 
-#define JUCE_ASIOCALLBACK // should probably use this to define the callback type, but
-                          // the asio header doesn't actually specify a calling convention for the functions..
+#define JUCE_ASIOCALLBACK __cdecl
 
 //==============================================================================
 #if ASIO_DEBUGGING
@@ -146,40 +145,15 @@ public:
         }
     }
 
-    const StringArray getOutputChannelNames()
-    {
-        return outputChannelNames;
-    }
+    const StringArray getOutputChannelNames() { return outputChannelNames; }
+    const StringArray getInputChannelNames() { return inputChannelNames; }
 
-    const StringArray getInputChannelNames()
-    {
-        return inputChannelNames;
-    }
+    int getNumSampleRates() { return sampleRates.size(); }
+    double getSampleRate (int index) { return sampleRates [index]; }
 
-    int getNumSampleRates()
-    {
-        return sampleRates.size();
-    }
-
-    double getSampleRate (int index)
-    {
-        return sampleRates [index];
-    }
-
-    int getNumBufferSizesAvailable()
-    {
-        return bufferSizes.size();
-    }
-
-    int getBufferSizeSamples (int index)
-    {
-        return bufferSizes [index];
-    }
-
-    int getDefaultBufferSize()
-    {
-        return preferredSize;
-    }
+    int getNumBufferSizesAvailable() { return bufferSizes.size(); }
+    int getBufferSizeSamples (int index) { return bufferSizes [index]; }
+    int getDefaultBufferSize() { return preferredSize; }
 
     const String open (const BigInteger& inputChannels,
                        const BigInteger& outputChannels,
@@ -633,45 +607,18 @@ public:
         }
     }
 
-    bool isOpen()
-    {
-        return isOpen_ || insideControlPanelModalLoop;
-    }
+    bool isOpen() { return isOpen_ || insideControlPanelModalLoop; }
+    bool isPlaying() { return isASIOOpen && (currentCallback != 0); }
 
-    int getCurrentBufferSizeSamples()
-    {
-        return currentBlockSizeSamples;
-    }
+    int getCurrentBufferSizeSamples() { return currentBlockSizeSamples; }
+    double getCurrentSampleRate() { return currentSampleRate; }
+    int getCurrentBitDepth() { return currentBitDepth; }
 
-    double getCurrentSampleRate()
-    {
-        return currentSampleRate;
-    }
+    const BigInteger getActiveOutputChannels() const { return currentChansOut; }
+    const BigInteger getActiveInputChannels() const { return currentChansIn; }
 
-    const BigInteger getActiveOutputChannels() const
-    {
-        return currentChansOut;
-    }
-
-    const BigInteger getActiveInputChannels() const
-    {
-        return currentChansIn;
-    }
-
-    int getCurrentBitDepth()
-    {
-        return currentBitDepth;
-    }
-
-    int getOutputLatencyInSamples()
-    {
-        return outputLatency + currentBlockSizeSamples / 4;
-    }
-
-    int getInputLatencyInSamples()
-    {
-        return inputLatency + currentBlockSizeSamples / 4;
-    }
+    int getOutputLatencyInSamples() { return outputLatency + currentBlockSizeSamples / 4; }
+    int getInputLatencyInSamples() { return inputLatency + currentBlockSizeSamples / 4; }
 
     void start (AudioIODeviceCallback* callback)
     {
@@ -697,20 +644,8 @@ public:
             lastCallback->audioDeviceStopped();
     }
 
-    bool isPlaying()
-    {
-        return isASIOOpen && (currentCallback != 0);
-    }
-
-    const String getLastError()
-    {
-        return error;
-    }
-
-    bool hasControlPanel() const
-    {
-        return true;
-    }
+    const String getLastError() { return error; }
+    bool hasControlPanel() const { return true; }
 
     bool showControlPanel()
     {
@@ -840,7 +775,6 @@ private:
     bool volatile insideControlPanelModalLoop;
     bool volatile shouldUsePreferredSize;
 
-
     //==============================================================================
     void removeCurrentDriver()
     {
@@ -1242,7 +1176,6 @@ private:
             for (i = 0; i < numActiveInputChans; ++i)
             {
                 float* const dst = inBuffers[i];
-
                 jassert (dst != 0);
 
                 const char* const src = (const char*) (infos[i].buffers[bi]);
@@ -1279,16 +1212,12 @@ private:
                 }
             }
 
-            currentCallback->audioDeviceIOCallback ((const float**) inBuffers,
-                                                    numActiveInputChans,
-                                                    outBuffers,
-                                                    numActiveOutputChans,
-                                                    samps);
+            currentCallback->audioDeviceIOCallback ((const float**) inBuffers, numActiveInputChans,
+                                                    outBuffers, numActiveOutputChans, samps);
 
             for (i = 0; i < numActiveOutputChans; ++i)
             {
                 float* const src = outBuffers[i];
-
                 jassert (src != 0);
 
                 char* const dst = (char*) (infos [numActiveInputChans + i].buffers[bi]);
@@ -45,7 +45,8 @@ public:
           width (0),
           height (0),
           activeUsers (0),
-          recordNextFrameTime (false)
+          recordNextFrameTime (false),
+          previewMaxFPS (60)
     {
         HRESULT hr = graphBuilder.CoCreateInstance (CLSID_FilterGraph);
         if (FAILED (hr))
@@ -176,6 +177,11 @@ public:
             mediaControl->Stop();
     }
 
+    int getPreviewMaxFPS() const
+    {
+        return previewMaxFPS;
+    }
+
     void handleFrame (double /*time*/, BYTE* buffer, long /*bufferSize*/)
     {
         if (recordNextFrameTime)
@@ -246,13 +252,14 @@ public:
             g.drawImage (activeImage, rx, ry, rw, rh, 0, 0, width, height);
     }
 
-    bool createFileCaptureFilter (const File& file)
+    bool createFileCaptureFilter (const File& file, int quality)
     {
         removeFileCaptureFilter();
         file.deleteFile();
         mediaControl->Stop();
         firstRecordedTime = Time();
         recordNextFrameTime = true;
+        previewMaxFPS = 60;
 
         HRESULT hr = asfWriter.CoCreateInstance (CLSID_WMAsfWriter);
@@ -278,17 +285,30 @@ public:
            hr = WMCreateProfileManager (profileManager.resetAndGetPointerAddress());
 
            // This gibberish is the DirectShow profile for a video-only wmv file.
-           String prof ("<profile version=\"589824\" storageformat=\"1\" name=\"Quality\" description=\"Quality type for output.\"><streamconfig "
-                        "majortype=\"{73646976-0000-0010-8000-00AA00389B71}\" streamnumber=\"1\" streamname=\"Video Stream\" inputname=\"Video409\" bitrate=\"894960\" "
-                        "bufferwindow=\"0\" reliabletransport=\"1\" decodercomplexity=\"AU\" rfc1766langid=\"en-us\"><videomediaprops maxkeyframespacing=\"50000000\" quality=\"90\"/>"
-                        "<wmmediatype subtype=\"{33564D57-0000-0010-8000-00AA00389B71}\" bfixedsizesamples=\"0\" btemporalcompression=\"1\" lsamplesize=\"0\"> <videoinfoheader "
-                        "dwbitrate=\"894960\" dwbiterrorrate=\"0\" avgtimeperframe=\"100000\"><rcsource left=\"0\" top=\"0\" right=\"$WIDTH\" bottom=\"$HEIGHT\"/> <rctarget "
-                        "left=\"0\" top=\"0\" right=\"$WIDTH\" bottom=\"$HEIGHT\"/> <bitmapinfoheader biwidth=\"$WIDTH\" biheight=\"$HEIGHT\" biplanes=\"1\" bibitcount=\"24\" "
-                        "bicompression=\"WMV3\" bisizeimage=\"0\" bixpelspermeter=\"0\" biypelspermeter=\"0\" biclrused=\"0\" biclrimportant=\"0\"/> "
-                        "</videoinfoheader></wmmediatype></streamconfig></profile>");
+           String prof ("<profile version=\"589824\" storageformat=\"1\" name=\"Quality\" description=\"Quality type for output.\">"
+                        " <streamconfig majortype=\"{73646976-0000-0010-8000-00AA00389B71}\" streamnumber=\"1\""
+                        " streamname=\"Video Stream\" inputname=\"Video409\" bitrate=\"894960\""
+                        " bufferwindow=\"0\" reliabletransport=\"1\" decodercomplexity=\"AU\" rfc1766langid=\"en-us\">"
+                        " <videomediaprops maxkeyframespacing=\"50000000\" quality=\"90\"/>"
+                        " <wmmediatype subtype=\"{33564D57-0000-0010-8000-00AA00389B71}\" bfixedsizesamples=\"0\""
+                        " btemporalcompression=\"1\" lsamplesize=\"0\">"
+                        " <videoinfoheader dwbitrate=\"894960\" dwbiterrorrate=\"0\" avgtimeperframe=\"$AVGTIMEPERFRAME\">"
+                        " <rcsource left=\"0\" top=\"0\" right=\"$WIDTH\" bottom=\"$HEIGHT\"/>"
+                        " <rctarget left=\"0\" top=\"0\" right=\"$WIDTH\" bottom=\"$HEIGHT\"/>"
+                        " <bitmapinfoheader biwidth=\"$WIDTH\" biheight=\"$HEIGHT\" biplanes=\"1\" bibitcount=\"24\""
+                        " bicompression=\"WMV3\" bisizeimage=\"0\" bixpelspermeter=\"0\" biypelspermeter=\"0\""
+                        " biclrused=\"0\" biclrimportant=\"0\"/>"
+                        " </videoinfoheader>"
+                        " </wmmediatype>"
+                        " </streamconfig>"
+                        "</profile>");
 
+           const int fps[] = { 10, 15, 30 };
+           const int maxFramesPerSecond = fps [quality % numElementsInArray (fps)];
+
            prof = prof.replace ("$WIDTH", String (width))
-                      .replace ("$HEIGHT", String (height));
+                      .replace ("$HEIGHT", String (height))
+                      .replace ("$AVGTIMEPERFRAME", String (10000000 / maxFramesPerSecond));
 
            ComSmartPtr <IWMProfile> currentProfile;
            hr = profileManager->LoadProfileByData ((const WCHAR*) prof, currentProfile.resetAndGetPointerAddress());
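The $AVGTIMEPERFRAME placeholder added above is where the quality argument reaches the encoder: the WMV profile expresses frame duration in 100-nanosecond units, so a cap of f frames per second becomes 10,000,000 / f. A small standalone sketch of that mapping, mirroring the fps table in the diff:

    #include <cstdio>

    int main()
    {
        const int fps[] = { 10, 15, 30 };   // quality 0..2, as in the diff

        for (int quality = 0; quality <= 2; ++quality)
        {
            const int maxFramesPerSecond = fps[quality % 3];
            const int avgTimePerFrame = 10000000 / maxFramesPerSecond;   // 100 ns units

            std::printf ("quality %d -> %2d fps -> avgtimeperframe %d\n",
                         quality, maxFramesPerSecond, avgTimePerFrame);
        }
        // quality 0 -> 10 fps -> avgtimeperframe 1000000
        // quality 1 -> 15 fps -> avgtimeperframe  666666
        // quality 2 -> 30 fps -> avgtimeperframe  333333
        return 0;
    }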
@@ -306,6 +326,7 @@ public:
                       && ok && activeUsers > 0
                       && SUCCEEDED (mediaControl->Run()))
                 {
+                    previewMaxFPS = (quality < 2) ? 15 : 25; // throttle back the preview comps to try to leave the cpu free for encoding
                     return true;
                 }
             }
@@ -335,6 +356,8 @@ public:
 
         if (ok && activeUsers > 0)
             mediaControl->Run();
+
+        previewMaxFPS = 60;
     }
 
     //==============================================================================
@@ -377,7 +400,7 @@ public:
     {
     public:
         DShowCaptureViewerComp (DShowCameraDeviceInteral* const owner_)
-            : owner (owner_)
+            : owner (owner_), maxFPS (15), lastRepaintTime (0)
         {
             setOpaque (true);
             owner->addChangeListener (this);
@@ -414,11 +437,22 @@ public:
 
         void changeListenerCallback (void*)
         {
-            repaint();
+            const int64 now = Time::currentTimeMillis();
+
+            if (now >= lastRepaintTime + (1000 / maxFPS))
+            {
+                lastRepaintTime = now;
+                repaint();
+
+                if (owner != 0)
+                    maxFPS = owner->getPreviewMaxFPS();
+            }
         }
 
    private:
        DShowCameraDeviceInteral* owner;
+       int maxFPS;
+       int64 lastRepaintTime;
    };
 
    //==============================================================================
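The changeListenerCallback() rewrite above is the main CPU saving on the preview path: frames still arrive from DirectShow, but repaints are limited to maxFPS per second, i.e. at most one every 1000 / maxFPS milliseconds (about 66 ms at the 15 fps used while recording at low quality). A generic hedged sketch of the same rate-limiting pattern, using the standard clock instead of JUCE's Time::currentTimeMillis():

    #include <chrono>
    #include <cstdio>
    #include <thread>

    // Accepts at most maxEventsPerSecond calls; the rest are dropped.
    class Throttle
    {
    public:
        explicit Throttle (int maxEventsPerSecond)
            : minIntervalMs (1000 / maxEventsPerSecond), lastTimeMs (0) {}

        bool shouldFire()
        {
            const long long now = nowMs();

            if (now >= lastTimeMs + minIntervalMs)
            {
                lastTimeMs = now;
                return true;
            }
            return false;
        }

    private:
        static long long nowMs()
        {
            using namespace std::chrono;
            return duration_cast<milliseconds> (steady_clock::now().time_since_epoch()).count();
        }

        long long minIntervalMs, lastTimeMs;
    };

    int main()
    {
        Throttle repaintThrottle (15);   // cap "repaints" at 15 per second, as while recording

        for (int frame = 0; frame < 100; ++frame)   // simulate ~100 incoming frames
        {
            if (repaintThrottle.shouldFire())
                std::printf ("repaint on frame %d\n", frame);

            std::this_thread::sleep_for (std::chrono::milliseconds (10));   // ~100 fps source
        }
        return 0;
    }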
@@ -449,6 +483,7 @@ private:
     Image activeImage;
 
     bool recordNextFrameTime;
+    int previewMaxFPS;
 
     void getVideoSizes (IAMStreamConfig* const streamConfig)
     {
@@ -681,11 +716,12 @@ const String CameraDevice::getFileExtension()
 
 void CameraDevice::startRecordingToFile (const File& file, int quality)
 {
+    jassert (quality >= 0 && quality <= 2);
     stopRecording();
 
     DShowCameraDeviceInteral* const d = (DShowCameraDeviceInteral*) internal;
     d->addUser();
-    isRecording = d->createFileCaptureFilter (file);
+    isRecording = d->createFileCaptureFilter (file, quality);
 }
 
 const Time CameraDevice::getTimeOfFirstRecordedFrame() const
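Finally, a hedged sketch of the calling side: the quality argument (0 = lowest, 2 = highest, per the jassert above) now controls both the recorded frame rate and how hard the preview is throttled while recording. The device index, file location and timing below are illustrative only, assuming the JUCE headers are included:

    // Hypothetical caller, not part of this commit.
    void recordShortClip()
    {
        ScopedPointer<CameraDevice> camera (CameraDevice::openDevice (0));

        if (camera != 0)
        {
            const File output (File::getSpecialLocation (File::userDocumentsDirectory)
                                   .getChildFile ("clip" + CameraDevice::getFileExtension()));

            camera->startRecordingToFile (output, 1);   // medium quality: 15 fps, preview throttled
            Thread::sleep (5000);                       // record for roughly five seconds
            camera->stopRecording();
        }
    }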