
Direct2D: Add support for bitmaps spanning multiple texture pages

reuk 2024-09-03 18:47:32 +01:00
parent 470ada4454
commit 589d9940ed
7 changed files with 873 additions and 456 deletions
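
For orientation before the diffs: when an Image is wider or taller than a single Direct2D bitmap can hold, Direct2DPixelData now backs it with several bitmaps ("texture pages"), and drawing walks those pages instead of assuming one adapter bitmap. The sketch below only illustrates the paging idea; the page size is presumably bounded by the device's maximum bitmap size (the limit queried via GetMaximumBitmapSize in the clipToImageAlpha change), and PageBounds / computePageBounds are hypothetical helpers, not part of the JUCE API.

#include <algorithm>
#include <cstdio>
#include <vector>

// Hypothetical illustration: split an oversized image into page-sized tiles,
// each no larger than the device's maximum bitmap dimension.
struct PageBounds { int x, y, width, height; };

std::vector<PageBounds> computePageBounds (int imageWidth, int imageHeight, int maxBitmapSize)
{
    std::vector<PageBounds> pages;

    for (int y = 0; y < imageHeight; y += maxBitmapSize)
        for (int x = 0; x < imageWidth; x += maxBitmapSize)
            pages.push_back ({ x, y,
                               std::min (maxBitmapSize, imageWidth - x),
                               std::min (maxBitmapSize, imageHeight - y) });

    return pages;
}

int main()
{
    // e.g. a 32768x128 image (the size used by the unit test added in this
    // commit) with an assumed 16384-pixel limit splits into two 16384x128 pages
    for (const auto& p : computePageBounds (1 << 15, 128, 1 << 14))
        std::printf ("page at (%d, %d) size %dx%d\n", p.x, p.y, p.width, p.height);
}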


@@ -345,9 +345,9 @@ public:
         const auto d2d1Bitmap = [&]
         {
             if (auto direct2DPixelData = dynamic_cast<Direct2DPixelData*> (fillType.image.getPixelData()))
-                if (auto bitmap = direct2DPixelData->getAdapterD2D1Bitmap())
-                    if (bitmap->GetPixelFormat().format == DXGI_FORMAT_B8G8R8A8_UNORM)
-                        return bitmap;
+                if (const auto page = direct2DPixelData->getFirstPageForContext (context))
+                    if (page->GetPixelFormat().format == DXGI_FORMAT_B8G8R8A8_UNORM)
+                        return page;

             JUCE_D2DMETRICS_SCOPED_ELAPSED_TIME (Direct2DMetricsHub::getInstance()->imageContextMetrics, createBitmapTime);
@@ -1162,11 +1162,20 @@ void Direct2DGraphicsContext::clipToImageAlpha (const Image& sourceImage, const
     if (auto deviceContext = getPimpl()->getDeviceContext())
     {
+        const auto maxDim = (int) deviceContext->GetMaximumBitmapSize();
+
+        if (sourceImage.getWidth() > maxDim || sourceImage.getHeight() > maxDim)
+        {
+            // The Direct2D renderer doesn't currently support clipping to very large images
+            jassertfalse;
+            return;
+        }
+
         // Is this a Direct2D image already?
         ComSmartPtr<ID2D1Bitmap> d2d1Bitmap;

         if (auto direct2DPixelData = dynamic_cast<Direct2DPixelData*> (sourceImage.getPixelData()))
-            d2d1Bitmap = direct2DPixelData->getAdapterD2D1Bitmap();
+            d2d1Bitmap = direct2DPixelData->getFirstPageForContext (deviceContext);

         if (! d2d1Bitmap)
         {
@@ -1430,78 +1439,108 @@ void Direct2DGraphicsContext::strokePath (const Path& p, const PathStrokeType& s
         deviceContext->DrawGeometry (geometry, brush, strokeType.getStrokeThickness(), strokeStyle);
 }

-void Direct2DGraphicsContext::drawImage (const Image& image, const AffineTransform& transform)
+void Direct2DGraphicsContext::drawImage (const Image& imageIn, const AffineTransform& transform)
 {
     JUCE_D2DMETRICS_SCOPED_ELAPSED_TIME (metrics, drawImageTime)
     JUCE_SCOPED_TRACE_EVENT_FRAME (etw::drawImage, etw::direct2dKeyword, getFrameId());

-    if (image.isNull())
+    if (imageIn.isNull())
         return;

     applyPendingClipList();

     if (auto deviceContext = getPimpl()->getDeviceContext())
     {
-        // Is this a Direct2D image already with the correct format?
-        ComSmartPtr<ID2D1Bitmap1> d2d1Bitmap;
+        auto image = NativeImageType{}.convert (imageIn);
+        Direct2DPixelData* nativeBitmap = nullptr;
         Rectangle<int> imageClipArea;

-        if (auto direct2DPixelData = dynamic_cast<Direct2DPixelData*> (image.getPixelData()))
+        const auto imageTransform = currentState->currentTransform.getTransformWith (transform);
+
+        if (auto* subsectionPixelData = dynamic_cast<SubsectionPixelData*> (image.getPixelData()))
         {
-            d2d1Bitmap = direct2DPixelData->getAdapterD2D1Bitmap();
+            if (auto direct2DPixelData = dynamic_cast<Direct2DPixelData*> (subsectionPixelData->getSourcePixelData().get()))
+            {
+                nativeBitmap = direct2DPixelData;
+                imageClipArea = subsectionPixelData->getSubsection();
+            }
         }
+        else if (auto direct2DPixelData = dynamic_cast<Direct2DPixelData*> (image.getPixelData()))
+        {
+            nativeBitmap = direct2DPixelData;
+            imageClipArea = { direct2DPixelData->width, direct2DPixelData->height };
+        }
-        if (! d2d1Bitmap || d2d1Bitmap->GetPixelFormat().format != DXGI_FORMAT_B8G8R8A8_UNORM)
+        else
        {
-            JUCE_D2DMETRICS_SCOPED_ELAPSED_TIME (Direct2DMetricsHub::getInstance()->imageContextMetrics, createBitmapTime);
-            d2d1Bitmap = Direct2DBitmap::toBitmap (image, deviceContext, Image::ARGB);
-            imageClipArea = image.getBounds();
+            // This shouldn't happen, we converted the image to a native type already
+            jassertfalse;
        }

-        if (d2d1Bitmap)
+        if (! nativeBitmap)
         {
-            auto sourceRectF = D2DUtilities::toRECT_F (imageClipArea);
-            auto imageTransform = currentState->currentTransform.getTransformWith (transform);
-            if (imageTransform.isOnlyTranslation())
-            {
-                auto destinationRect = D2DUtilities::toRECT_F (imageClipArea.toFloat() + Point<float> { imageTransform.getTranslationX(), imageTransform.getTranslationY() });
-                deviceContext->DrawBitmap (d2d1Bitmap,
-                                           &destinationRect,
-                                           currentState->fillType.getOpacity(),
-                                           currentState->interpolationMode,
-                                           &sourceRectF,
-                                           {});
-                return;
-            }
-            if (D2DHelpers::isTransformAxisAligned (imageTransform))
-            {
-                auto destinationRect = D2DUtilities::toRECT_F (imageClipArea.toFloat().transformedBy (imageTransform));
-                deviceContext->DrawBitmap (d2d1Bitmap,
-                                           &destinationRect,
-                                           currentState->fillType.getOpacity(),
-                                           currentState->interpolationMode,
-                                           &sourceRectF,
-                                           {});
-                return;
-            }
-            ScopedTransform scopedTransform { *getPimpl(), currentState, transform };
-            deviceContext->DrawBitmap (d2d1Bitmap,
-                                       nullptr,
-                                       currentState->fillType.getOpacity(),
-                                       currentState->interpolationMode,
-                                       &sourceRectF,
-                                       {});
+            jassertfalse;
+            return;
         }

+        auto drawTiles = [&] (const auto& pixelData, auto&& getRect)
+        {
+            for (const auto& page : pixelData->getPagesForContext (deviceContext))
+            {
+                const auto pageBounds = page.getBounds();
+                const auto intersection = pageBounds.toFloat().getIntersection (imageClipArea.toFloat());
+
+                if (intersection.isEmpty())
+                    continue;
+
+                const auto src = intersection - pageBounds.getPosition().toFloat();
+                const auto dst = getRect (intersection - imageClipArea.getPosition().toFloat());
+                const auto [srcConverted, dstConverted] = std::tuple (D2DUtilities::toRECT_F (src),
+                                                                      D2DUtilities::toRECT_F (dst));
+
+                if (nativeBitmap->pixelFormat == Image::SingleChannel)
+                {
+                    const auto lastColour = currentState->colourBrush->GetColor();
+                    const auto lastMode = deviceContext->GetAntialiasMode();
+
+                    currentState->colourBrush->SetColor (D2D1::ColorF (1.0f, 1.0f, 1.0f, currentState->fillType.getOpacity()));
+                    deviceContext->SetAntialiasMode (D2D1_ANTIALIAS_MODE_ALIASED);
+
+                    deviceContext->FillOpacityMask (page.bitmap,
+                                                    currentState->colourBrush,
+                                                    dstConverted,
+                                                    srcConverted);
+
+                    deviceContext->SetAntialiasMode (lastMode);
+                    currentState->colourBrush->SetColor (lastColour);
+                }
+                else
+                {
+                    deviceContext->DrawBitmap (page.bitmap,
+                                               dstConverted,
+                                               currentState->fillType.getOpacity(),
+                                               currentState->interpolationMode,
+                                               srcConverted,
+                                               {});
+                }
+            }
+        };
+
+        if (imageTransform.isOnlyTranslation() || D2DHelpers::isTransformAxisAligned (imageTransform))
+        {
+            drawTiles (nativeBitmap, [&] (auto intersection)
+            {
+                return intersection.transformedBy (imageTransform);
+            });
+
+            return;
+        }
+
+        ScopedTransform scopedTransform { *getPimpl(), currentState, transform };
+        drawTiles (nativeBitmap, [] (auto intersection)
+        {
+            return intersection;
+        });
     }
 }
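
The heart of the new drawImage path is the drawTiles lambda above: each page's bounds are intersected with the image clip area, the overlap is rebased into page-local coordinates for the source rectangle and into clip-relative coordinates for the destination rectangle, and the destination is then positioned by the current image transform. A minimal standalone sketch of that rectangle arithmetic, using a plain Rect type and made-up page sizes rather than juce::Rectangle and a real page layout:

#include <algorithm>
#include <cstdio>

struct Rect { float x, y, w, h; };

// Axis-aligned intersection of two rectangles (empty result has zero width/height).
static Rect intersect (Rect a, Rect b)
{
    const auto x1 = std::max (a.x, b.x), y1 = std::max (a.y, b.y);
    const auto x2 = std::min (a.x + a.w, b.x + b.w), y2 = std::min (a.y + a.h, b.y + b.h);
    return { x1, y1, std::max (0.0f, x2 - x1), std::max (0.0f, y2 - y1) };
}

int main()
{
    // Assumed layout: a 32768x128 image split into two 16384-wide pages,
    // with a clipped subsection starting at x = 8192 (numbers chosen to echo the unit test).
    const Rect clipArea { 8192, 32, 16384, 64 };
    const Rect pages[]  { { 0, 0, 16384, 128 }, { 16384, 0, 16384, 128 } };

    for (const auto& page : pages)
    {
        const auto overlap = intersect (page, clipArea);

        if (overlap.w <= 0 || overlap.h <= 0)
            continue;   // this page does not contribute to the drawn region

        // Source rect is expressed relative to the page's own bitmap...
        const Rect src { overlap.x - page.x, overlap.y - page.y, overlap.w, overlap.h };
        // ...while the destination rect is expressed relative to the clip area,
        // ready to be placed by the current image transform.
        const Rect dst { overlap.x - clipArea.x, overlap.y - clipArea.y, overlap.w, overlap.h };

        std::printf ("src (%g,%g %gx%g) -> dst (%g,%g %gx%g)\n",
                     src.x, src.y, src.w, src.h, dst.x, dst.y, dst.w, dst.h);
    }
}

In this assumed layout the first page supplies the left half of the subsection (destination x 0 to 8192) from its own pixels at x 8192 to 16384, and the second page supplies the right half from its pixels at x 0 to 8192, so the tiles meet seamlessly in the destination.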
@@ -1787,4 +1826,102 @@ Direct2DGraphicsContext::ScopedTransform::~ScopedTransform()
     pimpl.resetDeviceContextTransform();
 }

 //==============================================================================
+//==============================================================================
+#if JUCE_UNIT_TESTS
+
+class Direct2DGraphicsContextTests : public UnitTest
+{
+public:
+    Direct2DGraphicsContextTests() : UnitTest ("Direct2D Graphics Context", UnitTestCategories::graphics) {}
+
+    void runTest() override
+    {
+        const auto imageWidth = 1 << 15;
+        const auto imageHeight = 128;
+
+        Image largeImageSoftware { Image::RGB, imageWidth, imageHeight, false, SoftwareImageType{} };
+
+        {
+            Graphics g { largeImageSoftware };
+            g.setGradientFill ({ Colours::red, 0, 0, Colours::cyan, (float) largeImageSoftware.getWidth(), 0, false });
+            g.fillAll();
+        }
+
+        constexpr auto targetDim = 512;
+
+        const auto largeImageNative = NativeImageType{}.convert (largeImageSoftware);
+        const auto subsection = largeImageNative.getClippedImage (largeImageNative.getBounds().withSizeKeepingCentre (1 << 14, 64));
+
+        beginTest ("Render large images");
+        {
+            for (const auto& imageToDraw : { largeImageNative, subsection })
+            {
+                const AffineTransform transformsToTest[]
+                {
+                    {},
+                    AffineTransform::translation ((float) targetDim - (float) imageToDraw.getWidth(), 0),
+                    AffineTransform::translation (0, (float) targetDim - (float) imageToDraw.getHeight()),
+                    AffineTransform::scale ((float) targetDim / imageWidth),
+                    AffineTransform::scale ((float) targetDim / imageWidth)
+                        .followedBy (AffineTransform::translation (32, 64)),
+                    AffineTransform::scale (1.1f),
+                    AffineTransform::scale ((float) targetDim / imageWidth,
+                                            (float) targetDim / imageHeight),
+                    AffineTransform::rotation (MathConstants<float>::pi * 0.25f),
+                    AffineTransform::rotation (MathConstants<float>::pi * 0.25f, imageWidth * 0.5f, 0)
+                        .followedBy (AffineTransform::translation (-imageWidth * 0.5f, 0)),
+                };
+
+                for (const auto& transform : transformsToTest)
+                {
+                    Image targetNative { Image::RGB, targetDim, targetDim, true, NativeImageType{} };
+                    Image targetSoftware { Image::RGB, targetDim, targetDim, true, SoftwareImageType{} };
+
+                    for (auto& image : { &targetNative, &targetSoftware })
+                    {
+                        Graphics g { *image };
+                        g.drawImageTransformed (imageToDraw, transform);
+                    }
+
+                    compareImages (targetNative, targetSoftware);
+                }
+            }
+        }
+    }
+
+    void compareImages (const Image& a, const Image& b)
+    {
+        expect (a.getBounds() == b.getBounds());
+
+        const Image::BitmapData bitmapA { a, Image::BitmapData::readOnly };
+        const Image::BitmapData bitmapB { b, Image::BitmapData::readOnly };
+
+        int64_t accumulatedError{};
+        int64_t numSamples{};
+
+        for (auto y = 0; y < a.getHeight(); y += 16)
+        {
+            for (auto x = 0; x < a.getWidth(); x += 16)
+            {
+                const auto expected = bitmapA.getPixelColour (x, y);
+                const auto actual = bitmapB.getPixelColour (x, y);
+
+                for (auto& fn : { &Colour::getRed, &Colour::getGreen, &Colour::getBlue, &Colour::getAlpha })
+                {
+                    accumulatedError += ((int64_t) (actual.*fn)() - (int64_t) (expected.*fn)());
+                    ++numSamples;
+                }
+            }
+        }
+
+        const auto averageError = (double) accumulatedError / (double) numSamples;
+        expect (std::abs (averageError) < 1.0);
+    }
+};
+
+static Direct2DGraphicsContextTests direct2DGraphicsContextTests;
+
+#endif
+
 } // namespace juce