/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "SkImage_GpuBase.h"
#include "GrBackendSurface.h"
#include "GrClip.h"
#include "GrContext.h"
#include "GrContextPriv.h"
#include "GrRecordingContext.h"
#include "GrRecordingContextPriv.h"
#include "GrRenderTargetContext.h"
#include "GrTexture.h"
#include "GrTextureAdjuster.h"
#include "SkBitmapCache.h"
#include "SkImage_Gpu.h"
#include "SkPromiseImageTexture.h"
#include "SkReadPixelsRec.h"
#include "SkTLList.h"
#include "effects/GrYUVtoRGBEffect.h"

SkImage_GpuBase::SkImage_GpuBase(sk_sp<GrContext> context, int width, int height, uint32_t uniqueID,
                                 SkAlphaType at, sk_sp<SkColorSpace> cs)
        : INHERITED(width, height, uniqueID)
        , fContext(std::move(context))
        , fAlphaType(at)
        , fColorSpace(std::move(cs)) {}

SkImage_GpuBase::~SkImage_GpuBase() {}

//////////////////////////////////////////////////////////////////////////////////////////////////

#if GR_TEST_UTILS
void SkImage_GpuBase::resetContext(sk_sp<GrContext> newContext) {
    SkASSERT(fContext->priv().matches(newContext.get()));
    fContext = newContext;
}
#endif

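// Checks that a client-supplied backend texture can back an image with the requested color type,
// alpha type, and color space, and writes the GrPixelConfig derived from the texture's backend
// format and the color type to '*config'.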
bool SkImage_GpuBase::ValidateBackendTexture(GrContext* ctx, const GrBackendTexture& tex,
                                             GrPixelConfig* config, SkColorType ct, SkAlphaType at,
                                             sk_sp<SkColorSpace> cs) {
    if (!tex.isValid()) {
        return false;
    }
    // TODO: Create a SkImageColorInfo struct for color, alpha, and color space so we don't need to
    // create a fake image info here.
    SkImageInfo info = SkImageInfo::Make(1, 1, ct, at, cs);
    if (!SkImageInfoIsValid(info)) {
        return false;
    }
    GrBackendFormat backendFormat = tex.getBackendFormat();
    if (!backendFormat.isValid()) {
        return false;
    }
    *config = ctx->priv().caps()->getConfigFromBackendFormat(backendFormat, ct);
    return *config != kUnknown_GrPixelConfig;
}

//////////////////////////////////////////////////////////////////////////////////////////////////

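// Reads the image's pixels back into a raster SkBitmap, populating the bitmap cache when the
// caller allows it. Requires a direct context; readback cannot happen while recording a DDL.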
bool SkImage_GpuBase::getROPixels(SkBitmap* dst, CachingHint chint) const {
    auto direct = fContext->priv().asDirectContext();
    if (!direct) {
        // DDL TODO: buffer up the readback so it occurs when the DDL is drawn?
        return false;
    }

    const auto desc = SkBitmapCacheDesc::Make(this);
    if (SkBitmapCache::Find(desc, dst)) {
        SkASSERT(dst->isImmutable());
        SkASSERT(dst->getPixels());
        return true;
    }

    SkBitmapCache::RecPtr rec = nullptr;
    SkPixmap pmap;
    if (kAllow_CachingHint == chint) {
        rec = SkBitmapCache::Alloc(desc, this->onImageInfo(), &pmap);
        if (!rec) {
            return false;
        }
    } else {
        if (!dst->tryAllocPixels(this->onImageInfo()) || !dst->peekPixels(&pmap)) {
            return false;
        }
    }

    sk_sp<GrSurfaceContext> sContext = direct->priv().makeWrappedSurfaceContext(
        this->asTextureProxyRef(direct), fColorSpace);
    if (!sContext) {
        return false;
    }

    if (!sContext->readPixels(pmap.info(), pmap.writable_addr(), pmap.rowBytes(), 0, 0)) {
        return false;
    }

    if (rec) {
        SkBitmapCache::Add(std::move(rec), dst);
        this->notifyAddedToRasterCache();
    }
    return true;
}

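// Creates a new GPU-backed image containing a copy of the requested subset of this image. The
// subset is copied with a GPU surface-to-surface copy into an exactly-sized deferred surface
// context.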
sk_sp<SkImage> SkImage_GpuBase::onMakeSubset(GrRecordingContext* context,
                                             const SkIRect& subset) const {
    if (!context || !fContext->priv().matches(context)) {
        return nullptr;
    }

    sk_sp<GrSurfaceProxy> proxy = this->asTextureProxyRef(context);

    GrSurfaceDesc desc;
    desc.fWidth = subset.width();
    desc.fHeight = subset.height();
    desc.fConfig = proxy->config();

    GrBackendFormat format = proxy->backendFormat().makeTexture2D();
    if (!format.isValid()) {
        return nullptr;
    }

    // TODO: Should this inherit our proxy's budgeted status?
    sk_sp<GrSurfaceContext> sContext(context->priv().makeDeferredSurfaceContext(
            format, desc, proxy->origin(), GrMipMapped::kNo, SkBackingFit::kExact,
            proxy->isBudgeted()));
    if (!sContext) {
        return nullptr;
    }

    if (!sContext->copy(proxy.get(), subset, SkIPoint::Make(0, 0))) {
        return nullptr;
    }

    // MDB: this call is okay because we know 'sContext' was kExact
    return sk_make_sp<SkImage_Gpu>(fContext, kNeedNewImageUniqueID, fAlphaType,
                                   sContext->asTextureProxyRef(), this->refColorSpace());
}

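// Premultiplies 8888 pixel data in place; all other color types are left untouched by this helper.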
static void apply_premul(const SkImageInfo& info, void* pixels, size_t rowBytes) {
    switch (info.colorType()) {
    case kRGBA_8888_SkColorType:
    case kBGRA_8888_SkColorType:
        break;
    default:
        return; // nothing to do
    }

    // SkColor is not necessarily RGBA or BGRA, but it is one of them on little-endian,
    // and in either case, the alpha-byte is always in the same place, so we can safely call
    // SkPreMultiplyColor()
    //
    SkColor* row = (SkColor*)pixels;
    for (int y = 0; y < info.height(); ++y) {
        for (int x = 0; x < info.width(); ++x) {
            row[x] = SkPreMultiplyColor(row[x]);
        }
        row = (SkColor*)((char*)(row) + rowBytes);
    }
}

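// Reads a rectangle of pixels into client memory, converting to the destination info. Premul vs.
// unpremul mismatches are resolved either on the GPU (via kUnpremul_PixelOpsFlag) or with a manual
// fix-up pass after readback.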
bool SkImage_GpuBase::onReadPixels(const SkImageInfo& dstInfo, void* dstPixels, size_t dstRB,
                                   int srcX, int srcY, CachingHint) const {
    auto direct = fContext->priv().asDirectContext();
    if (!direct) {
        // DDL TODO: buffer up the readback so it occurs when the DDL is drawn?
        return false;
    }

    if (!SkImageInfoValidConversion(dstInfo, this->onImageInfo())) {
        return false;
    }

    SkReadPixelsRec rec(dstInfo, dstPixels, dstRB, srcX, srcY);
    if (!rec.trim(this->width(), this->height())) {
        return false;
    }

    // TODO: this seems to duplicate code in GrTextureContext::onReadPixels and
    // GrRenderTargetContext::onReadPixels
    uint32_t flags = 0;
    if (kUnpremul_SkAlphaType == rec.fInfo.alphaType() && kPremul_SkAlphaType == fAlphaType) {
        // let the GPU perform this transformation for us
        flags = GrContextPriv::kUnpremul_PixelOpsFlag;
    }

    sk_sp<GrSurfaceContext> sContext = direct->priv().makeWrappedSurfaceContext(
        this->asTextureProxyRef(direct), this->refColorSpace());
    if (!sContext) {
        return false;
    }

    if (!sContext->readPixels(rec.fInfo, rec.fPixels, rec.fRowBytes, rec.fX, rec.fY, flags)) {
        return false;
    }

    // Do we have to manually fix up the alpha channel?
    //      src         dst
    //      unpremul    premul      fix manually
    //      premul      unpremul    done by kUnpremul_PixelOpsFlag
    // All other combos need no fixing.
    //
    // TODO: Should this be handled by Ganesh?
    //
    if (kPremul_SkAlphaType == rec.fInfo.alphaType() && kUnpremul_SkAlphaType == fAlphaType) {
        apply_premul(rec.fInfo, rec.fPixels, rec.fRowBytes);
    }
    return true;
}

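// Returns a texture proxy for this image adjusted to satisfy the given sampler state (e.g. mip
// maps or copies the sampler requires), writing any scale correction into 'scaleAdjust'.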
sk_sp<GrTextureProxy> SkImage_GpuBase::asTextureProxyRef(GrRecordingContext* context,
                                                         const GrSamplerState& params,
                                                         SkScalar scaleAdjust[2]) const {
    if (!context || !fContext->priv().matches(context)) {
        SkASSERT(0);
        return nullptr;
    }

    GrTextureAdjuster adjuster(fContext.get(), this->asTextureProxyRef(context), fAlphaType,
                               this->uniqueID(), fColorSpace.get());
    return adjuster.refTextureProxyForParams(params, scaleAdjust);
}

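// Instantiates the image's proxy if necessary and returns the underlying backend texture,
// optionally flushing pending GPU work and reporting the surface origin. Returns an invalid
// GrBackendTexture for DDL-recorded images or if instantiation fails.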
GrBackendTexture SkImage_GpuBase::onGetBackendTexture(bool flushPendingGrContextIO,
                                                      GrSurfaceOrigin* origin) const {
    auto direct = fContext->priv().asDirectContext();
    if (!direct) {
        // This image was created with a DDL context and cannot be instantiated.
        return GrBackendTexture(); // invalid
    }

    sk_sp<GrTextureProxy> proxy = this->asTextureProxyRef(direct);
    SkASSERT(proxy);

    if (!proxy->isInstantiated()) {
        auto resourceProvider = direct->priv().resourceProvider();

        if (!proxy->instantiate(resourceProvider)) {
            return GrBackendTexture(); // invalid
        }
    }

    GrTexture* texture = proxy->peekTexture();
    if (texture) {
        if (flushPendingGrContextIO) {
            direct->priv().prepareSurfaceForExternalIO(proxy.get());
        }
        if (origin) {
            *origin = proxy->origin();
        }
        return texture->getBackendTexture();
    }
    return GrBackendTexture(); // invalid
}

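// Returns the GrTexture backing this image, instantiating the proxy on the direct context if it
// has not been instantiated yet. Returns nullptr for DDL-recorded images.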
GrTexture* SkImage_GpuBase::onGetTexture() const {
    GrTextureProxy* proxy = this->peekProxy();
    if (proxy && proxy->isInstantiated()) {
        return proxy->peekTexture();
    }

    auto direct = fContext->priv().asDirectContext();
    if (!direct) {
        // This image was created with a DDL context and cannot be instantiated.
        return nullptr;
    }

    sk_sp<GrTextureProxy> proxyRef = this->asTextureProxyRef(direct);
    SkASSERT(proxyRef && !proxyRef->isInstantiated());

    if (!proxyRef->instantiate(direct->priv().resourceProvider())) {
        return nullptr;
    }

    return proxyRef->peekTexture();
}

bool SkImage_GpuBase::onIsValid(GrContext* context) const {
    // The base class has already checked that context isn't abandoned (if it's not nullptr)
    if (fContext->priv().abandoned()) {
        return false;
    }

    if (context && !fContext->priv().matches(context)) {
        return false;
    }

    return true;
}

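// Wraps the client-supplied YUVA backend textures in borrowed, read-only texture proxies and
// verifies that each texture's pixel config actually supplies the channels referenced by
// 'yuvaIndices'.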
bool SkImage_GpuBase::MakeTempTextureProxies(GrContext* ctx, const GrBackendTexture yuvaTextures[],
                                             int numTextures, const SkYUVAIndex yuvaIndices[4],
                                             GrSurfaceOrigin imageOrigin,
                                             sk_sp<GrTextureProxy> tempTextureProxies[4]) {
    GrProxyProvider* proxyProvider = ctx->priv().proxyProvider();

    // We need to make a copy of the input backend textures because we need to preserve the result
    // of validate_backend_texture.
    GrBackendTexture yuvaTexturesCopy[4];
    for (int textureIndex = 0; textureIndex < numTextures; ++textureIndex) {
        yuvaTexturesCopy[textureIndex] = yuvaTextures[textureIndex];
        GrBackendFormat backendFormat = yuvaTexturesCopy[textureIndex].getBackendFormat();
        if (!backendFormat.isValid()) {
            return false;
        }
        yuvaTexturesCopy[textureIndex].fConfig =
                ctx->priv().caps()->getYUVAConfigFromBackendFormat(backendFormat);
        if (yuvaTexturesCopy[textureIndex].fConfig == kUnknown_GrPixelConfig) {
            return false;
        }
        SkASSERT(yuvaTexturesCopy[textureIndex].isValid());

        tempTextureProxies[textureIndex] = proxyProvider->wrapBackendTexture(
                yuvaTexturesCopy[textureIndex], imageOrigin, kBorrow_GrWrapOwnership,
                GrWrapCacheable::kNo, kRead_GrIOType);
        if (!tempTextureProxies[textureIndex]) {
            return false;
        }

        // Check that each texture contains the channel data for the corresponding YUVA index
        GrPixelConfig config = yuvaTexturesCopy[textureIndex].fConfig;
        for (int yuvaIndex = 0; yuvaIndex < SkYUVAIndex::kIndexCount; ++yuvaIndex) {
            if (yuvaIndices[yuvaIndex].fIndex == textureIndex) {
                switch (yuvaIndices[yuvaIndex].fChannel) {
                    case SkColorChannel::kR:
                        if (kAlpha_8_as_Alpha_GrPixelConfig == config) {
                            return false;
                        }
                        break;
                    case SkColorChannel::kG:
                    case SkColorChannel::kB:
                        if (kAlpha_8_as_Alpha_GrPixelConfig == config ||
                            kAlpha_8_as_Red_GrPixelConfig == config) {
                            return false;
                        }
                        break;
                    case SkColorChannel::kA:
                    default:
                        if (kRGB_888_GrPixelConfig == config) {
                            return false;
                        }
                        break;
                }
            }
        }
    }

    return true;
}

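// Draws the YUVA planes into 'renderTargetContext' as RGBA using GrYUVtoRGBEffect, applying the
// optional color space transform, then flushes the writes to the destination surface.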
bool SkImage_GpuBase::RenderYUVAToRGBA(GrContext* ctx, GrRenderTargetContext* renderTargetContext,
                                       const SkRect& rect, SkYUVColorSpace yuvColorSpace,
                                       sk_sp<GrColorSpaceXform> colorSpaceXform,
                                       const sk_sp<GrTextureProxy> proxies[4],
                                       const SkYUVAIndex yuvaIndices[4]) {
    SkASSERT(renderTargetContext);
    if (!renderTargetContext->asSurfaceProxy()) {
        return false;
    }

    GrPaint paint;
    paint.setPorterDuffXPFactory(SkBlendMode::kSrc);

    auto fp = GrYUVtoRGBEffect::Make(proxies, yuvaIndices, yuvColorSpace,
                                     GrSamplerState::Filter::kNearest);
    if (colorSpaceXform) {
        fp = GrColorSpaceXformEffect::Make(std::move(fp), std::move(colorSpaceXform));
    }
    paint.addColorFragmentProcessor(std::move(fp));

    renderTargetContext->drawRect(GrNoClip(), std::move(paint), GrAA::kNo, SkMatrix::I(), rect);

    // DDL TODO: in the promise image version we must not flush here
    ctx->priv().flushSurfaceWrites(renderTargetContext->asSurfaceProxy());

    return true;
}

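// Creates the lazily instantiated texture proxy that backs a promise image. Calling the client's
// Fulfill proc is deferred until the proxy is instantiated (typically at flush); the Release and
// Done procs are invoked through the GrTexture idle-proc mechanism described in the callback class
// below.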
sk_sp<GrTextureProxy> SkImage_GpuBase::MakePromiseImageLazyProxy(
        GrContext* context, int width, int height, GrSurfaceOrigin origin, GrPixelConfig config,
        GrBackendFormat backendFormat, GrMipMapped mipMapped,
        PromiseImageTextureFulfillProc fulfillProc,
        PromiseImageTextureReleaseProc releaseProc,
        PromiseImageTextureDoneProc doneProc,
        PromiseImageTextureContext textureContext) {
    SkASSERT(context);
    SkASSERT(width > 0 && height > 0);
    SkASSERT(doneProc);
    SkASSERT(config != kUnknown_GrPixelConfig);

    if (!fulfillProc || !releaseProc) {
        doneProc(textureContext);
        return nullptr;
    }

    if (mipMapped == GrMipMapped::kYes &&
        GrTextureTypeHasRestrictedSampling(backendFormat.textureType())) {
        // It is invalid to have a GL_TEXTURE_EXTERNAL or GL_TEXTURE_RECTANGLE and have mips as
        // well.
        doneProc(textureContext);
        return nullptr;
    }

    /**
     * This class is the lazy instantiation callback for promise images. It manages calling the
     * client's Fulfill, Release, and Done procs. It attempts to reuse a GrTexture instance in
     * cases where the client provides the same SkPromiseImageTexture as Fulfill results for
     * multiple SkImages. The created GrTexture is given a key based on a unique ID associated with
     * the SkPromiseImageTexture.
     *
     * The GrTexture idle proc mechanism is used to call the Release and Done procs. We use this
     * instead of the GrSurface release proc because the GrTexture is cached and therefore may
     * outlive the proxy into which this callback is installed.
     *
     * A key invalidation message is installed on the SkPromiseImageTexture so that the GrTexture
     * is deleted once it can no longer be used to instantiate a proxy.
     */
    class PromiseLazyInstantiateCallback {
    public:
        PromiseLazyInstantiateCallback(PromiseImageTextureFulfillProc fulfillProc,
                                       PromiseImageTextureReleaseProc releaseProc,
                                       PromiseImageTextureDoneProc doneProc,
                                       PromiseImageTextureContext context,
                                       GrPixelConfig config)
                : fFulfillProc(fulfillProc), fConfig(config) {
            auto doneHelper = sk_make_sp<GrRefCntedCallback>(doneProc, context);
            fIdleCallback = sk_make_sp<GrRefCntedCallback>(releaseProc, context);
            fIdleCallback->addChild(std::move(doneHelper));
        }
        PromiseLazyInstantiateCallback(PromiseLazyInstantiateCallback&&) = default;
        PromiseLazyInstantiateCallback(const PromiseLazyInstantiateCallback&) {
            // Because we get wrapped in std::function we must be copyable. But we should never
            // be copied.
            SkASSERT(false);
        }
        PromiseLazyInstantiateCallback& operator=(PromiseLazyInstantiateCallback&&) = default;
        PromiseLazyInstantiateCallback& operator=(const PromiseLazyInstantiateCallback&) {
            SkASSERT(false);
            return *this;
        }

        ~PromiseLazyInstantiateCallback() {
            if (fIdleCallback) {
                SkASSERT(!fTexture);
                // We were never fulfilled. Abandon the callback so that the done proc is still
                // called.
                fIdleCallback->abandon();
            }
            // Our destructor can run on any thread. We trigger the unref of fTexture by message.
            if (fTexture) {
                SkASSERT(!fIdleCallback);
                SkMessageBus<GrGpuResourceFreedMessage>::Post({fTexture, fTextureContextID});
            }
        }

        sk_sp<GrSurface> operator()(GrResourceProvider* resourceProvider) {
            // Our proxy is getting instantiated for the second+ time. We are only allowed to call
            // Fulfill once. So return our cached result.
            if (fTexture) {
                return sk_ref_sp(fTexture);
            }
            SkASSERT(fIdleCallback);
            PromiseImageTextureContext textureContext = fIdleCallback->context();
            sk_sp<SkPromiseImageTexture> promiseTexture = fFulfillProc(textureContext);
            // From here on out our contract is that the release proc must be called, even if
            // the return from fulfill was invalid or we fail for some other reason.
            if (!promiseTexture) {
                // Make sure we explicitly reset this because our destructor assumes a non-null
                // fIdleCallback means fulfill was never called.
                fIdleCallback.reset();
                return sk_sp<GrTexture>();
            }

            auto backendTexture = promiseTexture->backendTexture();
            backendTexture.fConfig = fConfig;
            if (!backendTexture.isValid()) {
                fIdleCallback.reset();
                return sk_sp<GrTexture>();
            }

            sk_sp<GrTexture> tex;
            static const GrUniqueKey::Domain kDomain = GrUniqueKey::GenerateDomain();
            GrUniqueKey key;
            GrUniqueKey::Builder builder(&key, kDomain, 2, "promise");
            builder[0] = promiseTexture->uniqueID();
            builder[1] = fConfig;
            builder.finish();
            // A texture with this key may already exist from a different instance of this lazy
            // callback. This could happen if the client fulfills a promise image with a texture
            // that was previously used to fulfill a different promise image.
            if (auto surf = resourceProvider->findByUniqueKey<GrSurface>(key)) {
                tex = sk_ref_sp(surf->asTexture());
                SkASSERT(tex);
            } else {
                if ((tex = resourceProvider->wrapBackendTexture(
                             backendTexture, kBorrow_GrWrapOwnership, GrWrapCacheable::kYes,
                             kRead_GrIOType))) {
                    tex->resourcePriv().setUniqueKey(key);
                } else {
                    fIdleCallback.reset();
                    return sk_sp<GrTexture>();
                }
            }
            tex->addIdleProc(std::move(fIdleCallback));
            promiseTexture->addKeyToInvalidate(tex->getContext()->priv().contextID(), key);
            fTexture = tex.get();
            // We need to hold on to the GrTexture in case our proxy gets reinstantiated. However,
            // we can't unref in our destructor because we may be on another thread then. So we
            // let the cache know it is waiting on an unref message. We will send that message from
            // our destructor.
            GrContext* context = fTexture->getContext();
            context->priv().getResourceCache()->insertDelayedResourceUnref(fTexture);
            fTextureContextID = context->priv().contextID();
            return std::move(tex);
        }

    private:
        GrTexture* fTexture = nullptr;
        uint32_t fTextureContextID = SK_InvalidUniqueID;
        sk_sp<GrRefCntedCallback> fIdleCallback;
        PromiseImageTextureFulfillProc fFulfillProc;
        GrPixelConfig fConfig;
    } callback(fulfillProc, releaseProc, doneProc, textureContext, config);

    GrProxyProvider* proxyProvider = context->priv().proxyProvider();

    GrSurfaceDesc desc;
    desc.fWidth = width;
    desc.fHeight = height;
    desc.fConfig = config;

    // We pass kReadOnly here since we should treat content of the client's texture as immutable.
    return proxyProvider->createLazyProxy(std::move(callback), backendFormat, desc, origin,
                                          mipMapped, GrInternalSurfaceFlags::kReadOnly,
                                          SkBackingFit::kExact, SkBudgeted::kNo,
                                          GrSurfaceProxy::LazyInstantiationType::kDeinstantiate);
}