aboutsummaryrefslogtreecommitdiffhomepage
path: root/src/gpu/ccpr/GrCoverageCountingPathRenderer.h
blob: 1d08f381a28942933ba4fa3e8de2bd663ceaba91 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrCoverageCountingPathRenderer_DEFINED
#define GrCoverageCountingPathRenderer_DEFINED

#include "GrAllocator.h"
#include "GrOnFlushResourceProvider.h"
#include "GrPathRenderer.h"
#include "SkTInternalLList.h"
#include "ccpr/GrCCPRAtlas.h"
#include "ccpr/GrCCPRCoverageOp.h"
#include "ccpr/GrCCPRPathProcessor.h"
#include "ops/GrDrawOp.h"
#include <map>

/**
 * This is a path renderer that draws antialiased paths by counting coverage in an offscreen
 * buffer. (See GrCCPRCoverageProcessor, GrCCPRPathProcessor)
 *
 * It also serves as the per-render-target tracker for pending path draws, and at the start of
 * flush, it compiles GPU buffers and renders a "coverage count atlas" for the upcoming paths.
 */
class GrCoverageCountingPathRenderer
    : public GrPathRenderer
    , public GrOnFlushCallbackObject {

    // Forward declaration so nested DrawPathsOp can point back at the per-render-target
    // bookkeeping struct it is tracked in. Defined in the private section below.
    struct RTPendingPaths;

public:
    // True if the given caps allow this path renderer to be used at all.
    static bool IsSupported(const GrCaps&);
    // Returns a new instance, or null when IsSupported() would be false.
    // NOTE(review): "Cachable" spelling is propagated from the implementation file; do not
    // rename here without updating the .cpp.
    static sk_sp<GrCoverageCountingPathRenderer> CreateIfSupported(const GrCaps&,
                                                                   bool drawCachablePaths);

    ~GrCoverageCountingPathRenderer() override {
        // Ensure no Ops exist that could have a dangling pointer back into this class.
        SkASSERT(fRTPendingPathsMap.empty());
        SkASSERT(0 == fPendingDrawOpsCount);
    }

    // This is the Op that ultimately draws a path into its final destination, using the atlas we
    // generate at flush time.
    class DrawPathsOp : public GrDrawOp {
    public:
        DEFINE_OP_CLASS_ID
        SK_DECLARE_INTERNAL_LLIST_INTERFACE(DrawPathsOp);

        DrawPathsOp(GrCoverageCountingPathRenderer*, const DrawPathArgs&, GrColor);
        ~DrawPathsOp() override;

        // One recorded path draw. Ops that get combined chain their draws together via
        // fNext (head stored inline in fHeadDraw, tail tracked by fTailDraw).
        struct SingleDraw  {
            SkIRect fClipIBounds;   // device-space clip bounds this draw must respect
            SkMatrix fMatrix;       // view matrix for fPath
            SkPath fPath;
            GrColor fColor;
            SingleDraw* fNext = nullptr;
        };

        // First draw in this op's chain. Valid only while at least one instance exists.
        const SingleDraw* head() const {
            SkASSERT(fInstanceCount >= 1);
            return &fHeadDraw;
        }

        SkDEBUGCODE(int numSkippedInstances_debugOnly() const { return fNumSkippedInstances; })

        // GrDrawOp overrides.
        const char* name() const override { return "GrCoverageCountingPathRenderer::DrawPathsOp"; }
        FixedFunctionFlags fixedFunctionFlags() const override { return FixedFunctionFlags::kNone; }
        RequiresDstTexture finalize(const GrCaps&, const GrAppliedClip*,
                                    GrPixelConfigIsClamped) override;
        void wasRecorded(GrRenderTargetOpList*) override;
        bool onCombineIfPossible(GrOp* other, const GrCaps& caps) override;
        void visitProxies(const VisitProxyFunc& func) const override {
            fProcessors.visitProxies(func);
        }
        // All resource setup happens in setupResources() during preFlush, so onPrepare is a no-op.
        void onPrepare(GrOpFlushState*) override {}
        void onExecute(GrOpFlushState*) override;

        // Called at flush time to place this op's paths in the atlas and fill out its slice of
        // the per-flush instance buffer, starting at pathInstanceIdx. Returns the new instance
        // index (presumably one past the last instance written — confirm against the .cpp).
        int setupResources(GrOnFlushResourceProvider*, GrCCPRCoverageOpsBuilder*,
                           GrCCPRPathProcessor::Instance* pathInstanceData, int pathInstanceIdx);

    private:
        // Fill type shared by this op's draws; read off the head draw.
        // NOTE(review): assumes onCombineIfPossible only merges draws with matching fill
        // types — verify in the implementation.
        SkPath::FillType getFillType() const {
            SkASSERT(fInstanceCount >= 1);
            return fHeadDraw.fPath.getFillType();
        }

        // A run of consecutive path instances that all sample from the same atlas.
        struct AtlasBatch {
            const GrCCPRAtlas* fAtlas;
            int fEndInstanceIdx;    // one past the last instance index in this batch
        };

        // Appends a batch; batches must be added in increasing instance order.
        void addAtlasBatch(const GrCCPRAtlas* atlas, int endInstanceIdx) {
            SkASSERT(endInstanceIdx > fBaseInstance);
            SkASSERT(fAtlasBatches.empty() ||
                     endInstanceIdx > fAtlasBatches.back().fEndInstanceIdx);
            fAtlasBatches.push_back() = {atlas, endInstanceIdx};
        }

        GrCoverageCountingPathRenderer* const fCCPR;  // owning renderer; outlives this op (asserted in ~GrCoverageCountingPathRenderer)
        const uint32_t fSRGBFlags;
        GrProcessorSet fProcessors;
        SingleDraw fHeadDraw;                    // first draw, stored inline to avoid an allocation
        SingleDraw* fTailDraw;                   // tail of the chained draw list
        RTPendingPaths* fOwningRTPendingPaths;   // back-pointer into fRTPendingPathsMap entry
        int fBaseInstance;                       // this op's starting index in the per-flush instance buffer
        SkDEBUGCODE(int fInstanceCount;)
        SkDEBUGCODE(int fNumSkippedInstances;)
        SkSTArray<1, AtlasBatch, true> fAtlasBatches;

        typedef GrDrawOp INHERITED;
    };

    // GrPathRenderer overrides.
    StencilSupport onGetStencilSupport(const GrShape&) const override {
        // CCPR draws coverage from an atlas; it cannot render into the stencil buffer.
        return GrPathRenderer::kNoSupport_StencilSupport;
    }
    CanDrawPath onCanDrawPath(const CanDrawPathArgs& args) const override;
    bool onDrawPath(const DrawPathArgs&) final;

    // These are keyed by SkPath generation ID, and store which device-space paths are accessed and
    // where by clip FPs in a given opList. A single ClipPath can be referenced by multiple FPs. At
    // flush time their coverage count masks are packed into atlas(es) alongside normal DrawPathOps.
    class ClipPath {
    public:
        ClipPath() = default;
        ClipPath(const ClipPath&) = delete;

        ~ClipPath() {
            // Ensure no clip FPs exist with a dangling pointer back into this class.
            SkASSERT(!fAtlasLazyProxy || fAtlasLazyProxy->isUnique_debugOnly());
            // Ensure no lazy proxy callbacks exist with a dangling pointer back into this class.
            SkASSERT(fHasAtlasTransform);
        }

        // init() has not been called yet iff the lazy atlas proxy is still null.
        bool isUninitialized() const { return !fAtlasLazyProxy; }
        void init(const SkPath& deviceSpacePath, const SkIRect& accessRect, int rtWidth,
                  int rtHeight);
        // Records an additional region of the render target that reads this clip path's mask.
        void addAccess(const SkIRect& accessRect) {
            SkASSERT(!this->isUninitialized());
            fAccessRect.join(accessRect);
        }

        GrTextureProxy* atlasLazyProxy() const {
            SkASSERT(!this->isUninitialized());
            return fAtlasLazyProxy.get();
        }
        const SkPath& deviceSpacePath() const {
            SkASSERT(!this->isUninitialized());
            return fDeviceSpacePath;
        }
        const SkIRect& pathDevIBounds() const {
            SkASSERT(!this->isUninitialized());
            return fPathDevIBounds;
        }
        // Called at flush time to reserve atlas space for this path's coverage mask.
        void placePathInAtlas(GrCoverageCountingPathRenderer*, GrOnFlushResourceProvider*,
                              GrCCPRCoverageOpsBuilder*);

        // Atlas-space transform; only valid after placePathInAtlas() has run (debug-asserted).
        const SkVector& atlasScale() const { SkASSERT(fHasAtlasTransform); return fAtlasScale; }
        const SkVector& atlasTranslate() const {
            SkASSERT(fHasAtlasTransform);
            return fAtlasTranslate;
        }

    private:
        sk_sp<GrTextureProxy> fAtlasLazyProxy;  // lazily-instantiated view of the atlas texture
        SkPath fDeviceSpacePath;
        SkIRect fPathDevIBounds;
        SkIRect fAccessRect;  // union of all rects passed to init()/addAccess()

        const GrCCPRAtlas* fAtlas = nullptr;
        int16_t fAtlasOffsetX;  // valid only once fHasAtlas is set (debug)
        int16_t fAtlasOffsetY;
        SkDEBUGCODE(bool fHasAtlas = false);

        SkVector fAtlasScale;      // valid only once fHasAtlasTransform is set (debug)
        SkVector fAtlasTranslate;
        SkDEBUGCODE(bool fHasAtlasTransform = false);
    };

    // True if the given device-space path can be handled by makeClipProcessor().
    bool canMakeClipProcessor(const SkPath& deviceSpacePath) const;

    // Creates a fragment processor that applies the given path as a coverage-counted clip,
    // registering the path in the opList's ClipPath map (keyed by SkPath generation ID).
    std::unique_ptr<GrFragmentProcessor> makeClipProcessor(uint32_t oplistID,
                                                           const SkPath& deviceSpacePath,
                                                           const SkIRect& accessRect,
                                                           int rtWidth, int rtHeight);

    // GrOnFlushCallbackObject overrides.
    // preFlush compiles the per-flush GPU buffers and renders the coverage-count atlas(es);
    // postFlush releases them and drops the flushed opLists' pending-path state.
    void preFlush(GrOnFlushResourceProvider*, const uint32_t* opListIDs, int numOpListIDs,
                  SkTArray<sk_sp<GrRenderTargetContext>>* results) override;
    void postFlush(GrDeferredUploadToken, const uint32_t* opListIDs, int numOpListIDs) override;

private:
    // Instantiation goes through CreateIfSupported() so callers can't bypass the caps check.
    GrCoverageCountingPathRenderer(bool drawCachablePaths)
            : fDrawCachablePaths(drawCachablePaths) {}

    // Finds (or adds) an atlas with room for pathIBounds, returning the atlas and the path's
    // offset within it. Shared by DrawPathsOp::setupResources and ClipPath::placePathInAtlas.
    GrCCPRAtlas* placeParsedPathInAtlas(GrOnFlushResourceProvider*, const SkIRect& accessRect,
                                        const SkIRect& pathIBounds, int16_t* atlasOffsetX,
                                        int16_t* atlasOffsetY, GrCCPRCoverageOpsBuilder*);

    // All pending path state for a single render target (opList): the draw ops recorded
    // against it, its clip paths keyed by SkPath generation ID, and the allocator backing
    // the ops' chained SingleDraws.
    struct RTPendingPaths {
        ~RTPendingPaths() {
            // Ensure all DrawPathsOps in this opList have been deleted.
            SkASSERT(fDrawOps.isEmpty());
        }

        SkTInternalLList<DrawPathsOp> fDrawOps;
        std::map<uint32_t, ClipPath> fClipPaths;
        GrSTAllocator<256, DrawPathsOp::SingleDraw> fDrawsAllocator;
    };

    // A map from render target ID to the individual render target's pending paths.
    std::map<uint32_t, RTPendingPaths> fRTPendingPathsMap;
    SkDEBUGCODE(int fPendingDrawOpsCount = 0;)

    // Resources built in preFlush() and shared by every DrawPathsOp in the flush.
    sk_sp<GrBuffer> fPerFlushIndexBuffer;
    sk_sp<GrBuffer> fPerFlushVertexBuffer;
    sk_sp<GrBuffer> fPerFlushInstanceBuffer;
    GrSTAllocator<4, GrCCPRAtlas> fPerFlushAtlases;
    // Set by preFlush(); when false, ops skip drawing at onExecute (resource setup failed).
    bool fPerFlushResourcesAreValid;
    SkDEBUGCODE(bool fFlushing = false;)

    const bool fDrawCachablePaths;
};

#endif