[go: nahoru, domu]

blob: 76ecd0d63661aadc3aa5f3c2e3c6c37248ab09a0 [file] [log] [blame]
reveman@chromium.orgb5641b92014-02-15 14:21:581// Copyright 2014 The Chromium Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
prashant.nb4d4f492016-04-29 12:51:285#include "cc/raster/gpu_raster_buffer_provider.h"
reveman@chromium.orgb5641b92014-02-15 14:21:586
avi02a4d172015-12-21 06:14:367#include <stdint.h>
8
ernstm69fedbd2014-09-18 01:23:419#include <algorithm>
Robert Phillipsdbb0b7d2020-07-29 16:07:4410#include <memory>
Mike Reedf7074ff2019-03-14 14:28:1611#include <utility>
Robert Phillipsdbb0b7d2020-07-29 16:07:4412#include <vector>
ernstm69fedbd2014-09-18 01:23:4113
Hans Wennborg66d784de2020-06-17 20:23:0314#include "base/logging.h"
ericrke4027312016-06-30 00:12:4215#include "base/metrics/histogram_macros.h"
Khushal3c3873a2018-11-06 18:14:0116#include "base/rand_util.h"
Antoine Labour6283c792018-09-27 16:59:2817#include "base/strings/stringprintf.h"
Alexandr Ilin0443a8f2018-07-20 20:14:5018#include "base/trace_event/process_memory_dump.h"
ssid904ce3b2015-01-27 15:20:1619#include "base/trace_event/trace_event.h"
Yuta Hijikata173c7d62020-11-10 06:21:3720#include "build/chromeos_buildflags.h"
ericrkc7c9e3f2016-07-01 17:30:1621#include "cc/base/histograms.h"
Adrienne Walker436a7752017-08-28 23:33:0922#include "cc/paint/display_item_list.h"
enne5a9630362017-02-24 23:41:0323#include "cc/paint/paint_canvas.h"
Adrienne Walker436a7752017-08-28 23:33:0924#include "cc/paint/paint_recorder.h"
chrishtrac41ff92017-03-17 05:07:3025#include "cc/raster/raster_source.h"
danakj920156852015-05-18 20:22:2926#include "cc/raster/scoped_gpu_raster.h"
Khushal3cfc77f2018-08-15 07:19:5827#include "cc/raster/scoped_grcontext_access.h"
danakj57baa772018-05-29 15:59:1428#include "components/viz/client/client_resource_provider.h"
Victor Miura29b7ea3d2017-12-19 20:23:5929#include "components/viz/common/gpu/context_provider.h"
30#include "components/viz/common/gpu/raster_context_provider.h"
danakjaf3170e2018-02-09 17:31:5831#include "gpu/GLES2/gl2extchromium.h"
ericrk7f6a27f2017-01-31 22:34:3232#include "gpu/command_buffer/client/context_support.h"
reveman@chromium.orgb5641b92014-02-15 14:21:5833#include "gpu/command_buffer/client/gles2_interface.h"
Victor Miura3a4ad4f82017-12-13 06:03:4534#include "gpu/command_buffer/client/raster_interface.h"
Antoine Labour6283c792018-09-27 16:59:2835#include "gpu/command_buffer/client/shared_image_interface.h"
Antoine Labour6283c792018-09-27 16:59:2836#include "gpu/command_buffer/common/shared_image_trace_utils.h"
37#include "gpu/command_buffer/common/shared_image_usage.h"
Ian Preste598eece2020-10-19 23:31:0238#include "skia/ext/legacy_display_globals.h"
hendrikw04cea972014-09-23 20:50:5339#include "third_party/skia/include/core/SkPictureRecorder.h"
reveman47560ab2014-09-18 19:39:2140#include "third_party/skia/include/core/SkSurface.h"
Robert Phillipsdbb0b7d2020-07-29 16:07:4441#include "third_party/skia/include/gpu/GrDirectContext.h"
Adrienne Walker436a7752017-08-28 23:33:0942#include "ui/gfx/geometry/axis_transform2d.h"
Khushal49836ab2018-07-25 02:08:4543#include "url/gurl.h"
reveman@chromium.orgb5641b92014-02-15 14:21:5844
45namespace cc {
reveman47560ab2014-09-18 19:39:2146namespace {
47
// RAII helper that rasters into a temporary 32-bit (N32 premultiplied)
// SkSurface and, on destruction, copies the result into the caller's
// lower-bit-depth texture via UnpremultiplyAndDitherCopyCHROMIUM. Used for
// tile formats (e.g. 4444) that need an unpremultiply+dither step that Skia
// cannot render directly.
class ScopedSkSurfaceForUnpremultiplyAndDither {
 public:
  // |texture_id| is the destination texture; |playback_rect| (in the same
  // space as |raster_full_rect|) determines the copy offset and size.
  ScopedSkSurfaceForUnpremultiplyAndDither(
      viz::RasterContextProvider* context_provider,
      sk_sp<SkColorSpace> color_space,
      const gfx::Rect& playback_rect,
      const gfx::Rect& raster_full_rect,
      const gfx::Size& max_tile_size,
      GLuint texture_id,
      const gfx::Size& texture_size,
      bool can_use_lcd_text,
      int msaa_sample_count)
      : context_provider_(context_provider),
        texture_id_(texture_id),
        offset_(playback_rect.OffsetFromOrigin() -
                raster_full_rect.OffsetFromOrigin()),
        size_(playback_rect.size()) {
    // Determine the |intermediate_size| to use for our 32-bit texture. If we
    // know the max tile size, use that. This prevents GPU cache explosion due
    // to using lots of different 32-bit texture sizes. Otherwise just use the
    // exact size of the target texture.
    gfx::Size intermediate_size;
    if (!max_tile_size.IsEmpty()) {
      DCHECK_GE(max_tile_size.width(), texture_size.width());
      DCHECK_GE(max_tile_size.height(), texture_size.height());
      intermediate_size = max_tile_size;
    } else {
      intermediate_size = texture_size;
    }

    // Allocate a 32-bit surface for raster. We will copy from that into our
    // actual surface in destruction.
    SkImageInfo n32Info = SkImageInfo::MakeN32Premul(intermediate_size.width(),
                                                     intermediate_size.height(),
                                                     std::move(color_space));
    SkSurfaceProps surface_props =
        skia::LegacyDisplayGlobals::ComputeSurfaceProps(can_use_lcd_text);
    surface_ = SkSurface::MakeRenderTarget(
        context_provider->GrContext(), SkBudgeted::kNo, n32Info,
        msaa_sample_count, kTopLeft_GrSurfaceOrigin, &surface_props);
  }

  ~ScopedSkSurfaceForUnpremultiplyAndDither() {
    // In lost-context cases, |surface_| may be null and there's nothing
    // meaningful to do here.
    if (!surface_)
      return;

    // kFlushRead access flushes pending raster work so the backend texture
    // contents are complete before the copy below.
    GrBackendTexture backend_texture =
        surface_->getBackendTexture(SkSurface::kFlushRead_BackendHandleAccess);
    if (!backend_texture.isValid()) {
      return;
    }
    GrGLTextureInfo info;
    if (!backend_texture.getGLTextureInfo(&info)) {
      return;
    }
    // Copy from the intermediate 32-bit texture into the destination texture,
    // unpremultiplying and dithering in the process.
    context_provider_->ContextGL()->UnpremultiplyAndDitherCopyCHROMIUM(
        info.fID, texture_id_, offset_.x(), offset_.y(), size_.width(),
        size_.height());
  }

  // The intermediate surface to raster into. May be null after context loss.
  SkSurface* surface() { return surface_.get(); }

 private:
  viz::RasterContextProvider* context_provider_;
  GLuint texture_id_;
  gfx::Vector2d offset_;
  gfx::Size size_;
  sk_sp<SkSurface> surface_;
};
119
// Rasters |raster_source| into the shared image identified by |mailbox| using
// out-of-process (OOP) rasterization: the display item list is serialized to
// the GPU process via RasterCHROMIUM rather than being played back locally.
// Creates the shared image on first use (when |mailbox| is zero); otherwise
// waits on |sync_token| before reusing it.
static void RasterizeSourceOOP(
    const RasterSource* raster_source,
    bool resource_has_previous_content,
    gpu::Mailbox* mailbox,
    const gpu::SyncToken& sync_token,
    GLenum texture_target,
    bool texture_is_overlay_candidate,
    const gfx::Size& resource_size,
    viz::ResourceFormat resource_format,
    const gfx::ColorSpace& color_space,
    const gfx::Rect& raster_full_rect,
    const gfx::Rect& playback_rect,
    const gfx::AxisTransform2d& transform,
    const RasterSource::PlaybackSettings& playback_settings,
    viz::RasterContextProvider* context_provider) {
  gpu::raster::RasterInterface* ri = context_provider->RasterInterface();
  if (mailbox->IsZero()) {
    // First use of this resource: create a backing shared image and wait for
    // its creation before issuing raster commands against it.
    DCHECK(!sync_token.HasData());
    auto* sii = context_provider->SharedImageInterface();
    uint32_t flags = gpu::SHARED_IMAGE_USAGE_DISPLAY |
                     gpu::SHARED_IMAGE_USAGE_RASTER |
                     gpu::SHARED_IMAGE_USAGE_OOP_RASTERIZATION;
    if (texture_is_overlay_candidate)
      flags |= gpu::SHARED_IMAGE_USAGE_SCANOUT;
    *mailbox = sii->CreateSharedImage(
        resource_format, resource_size, color_space, kTopLeft_GrSurfaceOrigin,
        kPremul_SkAlphaType, flags, gpu::kNullSurfaceHandle);
    ri->WaitSyncTokenCHROMIUM(sii->GenUnverifiedSyncToken().GetConstData());
  } else {
    // Reusing an existing shared image: synchronize with its previous use.
    ri->WaitSyncTokenCHROMIUM(sync_token.GetConstData());
  }

  ri->BeginRasterCHROMIUM(
      raster_source->background_color(), playback_settings.msaa_sample_count,
      playback_settings.use_lcd_text, color_space, mailbox->name);
  float recording_to_raster_scale =
      transform.scale() / raster_source->recording_scale_factor();
  gfx::Size content_size = raster_source->GetContentSize(transform.scale());

  // TODO(enne): could skip the clear on new textures, as the service side has
  // to do that anyway. resource_has_previous_content implies that the texture
  // is not new, but the reverse does not hold, so more plumbing is needed.
  ri->RasterCHROMIUM(
      raster_source->GetDisplayItemList().get(),
      playback_settings.image_provider, content_size, raster_full_rect,
      playback_rect, transform.translation(), recording_to_raster_scale,
      raster_source->requires_clear(),
      const_cast<RasterSource*>(raster_source)->max_op_size_hint());
  ri->EndRasterCHROMIUM();

  // TODO(ericrk): Handle unpremultiply+dither for 4444 cases.
  // https://crbug.com/789153
}
173
// Rasters |raster_source| into the shared image identified by |mailbox| using
// GPU raster on the worker context: an SkSurface is wrapped around the
// consumed texture and the source is played back into it locally. Creates the
// shared image on first use (when |mailbox| is zero); otherwise waits on
// |sync_token| before reusing it. When |unpremultiply_and_dither| is set,
// raster goes through an intermediate 32-bit surface (see
// ScopedSkSurfaceForUnpremultiplyAndDither).
static void RasterizeSource(
    const RasterSource* raster_source,
    bool resource_has_previous_content,
    gpu::Mailbox* mailbox,
    const gpu::SyncToken& sync_token,
    GLenum texture_target,
    bool texture_is_overlay_candidate,
    const gfx::Size& resource_size,
    viz::ResourceFormat resource_format,
    const gfx::ColorSpace& color_space,
    const gfx::Rect& raster_full_rect,
    const gfx::Rect& playback_rect,
    const gfx::AxisTransform2d& transform,
    const RasterSource::PlaybackSettings& playback_settings,
    viz::RasterContextProvider* context_provider,
    bool unpremultiply_and_dither,
    const gfx::Size& max_tile_size) {
  gpu::raster::RasterInterface* ri = context_provider->RasterInterface();
  if (mailbox->IsZero()) {
    // First use of this resource: create a backing shared image usable from
    // GLES2 and wait for its creation before consuming it below.
    auto* sii = context_provider->SharedImageInterface();
    uint32_t flags = gpu::SHARED_IMAGE_USAGE_DISPLAY |
                     gpu::SHARED_IMAGE_USAGE_GLES2 |
                     gpu::SHARED_IMAGE_USAGE_GLES2_FRAMEBUFFER_HINT;
    if (texture_is_overlay_candidate)
      flags |= gpu::SHARED_IMAGE_USAGE_SCANOUT;
    *mailbox = sii->CreateSharedImage(
        resource_format, resource_size, color_space, kTopLeft_GrSurfaceOrigin,
        kPremul_SkAlphaType, flags, gpu::kNullSurfaceHandle);
    ri->WaitSyncTokenCHROMIUM(sii->GenUnverifiedSyncToken().GetConstData());
  } else {
    // Wait on the SyncToken that was created on the compositor thread after
    // making the mailbox. This ensures that the mailbox we consume here is
    // valid by the time the consume command executes.
    ri->WaitSyncTokenCHROMIUM(sync_token.GetConstData());
  }
  GLuint texture_id = ri->CreateAndConsumeForGpuRaster(*mailbox);
  ri->BeginSharedImageAccessDirectCHROMIUM(
      texture_id, GL_SHARED_IMAGE_ACCESS_MODE_READWRITE_CHROMIUM);
  {
    // Scope GrContext access to the raster below; the surface wrappers are
    // destroyed (flushing their work) before access to the texture ends.
    ScopedGrContextAccess gr_context_access(context_provider);
    base::Optional<viz::ClientResourceProvider::ScopedSkSurface> scoped_surface;
    base::Optional<ScopedSkSurfaceForUnpremultiplyAndDither>
        scoped_dither_surface;
    SkSurface* surface;
    sk_sp<SkColorSpace> sk_color_space = color_space.ToSkColorSpace();
    if (!unpremultiply_and_dither) {
      // Raster directly into the consumed texture.
      scoped_surface.emplace(context_provider->GrContext(), sk_color_space,
                             texture_id, texture_target, resource_size,
                             resource_format,
                             skia::LegacyDisplayGlobals::ComputeSurfaceProps(
                                 playback_settings.use_lcd_text),
                             playback_settings.msaa_sample_count);
      surface = scoped_surface->surface();
    } else {
      // Raster into an intermediate 32-bit surface which is copied (with
      // unpremultiply+dither) into the texture on scope exit.
      scoped_dither_surface.emplace(
          context_provider, sk_color_space, playback_rect, raster_full_rect,
          max_tile_size, texture_id, resource_size,
          playback_settings.use_lcd_text, playback_settings.msaa_sample_count);
      surface = scoped_dither_surface->surface();
    }

    // Allocating an SkSurface will fail after a lost context. Pretend we
    // rasterized, as the contents of the resource don't matter anymore.
    // NOTE(review): this early return also skips EndSharedImageAccess /
    // DeleteGpuRasterTexture below — presumably acceptable because the
    // context is lost; confirm.
    if (!surface) {
      DLOG(ERROR) << "Failed to allocate raster surface";
      return;
    }

    SkCanvas* canvas = surface->getCanvas();

    // As an optimization, inform Skia to discard when not doing partial raster.
    if (raster_full_rect == playback_rect)
      canvas->discard();

    gfx::Size content_size = raster_source->GetContentSize(transform.scale());
    raster_source->PlaybackToCanvas(canvas, content_size, raster_full_rect,
                                    playback_rect, transform,
                                    playback_settings);
  }
  ri->EndSharedImageAccessDirectCHROMIUM(texture_id);
  ri->DeleteGpuRasterTexture(texture_id);
}
reveman47560ab2014-09-18 19:39:21256
257} // namespace
vmiura78b69282015-02-14 00:01:17258
// Subclass for InUsePoolResource that holds ownership of a gpu-rastered backing
// and does cleanup of the backing when destroyed.
class GpuRasterBufferProvider::GpuRasterBacking
    : public ResourcePool::GpuBacking {
 public:
  ~GpuRasterBacking() override {
    if (mailbox.IsZero())
      return;
    auto* sii = worker_context_provider->SharedImageInterface();
    // Destroy after the most recent known use: prefer the token returned by
    // the display compositor, falling back to the raster-completion token.
    // NOTE(review): if neither token has data the shared image is not
    // destroyed here — presumably that state cannot occur for a non-zero
    // mailbox; confirm.
    if (returned_sync_token.HasData())
      sii->DestroySharedImage(returned_sync_token, mailbox);
    else if (mailbox_sync_token.HasData())
      sii->DestroySharedImage(mailbox_sync_token, mailbox);
  }

  // Reports this backing's shared image to the memory-infra dump, linking the
  // buffer's dump to the global shared-image dump via an ownership edge.
  void OnMemoryDump(
      base::trace_event::ProcessMemoryDump* pmd,
      const base::trace_event::MemoryAllocatorDumpGuid& buffer_dump_guid,
      uint64_t tracing_process_id,
      int importance) const override {
    if (mailbox.IsZero())
      return;

    auto tracing_guid = gpu::GetSharedImageGUIDForTracing(mailbox);
    pmd->CreateSharedGlobalAllocatorDump(tracing_guid);
    pmd->AddOwnershipEdge(buffer_dump_guid, tracing_guid, importance);
  }

  // The ContextProvider used to clean up the mailbox.
  viz::RasterContextProvider* worker_context_provider = nullptr;
};
290
// Snapshots the state needed for raster (size/format/color space, the
// backing's mailbox and sync token) on the compositor thread so Playback can
// later run on a worker thread without touching |in_use_resource|.
GpuRasterBufferProvider::RasterBufferImpl::RasterBufferImpl(
    GpuRasterBufferProvider* client,
    const ResourcePool::InUsePoolResource& in_use_resource,
    GpuRasterBacking* backing,
    bool resource_has_previous_content,
    bool depends_on_at_raster_decodes,
    bool depends_on_hardware_accelerated_jpeg_candidates,
    bool depends_on_hardware_accelerated_webp_candidates)
    : client_(client),
      backing_(backing),
      resource_size_(in_use_resource.size()),
      resource_format_(in_use_resource.format()),
      color_space_(in_use_resource.color_space()),
      resource_has_previous_content_(resource_has_previous_content),
      depends_on_at_raster_decodes_(depends_on_at_raster_decodes),
      depends_on_hardware_accelerated_jpeg_candidates_(
          depends_on_hardware_accelerated_jpeg_candidates),
      depends_on_hardware_accelerated_webp_candidates_(
          depends_on_hardware_accelerated_webp_candidates),
      before_raster_sync_token_(backing->returned_sync_token),
      texture_target_(backing->texture_target),
      texture_is_overlay_candidate_(backing->overlay_candidate),
      mailbox_(backing->mailbox) {
#if BUILDFLAG(IS_CHROMEOS_ASH)
  // Only do this in Chrome OS with OOP-R because:
  // 1) We will use this timestamp to measure raster scheduling delay and we
  //    only need to collect that data to assess the impact of hardware
  //    acceleration of image decodes which works only on Chrome OS with
  //    OOP-R.
  // 2) We use CLOCK_MONOTONIC in that OS to get timestamps, so we can assert
  //    certain assumptions.
  if (client_->enable_oop_rasterization_)
    creation_time_ = base::TimeTicks::Now();
#endif
}
sunnyps5d6ff0d02016-06-28 00:40:11326
// Writes the post-raster state (mailbox and its sync token) back into the
// backing so the pool can reuse or destroy the resource correctly.
GpuRasterBufferProvider::RasterBufferImpl::~RasterBufferImpl() {
  // This SyncToken was created on the worker context after rastering the
  // texture content.
  backing_->mailbox_sync_token = after_raster_sync_token_;
  if (after_raster_sync_token_.HasData()) {
    // The returned SyncToken was waited on in Playback. We know Playback
    // happened if the |after_raster_sync_token_| was set.
    backing_->returned_sync_token = gpu::SyncToken();
  }
  // Playback may have created the mailbox lazily; propagate it back.
  backing_->mailbox = mailbox_;
}
338
// RasterBuffer implementation: forwards to the provider's worker-thread
// raster path with the state captured at construction, and records the sync
// token to wait on once raster completes.
void GpuRasterBufferProvider::RasterBufferImpl::Playback(
    const RasterSource* raster_source,
    const gfx::Rect& raster_full_rect,
    const gfx::Rect& raster_dirty_rect,
    uint64_t new_content_id,
    const gfx::AxisTransform2d& transform,
    const RasterSource::PlaybackSettings& playback_settings,
    const GURL& url) {
  TRACE_EVENT0("cc", "GpuRasterBuffer::Playback");
  // The |before_raster_sync_token_| passed in here was created on the
  // compositor thread, or given back with the texture for reuse. This call
  // returns another SyncToken generated on the worker thread to synchronize
  // with after the raster is complete.
  after_raster_sync_token_ = client_->PlaybackOnWorkerThread(
      &mailbox_, texture_target_, texture_is_overlay_candidate_,
      before_raster_sync_token_, resource_size_, resource_format_, color_space_,
      resource_has_previous_content_, raster_source, raster_full_rect,
      raster_dirty_rect, new_content_id, transform, playback_settings, url,
      creation_time_, depends_on_at_raster_decodes_,
      depends_on_hardware_accelerated_jpeg_candidates_,
      depends_on_hardware_accelerated_webp_candidates_);
}
361
// GPU raster work may run on a background-priority worker thread.
bool GpuRasterBufferProvider::RasterBufferImpl::
    SupportsBackgroundThreadPriority() const {
  return true;
}
366
// Both context providers are required: the compositor context is used for
// flushing and ready-to-draw callbacks, the worker context for raster itself.
// |raster_metric_probability| sets the sampling rate for per-tile raster
// timing metrics (see bernoulli_distribution_ usage during playback).
GpuRasterBufferProvider::GpuRasterBufferProvider(
    viz::ContextProvider* compositor_context_provider,
    viz::RasterContextProvider* worker_context_provider,
    bool use_gpu_memory_buffer_resources,
    viz::ResourceFormat tile_format,
    const gfx::Size& max_tile_size,
    bool unpremultiply_and_dither_low_bit_depth_tiles,
    bool enable_oop_rasterization,
    float raster_metric_probability)
    : compositor_context_provider_(compositor_context_provider),
      worker_context_provider_(worker_context_provider),
      use_gpu_memory_buffer_resources_(use_gpu_memory_buffer_resources),
      tile_format_(tile_format),
      max_tile_size_(max_tile_size),
      unpremultiply_and_dither_low_bit_depth_tiles_(
          unpremultiply_and_dither_low_bit_depth_tiles),
      enable_oop_rasterization_(enable_oop_rasterization),
      random_generator_(static_cast<uint32_t>(base::RandUint64())),
      bernoulli_distribution_(raster_metric_probability) {
  DCHECK(compositor_context_provider);
  DCHECK(worker_context_provider);
}
reveman@chromium.orgb5641b92014-02-15 14:21:58389
sunnyps5d6ff0d02016-06-28 00:40:11390GpuRasterBufferProvider::~GpuRasterBufferProvider() {
sunnyps5d6ff0d02016-06-28 00:40:11391}
reveman@chromium.orgb5641b92014-02-15 14:21:58392
// Creates (lazily) a GpuRasterBacking for |resource| and wraps it in a
// RasterBufferImpl whose Playback will raster on a worker thread. Partial
// raster is possible only when the resource still holds the previous
// content (same content id as before).
std::unique_ptr<RasterBuffer> GpuRasterBufferProvider::AcquireBufferForRaster(
    const ResourcePool::InUsePoolResource& resource,
    uint64_t resource_content_id,
    uint64_t previous_content_id,
    bool depends_on_at_raster_decodes,
    bool depends_on_hardware_accelerated_jpeg_candidates,
    bool depends_on_hardware_accelerated_webp_candidates) {
  if (!resource.gpu_backing()) {
    // First acquisition of this pool resource: attach a backing that knows
    // how to clean up its shared image on the worker context.
    auto backing = std::make_unique<GpuRasterBacking>();
    backing->worker_context_provider = worker_context_provider_;
    backing->InitOverlayCandidateAndTextureTarget(
        resource.format(), compositor_context_provider_->ContextCapabilities(),
        use_gpu_memory_buffer_resources_);
    resource.set_gpu_backing(std::move(backing));
  }
  GpuRasterBacking* backing =
      static_cast<GpuRasterBacking*>(resource.gpu_backing());
  // A content id of 0 never matches; reuse requires identical non-zero ids.
  bool resource_has_previous_content =
      resource_content_id && resource_content_id == previous_content_id;
  return std::make_unique<RasterBufferImpl>(
      this, resource, backing, resource_has_previous_content,
      depends_on_at_raster_decodes,
      depends_on_hardware_accelerated_jpeg_candidates,
      depends_on_hardware_accelerated_webp_candidates);
}
418
// Flushes deferred work queued on the compositor context.
void GpuRasterBufferProvider::Flush() {
  compositor_context_provider_->ContextSupport()->FlushPendingWork();
}
422
// Returns the format used for all tile resources created by this provider.
viz::ResourceFormat GpuRasterBufferProvider::GetResourceFormat() const {
  return tile_format_;
}
426
// Tiles are premultiplied except when the unpremultiply+dither raster path
// applies to the current tile format.
bool GpuRasterBufferProvider::IsResourcePremultiplied() const {
  return !ShouldUnpremultiplyAndDitherResource(GetResourceFormat());
}
430
// Returns true once the raster work recorded for |resource| has completed on
// the GPU, i.e. its post-raster sync token has signaled.
bool GpuRasterBufferProvider::IsResourceReadyToDraw(
    const ResourcePool::InUsePoolResource& resource) const {
  const gpu::SyncToken& sync_token = resource.gpu_backing()->mailbox_sync_token;
  // This SyncToken() should have been set by calling OrderingBarrier() before
  // calling this.
  DCHECK(sync_token.HasData());

  // IsSyncTokenSignaled is thread-safe, no need for worker context lock.
  return worker_context_provider_->ContextSupport()->IsSyncTokenSignaled(
      sync_token);
}
442
// GPU raster can replay only the dirty region into an existing backing (the
// playback rect is intersected with the dirty rect during playback).
bool GpuRasterBufferProvider::CanPartialRasterIntoProvidedResource() const {
  return true;
}
446
ericrk7f6a27f2017-01-31 22:34:32447uint64_t GpuRasterBufferProvider::SetReadyToDrawCallback(
danakj4e871d82018-01-18 21:56:57448 const std::vector<const ResourcePool::InUsePoolResource*>& resources,
kylechar4bb144d2019-01-11 20:42:07449 base::OnceClosure callback,
ericrk7f6a27f2017-01-31 22:34:32450 uint64_t pending_callback_id) const {
danakjaf3170e2018-02-09 17:31:58451 gpu::SyncToken latest_sync_token;
452 for (const auto* in_use : resources) {
453 const gpu::SyncToken& sync_token =
454 in_use->gpu_backing()->mailbox_sync_token;
455 if (sync_token.release_count() > latest_sync_token.release_count())
456 latest_sync_token = sync_token;
457 }
458 uint64_t callback_id = latest_sync_token.release_count();
ericrk7f6a27f2017-01-31 22:34:32459 DCHECK_NE(callback_id, 0u);
460
461 // If the callback is different from the one the caller is already waiting on,
sunnyps31c92fe2017-02-10 23:46:55462 // pass the callback through to SignalSyncToken. Otherwise the request is
ericrk7f6a27f2017-01-31 22:34:32463 // redundant.
464 if (callback_id != pending_callback_id) {
danakjaf3170e2018-02-09 17:31:58465 // Use the compositor context because we want this callback on the
466 // compositor thread.
467 compositor_context_provider_->ContextSupport()->SignalSyncToken(
kylechar4bb144d2019-01-11 20:42:07468 latest_sync_token, std::move(callback));
ericrk7f6a27f2017-01-31 22:34:32469 }
470
471 return callback_id;
472}
473
// Nothing to tear down here; per-resource cleanup is performed by
// GpuRasterBacking's destructor when the pool releases its resources.
void GpuRasterBufferProvider::Shutdown() {
}
476
// Runs raster for one tile on the worker context and, when raster-timing
// metrics are being sampled, records the GPU queries created for it so they
// can be checked later. Returns the sync token marking raster completion.
gpu::SyncToken GpuRasterBufferProvider::PlaybackOnWorkerThread(
    gpu::Mailbox* mailbox,
    GLenum texture_target,
    bool texture_is_overlay_candidate,
    const gpu::SyncToken& sync_token,
    const gfx::Size& resource_size,
    viz::ResourceFormat resource_format,
    const gfx::ColorSpace& color_space,
    bool resource_has_previous_content,
    const RasterSource* raster_source,
    const gfx::Rect& raster_full_rect,
    const gfx::Rect& raster_dirty_rect,
    uint64_t new_content_id,
    const gfx::AxisTransform2d& transform,
    const RasterSource::PlaybackSettings& playback_settings,
    const GURL& url,
    base::TimeTicks raster_buffer_creation_time,
    bool depends_on_at_raster_decodes,
    bool depends_on_hardware_accelerated_jpeg_candidates,
    bool depends_on_hardware_accelerated_webp_candidates) {
  PendingRasterQuery query;
  query.depends_on_hardware_accelerated_jpeg_candidates =
      depends_on_hardware_accelerated_jpeg_candidates;
  query.depends_on_hardware_accelerated_webp_candidates =
      depends_on_hardware_accelerated_webp_candidates;
  gpu::SyncToken raster_finished_token = PlaybackOnWorkerThreadInternal(
      mailbox, texture_target, texture_is_overlay_candidate, sync_token,
      resource_size, resource_format, color_space,
      resource_has_previous_content, raster_source, raster_full_rect,
      raster_dirty_rect, new_content_id, transform, playback_settings, url,
      depends_on_at_raster_decodes, &query);

  // A duration query id is only set when this raster was sampled for metrics.
  if (query.raster_duration_query_id) {
    if (query.raster_start_query_id)
      query.raster_buffer_creation_time = raster_buffer_creation_time;

    // Note that it is important to scope the raster context lock to
    // PlaybackOnWorkerThreadInternal and release it before acquiring this lock
    // to avoid a deadlock in CheckRasterFinishedQueries which acquires the
    // raster context lock while holding this lock.
    base::AutoLock hold(pending_raster_queries_lock_);
    pending_raster_queries_.push_back(query);
  }
  // The start query is only ever created together with the duration query.
  DCHECK(!query.raster_start_query_id || query.raster_duration_query_id);

  return raster_finished_token;
}
524
// Rasterizes |raster_source| into the GPU resource identified by |mailbox| on
// a worker thread, holding the worker context lock for the duration. Optionally
// (on a random sample of tasks) issues GPU timing queries into |query| so that
// CheckRasterFinishedQueries() can later record raster metrics. Returns a sync
// token that callers use for cross-context synchronization of the raster work.
// NOTE(review): |new_content_id| is not referenced in this body — presumably
// kept for interface symmetry with the caller; confirm before removing.
gpu::SyncToken GpuRasterBufferProvider::PlaybackOnWorkerThreadInternal(
    gpu::Mailbox* mailbox,
    GLenum texture_target,
    bool texture_is_overlay_candidate,
    const gpu::SyncToken& sync_token,
    const gfx::Size& resource_size,
    viz::ResourceFormat resource_format,
    const gfx::ColorSpace& color_space,
    bool resource_has_previous_content,
    const RasterSource* raster_source,
    const gfx::Rect& raster_full_rect,
    const gfx::Rect& raster_dirty_rect,
    uint64_t new_content_id,
    const gfx::AxisTransform2d& transform,
    const RasterSource::PlaybackSettings& playback_settings,
    const GURL& url,
    bool depends_on_at_raster_decodes,
    PendingRasterQuery* query) {
  // Holds the worker context lock for the whole raster operation; the url is
  // attached for context-lost/debugging attribution.
  viz::RasterContextProvider::ScopedRasterContextLock scoped_context(
      worker_context_provider_, url.possibly_invalid_spec().c_str());
  gpu::raster::RasterInterface* ri = scoped_context.RasterInterface();
  DCHECK(ri);

  // Raster metrics are sampled: only a random subset of tasks pays the cost
  // of issuing GPU queries and CPU timers.
  const bool measure_raster_metric = bernoulli_distribution_(random_generator_);

  // If the resource already holds previous content we only need to re-raster
  // the dirty region; otherwise the full rect must be played back.
  gfx::Rect playback_rect = raster_full_rect;
  if (resource_has_previous_content) {
    playback_rect.Intersect(raster_dirty_rect);
  }
  DCHECK(!playback_rect.IsEmpty())
      << "Why are we rastering a tile that's not dirty?";

  if (measure_raster_metric) {
#if BUILDFLAG(IS_CHROMEOS_ASH)
    // Use a query to detect when the GPU side is ready to start issuing raster
    // work to the driver. We will use the resulting timestamp to measure raster
    // scheduling delay. We only care about this in Chrome OS and when OOP-R is
    // enabled because we will use this timestamp to measure raster scheduling
    // delay and we only need to collect that data to assess the impact of
    // hardware acceleration of image decodes which work only in Chrome OS with
    // OOP-R. Furthermore, we don't count raster work that depends on at-raster
    // image decodes. This is because we want the delay to always include
    // image decoding and uploading time, and at-raster decodes should be
    // relatively rare.
    if (enable_oop_rasterization_ && !depends_on_at_raster_decodes) {
      ri->GenQueriesEXT(1, &query->raster_start_query_id);
      DCHECK_GT(query->raster_start_query_id, 0u);
      ri->QueryCounterEXT(query->raster_start_query_id,
                          GL_COMMANDS_ISSUED_TIMESTAMP_CHROMIUM);
    }
#endif

    // Use a query to time the GPU side work for rasterizing this tile.
    ri->GenQueriesEXT(1, &query->raster_duration_query_id);
    DCHECK_GT(query->raster_duration_query_id, 0u);
    ri->BeginQueryEXT(GL_COMMANDS_ISSUED_CHROMIUM,
                      query->raster_duration_query_id);
  }

  {
    // Times the CPU-side (worker thread) portion of the raster work; the GPU
    // side is covered by the GL_COMMANDS_ISSUED_CHROMIUM query above.
    base::Optional<base::ElapsedTimer> timer;
    if (measure_raster_metric)
      timer.emplace();
    // OOP-R sends paint ops to the GPU process for rasterization; the non-OOP
    // path rasterizes with Skia/GL directly on this thread.
    if (enable_oop_rasterization_) {
      RasterizeSourceOOP(
          raster_source, resource_has_previous_content, mailbox, sync_token,
          texture_target, texture_is_overlay_candidate, resource_size,
          resource_format, color_space, raster_full_rect, playback_rect,
          transform, playback_settings, worker_context_provider_);
    } else {
      RasterizeSource(raster_source, resource_has_previous_content, mailbox,
                      sync_token, texture_target, texture_is_overlay_candidate,
                      resource_size, resource_format, color_space,
                      raster_full_rect, playback_rect, transform,
                      playback_settings, worker_context_provider_,
                      ShouldUnpremultiplyAndDitherResource(resource_format),
                      max_tile_size_);
    }
    if (measure_raster_metric) {
      query->worker_raster_duration = timer->Elapsed();
      ri->EndQueryEXT(GL_COMMANDS_ISSUED_CHROMIUM);
    }
  }

  // Generate sync token for cross context synchronization.
  return viz::ClientResourceProvider::GenerateSyncTokenHelper(ri);
}
prashant.nb4d4f492016-04-29 12:51:28612
Eric Karla6ff8862018-04-16 20:21:06613bool GpuRasterBufferProvider::ShouldUnpremultiplyAndDitherResource(
614 viz::ResourceFormat format) const {
615 switch (format) {
616 case viz::RGBA_4444:
617 return unpremultiply_and_dither_low_bit_depth_tiles_;
618 default:
619 return false;
620 }
621}
622
// Records |total_time| into a custom-range microseconds histogram |name|
// (1us - 100ms, 100 buckets), matching the expected range of raster timings.
// The macro body deliberately has no trailing semicolon: callers terminate the
// statement themselves, so the expansion stays valid in single-statement
// contexts such as `if (...) MACRO(...); else ...`.
#define UMA_HISTOGRAM_RASTER_TIME_CUSTOM_MICROSECONDS(name, total_time) \
  UMA_HISTOGRAM_CUSTOM_MICROSECONDS_TIMES(                              \
      name, total_time, base::TimeDelta::FromMicroseconds(1),           \
      base::TimeDelta::FromMilliseconds(100), 100)
627
628bool GpuRasterBufferProvider::CheckRasterFinishedQueries() {
Khushal5d4e0962018-10-18 18:04:07629 base::AutoLock hold(pending_raster_queries_lock_);
630 if (pending_raster_queries_.empty())
631 return false;
632
Khushalcd8fbb772018-10-16 22:46:14633 viz::RasterContextProvider::ScopedRasterContextLock scoped_context(
634 worker_context_provider_);
635 auto* ri = scoped_context.RasterInterface();
636
637 auto it = pending_raster_queries_.begin();
638 while (it != pending_raster_queries_.end()) {
Andres Calderon Jaramillo01d329552019-08-23 17:14:30639 GLuint complete = 0;
640 ri->GetQueryObjectuivEXT(it->raster_duration_query_id,
Khushalcd8fbb772018-10-16 22:46:14641 GL_QUERY_RESULT_AVAILABLE_NO_FLUSH_CHROMIUM_EXT,
642 &complete);
643 if (!complete)
644 break;
645
Andres Calderon Jaramillo01d329552019-08-23 17:14:30646#if DCHECK_IS_ON()
647 if (it->raster_start_query_id) {
648 // We issued the GL_COMMANDS_ISSUED_TIMESTAMP_CHROMIUM query prior to the
649 // GL_COMMANDS_ISSUED_CHROMIUM query. Therefore, if the result of the
650 // latter is available, the result of the former should be too.
651 complete = 0;
652 ri->GetQueryObjectuivEXT(it->raster_start_query_id,
653 GL_QUERY_RESULT_AVAILABLE_NO_FLUSH_CHROMIUM_EXT,
654 &complete);
655 DCHECK(complete);
656 }
657#endif
Khushalcd8fbb772018-10-16 22:46:14658
Andres Calderon Jaramillo01d329552019-08-23 17:14:30659 GLuint gpu_raster_duration = 0u;
660 ri->GetQueryObjectuivEXT(it->raster_duration_query_id, GL_QUERY_RESULT_EXT,
661 &gpu_raster_duration);
662 ri->DeleteQueriesEXT(1, &it->raster_duration_query_id);
663
664 base::TimeDelta raster_duration =
665 it->worker_raster_duration +
666 base::TimeDelta::FromMicroseconds(gpu_raster_duration);
Khushalcd8fbb772018-10-16 22:46:14667
668 // It is safe to use the UMA macros here with runtime generated strings
669 // because the client name should be initialized once in the process, before
670 // recording any metrics here.
671 const char* client_name = GetClientNameForMetrics();
Andres Calderon Jaramillo01d329552019-08-23 17:14:30672
673 if (it->raster_start_query_id) {
674 GLuint64 gpu_raster_start_time = 0u;
675 ri->GetQueryObjectui64vEXT(it->raster_start_query_id, GL_QUERY_RESULT_EXT,
676 &gpu_raster_start_time);
677 ri->DeleteQueriesEXT(1, &it->raster_start_query_id);
678
679 // The base::checked_cast<int64_t> should not crash as long as the GPU
680 // process was not compromised: that's because the result of the query
681 // should have been generated using base::TimeDelta::InMicroseconds()
682 // there, so the result should fit in an int64_t.
683 base::TimeDelta raster_scheduling_delay =
684 base::TimeDelta::FromMicroseconds(
685 base::checked_cast<int64_t>(gpu_raster_start_time)) -
686 it->raster_buffer_creation_time.since_origin();
687
688 // We expect the clock we're using to be monotonic, so we shouldn't get a
689 // negative scheduling delay.
690 DCHECK_GE(raster_scheduling_delay.InMicroseconds(), 0u);
691 UMA_HISTOGRAM_RASTER_TIME_CUSTOM_MICROSECONDS(
Andres Calderon Jaramillob6b26dc2019-11-25 21:24:05692 base::StringPrintf(
693 "Renderer4.%s.RasterTaskSchedulingDelayNoAtRasterDecodes.All",
694 client_name),
Andres Calderon Jaramillo01d329552019-08-23 17:14:30695 raster_scheduling_delay);
Andres Calderon Jaramillo5057f232019-11-29 23:05:48696 if (it->depends_on_hardware_accelerated_jpeg_candidates) {
697 UMA_HISTOGRAM_RASTER_TIME_CUSTOM_MICROSECONDS(
698 base::StringPrintf(
699 "Renderer4.%s.RasterTaskSchedulingDelayNoAtRasterDecodes."
700 "TilesWithJpegHwDecodeCandidates",
701 client_name),
702 raster_scheduling_delay);
703 }
704 if (it->depends_on_hardware_accelerated_webp_candidates) {
705 UMA_HISTOGRAM_RASTER_TIME_CUSTOM_MICROSECONDS(
706 base::StringPrintf(
707 "Renderer4.%s.RasterTaskSchedulingDelayNoAtRasterDecodes."
708 "TilesWithWebPHwDecodeCandidates",
709 client_name),
710 raster_scheduling_delay);
711 }
Andres Calderon Jaramillo01d329552019-08-23 17:14:30712 }
713
Khushalcd8fbb772018-10-16 22:46:14714 if (enable_oop_rasterization_) {
715 UMA_HISTOGRAM_RASTER_TIME_CUSTOM_MICROSECONDS(
716 base::StringPrintf("Renderer4.%s.RasterTaskTotalDuration.Oop",
717 client_name),
Andres Calderon Jaramillo01d329552019-08-23 17:14:30718 raster_duration);
Khushalcd8fbb772018-10-16 22:46:14719 } else {
720 UMA_HISTOGRAM_RASTER_TIME_CUSTOM_MICROSECONDS(
721 base::StringPrintf("Renderer4.%s.RasterTaskTotalDuration.Gpu",
722 client_name),
Andres Calderon Jaramillo01d329552019-08-23 17:14:30723 raster_duration);
Khushalcd8fbb772018-10-16 22:46:14724 }
725
726 it = pending_raster_queries_.erase(it);
727 }
728
729 return pending_raster_queries_.size() > 0u;
730}
731
reveman@chromium.orgb5641b92014-02-15 14:21:58732} // namespace cc