[go: nahoru, domu]

blob: ae560bafe01b76296e94e38966b50629166df1b8 [file] [log] [blame]
reveman@chromium.orgb5641b92014-02-15 14:21:581// Copyright 2014 The Chromium Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
prashant.nb4d4f492016-04-29 12:51:285#include "cc/raster/gpu_raster_buffer_provider.h"
reveman@chromium.orgb5641b92014-02-15 14:21:586
avi02a4d172015-12-21 06:14:367#include <stdint.h>
8
ernstm69fedbd2014-09-18 01:23:419#include <algorithm>
Mike Reedf7074ff2019-03-14 14:28:1610#include <utility>
ernstm69fedbd2014-09-18 01:23:4111
ericrke4027312016-06-30 00:12:4212#include "base/metrics/histogram_macros.h"
Khushal3c3873a2018-11-06 18:14:0113#include "base/rand_util.h"
Antoine Labour6283c792018-09-27 16:59:2814#include "base/strings/stringprintf.h"
Alexandr Ilin0443a8f2018-07-20 20:14:5015#include "base/trace_event/process_memory_dump.h"
ssid904ce3b2015-01-27 15:20:1616#include "base/trace_event/trace_event.h"
ericrkc7c9e3f2016-07-01 17:30:1617#include "cc/base/histograms.h"
Adrienne Walker436a7752017-08-28 23:33:0918#include "cc/paint/display_item_list.h"
enne5a9630362017-02-24 23:41:0319#include "cc/paint/paint_canvas.h"
Adrienne Walker436a7752017-08-28 23:33:0920#include "cc/paint/paint_recorder.h"
chrishtrac41ff92017-03-17 05:07:3021#include "cc/raster/raster_source.h"
danakj920156852015-05-18 20:22:2922#include "cc/raster/scoped_gpu_raster.h"
Khushal3cfc77f2018-08-15 07:19:5823#include "cc/raster/scoped_grcontext_access.h"
danakj57baa772018-05-29 15:59:1424#include "components/viz/client/client_resource_provider.h"
Victor Miura29b7ea3d2017-12-19 20:23:5925#include "components/viz/common/gpu/context_provider.h"
26#include "components/viz/common/gpu/raster_context_provider.h"
danakjaf3170e2018-02-09 17:31:5827#include "gpu/GLES2/gl2extchromium.h"
ericrk7f6a27f2017-01-31 22:34:3228#include "gpu/command_buffer/client/context_support.h"
reveman@chromium.orgb5641b92014-02-15 14:21:5829#include "gpu/command_buffer/client/gles2_interface.h"
Victor Miura3a4ad4f82017-12-13 06:03:4530#include "gpu/command_buffer/client/raster_interface.h"
Antoine Labour6283c792018-09-27 16:59:2831#include "gpu/command_buffer/client/shared_image_interface.h"
Antoine Labour6283c792018-09-27 16:59:2832#include "gpu/command_buffer/common/shared_image_trace_utils.h"
33#include "gpu/command_buffer/common/shared_image_usage.h"
hendrikw04cea972014-09-23 20:50:5334#include "third_party/skia/include/core/SkPictureRecorder.h"
reveman47560ab2014-09-18 19:39:2135#include "third_party/skia/include/core/SkSurface.h"
reveman@chromium.orgb5641b92014-02-15 14:21:5836#include "third_party/skia/include/gpu/GrContext.h"
Adrienne Walker436a7752017-08-28 23:33:0937#include "ui/gfx/geometry/axis_transform2d.h"
Khushal49836ab2018-07-25 02:08:4538#include "url/gurl.h"
reveman@chromium.orgb5641b92014-02-15 14:21:5839
40namespace cc {
reveman47560ab2014-09-18 19:39:2141namespace {
42
// Rasters content into an intermediate 32-bit (N32 premultiplied) SkSurface
// and, on destruction, copies the result into the caller-provided GL texture
// via UnpremultiplyAndDitherCopyCHROMIUM. Used for low-bit-depth (e.g. 4444)
// destination textures that cannot be rastered into directly.
class ScopedSkSurfaceForUnpremultiplyAndDither {
 public:
  // |texture_id| is the destination texture that receives the unpremultiplied,
  // dithered copy in the destructor. |playback_rect| (in the same space as
  // |raster_full_rect|) determines the copy offset and size.
  ScopedSkSurfaceForUnpremultiplyAndDither(
      viz::RasterContextProvider* context_provider,
      sk_sp<SkColorSpace> color_space,
      const gfx::Rect& playback_rect,
      const gfx::Rect& raster_full_rect,
      const gfx::Size& max_tile_size,
      GLuint texture_id,
      const gfx::Size& texture_size,
      bool can_use_lcd_text,
      int msaa_sample_count)
      : context_provider_(context_provider),
        texture_id_(texture_id),
        offset_(playback_rect.OffsetFromOrigin() -
                raster_full_rect.OffsetFromOrigin()),
        size_(playback_rect.size()) {
    // Determine the |intermediate_size| to use for our 32-bit texture. If we
    // know the max tile size, use that. This prevents GPU cache explosion due
    // to using lots of different 32-bit texture sizes. Otherwise just use the
    // exact size of the target texture.
    gfx::Size intermediate_size;
    if (!max_tile_size.IsEmpty()) {
      DCHECK_GE(max_tile_size.width(), texture_size.width());
      DCHECK_GE(max_tile_size.height(), texture_size.height());
      intermediate_size = max_tile_size;
    } else {
      intermediate_size = texture_size;
    }

    // Allocate a 32-bit surface for raster. We will copy from that into our
    // actual surface in destruction.
    SkImageInfo n32Info = SkImageInfo::MakeN32Premul(intermediate_size.width(),
                                                     intermediate_size.height(),
                                                     std::move(color_space));
    SkSurfaceProps surface_props =
        viz::ClientResourceProvider::ScopedSkSurface::ComputeSurfaceProps(
            can_use_lcd_text);
    surface_ = SkSurface::MakeRenderTarget(
        context_provider->GrContext(), SkBudgeted::kNo, n32Info,
        msaa_sample_count, kTopLeft_GrSurfaceOrigin, &surface_props);
  }

  ~ScopedSkSurfaceForUnpremultiplyAndDither() {
    // In lost-context cases, |surface_| may be null and there's nothing
    // meaningful to do here.
    if (!surface_)
      return;

    // Resolve the intermediate surface's backing GL texture so it can be used
    // as the source of the copy below.
    GrBackendTexture backend_texture =
        surface_->getBackendTexture(SkSurface::kFlushRead_BackendHandleAccess);
    if (!backend_texture.isValid()) {
      return;
    }
    GrGLTextureInfo info;
    if (!backend_texture.getGLTextureInfo(&info)) {
      return;
    }
    // Copy from the 32-bit intermediate into the destination texture,
    // unpremultiplying and dithering in the process.
    context_provider_->ContextGL()->UnpremultiplyAndDitherCopyCHROMIUM(
        info.fID, texture_id_, offset_.x(), offset_.y(), size_.width(),
        size_.height());
  }

  // The intermediate surface to raster into. May be null after context loss.
  SkSurface* surface() { return surface_.get(); }

 private:
  viz::RasterContextProvider* context_provider_;
  GLuint texture_id_;
  gfx::Vector2d offset_;
  gfx::Size size_;
  sk_sp<SkSurface> surface_;
};
115
// Rasters |raster_source| into the shared image identified by |mailbox| using
// out-of-process (OOP) rasterization: the paint ops are serialized with
// RasterCHROMIUM and replayed in the GPU process. Creates the shared image on
// first use (when |mailbox| is zero); otherwise waits on |sync_token| before
// reusing it.
static void RasterizeSourceOOP(
    const RasterSource* raster_source,
    bool resource_has_previous_content,
    gpu::Mailbox* mailbox,
    const gpu::SyncToken& sync_token,
    GLenum texture_target,
    bool texture_is_overlay_candidate,
    const gfx::Size& resource_size,
    viz::ResourceFormat resource_format,
    const gfx::ColorSpace& color_space,
    const gfx::Rect& raster_full_rect,
    const gfx::Rect& playback_rect,
    const gfx::AxisTransform2d& transform,
    const RasterSource::PlaybackSettings& playback_settings,
    viz::RasterContextProvider* context_provider) {
  gpu::raster::RasterInterface* ri = context_provider->RasterInterface();
  if (mailbox->IsZero()) {
    DCHECK(!sync_token.HasData());
    auto* sii = context_provider->SharedImageInterface();
    uint32_t flags = gpu::SHARED_IMAGE_USAGE_DISPLAY |
                     gpu::SHARED_IMAGE_USAGE_RASTER |
                     gpu::SHARED_IMAGE_USAGE_OOP_RASTERIZATION;
    if (texture_is_overlay_candidate)
      flags |= gpu::SHARED_IMAGE_USAGE_SCANOUT;
    *mailbox = sii->CreateSharedImage(resource_format, resource_size,
                                      color_space, flags);
    // Ensure the raster stream doesn't run until the shared image exists.
    ri->WaitSyncTokenCHROMIUM(sii->GenUnverifiedSyncToken().GetConstData());
  } else {
    // Reusing an existing shared image; wait for its previous use to complete.
    ri->WaitSyncTokenCHROMIUM(sync_token.GetConstData());
  }

  ri->BeginRasterCHROMIUM(
      raster_source->background_color(), playback_settings.msaa_sample_count,
      playback_settings.use_lcd_text, color_space, mailbox->name);
  float recording_to_raster_scale =
      transform.scale() / raster_source->recording_scale_factor();
  gfx::Size content_size = raster_source->GetContentSize(transform.scale());

  // TODO(enne): could skip the clear on new textures, as the service side has
  // to do that anyway. resource_has_previous_content implies that the texture
  // is not new, but the reverse does not hold, so more plumbing is needed.
  ri->RasterCHROMIUM(
      raster_source->GetDisplayItemList().get(),
      playback_settings.image_provider, content_size, raster_full_rect,
      playback_rect, transform.translation(), recording_to_raster_scale,
      raster_source->requires_clear(),
      const_cast<RasterSource*>(raster_source)->max_op_size_hint());
  ri->EndRasterCHROMIUM();

  // TODO(ericrk): Handle unpremultiply+dither for 4444 cases.
  // https://crbug.com/789153
}
168
vmiuraf7c765c2016-12-03 21:02:32169static void RasterizeSource(
sunnyps5d6ff0d02016-06-28 00:40:11170 const RasterSource* raster_source,
171 bool resource_has_previous_content,
Antoine Labour6283c792018-09-27 16:59:28172 gpu::Mailbox* mailbox,
173 const gpu::SyncToken& sync_token,
danakjaf3170e2018-02-09 17:31:58174 GLenum texture_target,
175 bool texture_is_overlay_candidate,
danakjaf3170e2018-02-09 17:31:58176 const gfx::Size& resource_size,
177 viz::ResourceFormat resource_format,
178 const gfx::ColorSpace& color_space,
sunnyps5d6ff0d02016-06-28 00:40:11179 const gfx::Rect& raster_full_rect,
Adrienne Walker436a7752017-08-28 23:33:09180 const gfx::Rect& playback_rect,
trchen178ac912017-04-04 10:11:10181 const gfx::AxisTransform2d& transform,
vmiuraf7c765c2016-12-03 21:02:32182 const RasterSource::PlaybackSettings& playback_settings,
Victor Miura29b7ea3d2017-12-19 20:23:59183 viz::RasterContextProvider* context_provider,
Eric Karl247f09c2018-03-15 02:06:36184 bool unpremultiply_and_dither,
185 const gfx::Size& max_tile_size) {
Victor Miura29b7ea3d2017-12-19 20:23:59186 gpu::raster::RasterInterface* ri = context_provider->RasterInterface();
Antoine Labour6283c792018-09-27 16:59:28187 if (mailbox->IsZero()) {
188 auto* sii = context_provider->SharedImageInterface();
Peng Huange3b8c1d2019-03-05 17:21:12189 uint32_t flags = gpu::SHARED_IMAGE_USAGE_DISPLAY |
190 gpu::SHARED_IMAGE_USAGE_GLES2 |
Antoine Labour6283c792018-09-27 16:59:28191 gpu::SHARED_IMAGE_USAGE_GLES2_FRAMEBUFFER_HINT;
192 if (texture_is_overlay_candidate)
193 flags |= gpu::SHARED_IMAGE_USAGE_SCANOUT;
194 *mailbox = sii->CreateSharedImage(resource_format, resource_size,
195 color_space, flags);
196 ri->WaitSyncTokenCHROMIUM(sii->GenUnverifiedSyncToken().GetConstData());
197 } else {
198 // Wait on the SyncToken that was created on the compositor thread after
199 // making the mailbox. This ensures that the mailbox we consume here is
200 // valid by the time the consume command executes.
201 ri->WaitSyncTokenCHROMIUM(sync_token.GetConstData());
202 }
Nathan Zabriskie0db1d9e412019-11-19 00:55:17203 GLuint texture_id = ri->CreateAndConsumeForGpuRaster(*mailbox);
Sunny Sachanandanid87c16e2019-11-23 02:48:47204 ri->BeginSharedImageAccessDirectCHROMIUM(
205 texture_id, GL_SHARED_IMAGE_ACCESS_MODE_READWRITE_CHROMIUM);
Sunny Sachanandani0cb875e2017-08-15 02:54:52206 {
Justin Novosad60f840e2018-03-21 22:00:50207 ScopedGrContextAccess gr_context_access(context_provider);
danakj57baa772018-05-29 15:59:14208 base::Optional<viz::ClientResourceProvider::ScopedSkSurface> scoped_surface;
Eric Karl247f09c2018-03-15 02:06:36209 base::Optional<ScopedSkSurfaceForUnpremultiplyAndDither>
210 scoped_dither_surface;
211 SkSurface* surface;
Mike Reedf7074ff2019-03-14 14:28:16212 sk_sp<SkColorSpace> sk_color_space = color_space.ToSkColorSpace();
Eric Karl247f09c2018-03-15 02:06:36213 if (!unpremultiply_and_dither) {
Mike Reedf7074ff2019-03-14 14:28:16214 scoped_surface.emplace(context_provider->GrContext(), sk_color_space,
215 texture_id, texture_target, resource_size,
216 resource_format, playback_settings.use_lcd_text,
Khushalec3ba5dc2019-11-04 22:30:21217 playback_settings.msaa_sample_count);
Eric Karl247f09c2018-03-15 02:06:36218 surface = scoped_surface->surface();
219 } else {
220 scoped_dither_surface.emplace(
Mike Reedf7074ff2019-03-14 14:28:16221 context_provider, sk_color_space, playback_rect, raster_full_rect,
222 max_tile_size, texture_id, resource_size,
Khushalec3ba5dc2019-11-04 22:30:21223 playback_settings.use_lcd_text, playback_settings.msaa_sample_count);
Eric Karl247f09c2018-03-15 02:06:36224 surface = scoped_dither_surface->surface();
225 }
Sunny Sachanandani0cb875e2017-08-15 02:54:52226
227 // Allocating an SkSurface will fail after a lost context. Pretend we
228 // rasterized, as the contents of the resource don't matter anymore.
229 if (!surface) {
230 DLOG(ERROR) << "Failed to allocate raster surface";
231 return;
232 }
233
Sunny Sachanandani0cb875e2017-08-15 02:54:52234 SkCanvas* canvas = surface->getCanvas();
235
236 // As an optimization, inform Skia to discard when not doing partial raster.
237 if (raster_full_rect == playback_rect)
238 canvas->discard();
239
Adrienne Walker51c8e382018-02-06 20:30:33240 gfx::Size content_size = raster_source->GetContentSize(transform.scale());
Mike Reedf7074ff2019-03-14 14:28:16241 raster_source->PlaybackToCanvas(canvas, content_size, raster_full_rect,
242 playback_rect, transform,
danakjaf3170e2018-02-09 17:31:58243 playback_settings);
ccameron220942362017-02-06 20:29:19244 }
Sunny Sachanandanid87c16e2019-11-23 02:48:47245 ri->EndSharedImageAccessDirectCHROMIUM(texture_id);
Antoine Labouraeb4bd70e2019-01-16 02:33:28246 ri->DeleteGpuRasterTexture(texture_id);
sunnyps5d6ff0d02016-06-28 00:40:11247}
reveman47560ab2014-09-18 19:39:21248
249} // namespace
vmiura78b69282015-02-14 00:01:17250
// Subclass for InUsePoolResource that holds ownership of a gpu-rastered backing
// and does cleanup of the backing when destroyed.
class GpuRasterBufferProvider::GpuRasterBacking
    : public ResourcePool::GpuBacking {
 public:
  ~GpuRasterBacking() override {
    if (mailbox.IsZero())
      return;
    auto* sii = worker_context_provider->SharedImageInterface();
    // Destroy the shared image after whichever sync token is most recent:
    // prefer the token from the resource being returned to the pool, falling
    // back to the token generated after the last raster.
    if (returned_sync_token.HasData())
      sii->DestroySharedImage(returned_sync_token, mailbox);
    else if (mailbox_sync_token.HasData())
      sii->DestroySharedImage(mailbox_sync_token, mailbox);
  }

  // Reports this backing's shared image to memory-infra via a shared global
  // allocator dump keyed on the mailbox.
  void OnMemoryDump(
      base::trace_event::ProcessMemoryDump* pmd,
      const base::trace_event::MemoryAllocatorDumpGuid& buffer_dump_guid,
      uint64_t tracing_process_id,
      int importance) const override {
    if (mailbox.IsZero())
      return;

    auto tracing_guid = gpu::GetSharedImageGUIDForTracing(mailbox);
    pmd->CreateSharedGlobalAllocatorDump(tracing_guid);
    pmd->AddOwnershipEdge(buffer_dump_guid, tracing_guid, importance);
  }

  // The ContextProvider used to clean up the mailbox
  viz::RasterContextProvider* worker_context_provider = nullptr;
};
282
// Snapshots everything needed for raster from |in_use_resource| and |backing|
// on the compositor thread, so that Playback can later run on a worker thread
// without touching the pool resource.
GpuRasterBufferProvider::RasterBufferImpl::RasterBufferImpl(
    GpuRasterBufferProvider* client,
    const ResourcePool::InUsePoolResource& in_use_resource,
    GpuRasterBacking* backing,
    bool resource_has_previous_content,
    bool depends_on_at_raster_decodes,
    bool depends_on_hardware_accelerated_jpeg_candidates,
    bool depends_on_hardware_accelerated_webp_candidates)
    : client_(client),
      backing_(backing),
      resource_size_(in_use_resource.size()),
      resource_format_(in_use_resource.format()),
      color_space_(in_use_resource.color_space()),
      resource_has_previous_content_(resource_has_previous_content),
      depends_on_at_raster_decodes_(depends_on_at_raster_decodes),
      depends_on_hardware_accelerated_jpeg_candidates_(
          depends_on_hardware_accelerated_jpeg_candidates),
      depends_on_hardware_accelerated_webp_candidates_(
          depends_on_hardware_accelerated_webp_candidates),
      before_raster_sync_token_(backing->returned_sync_token),
      texture_target_(backing->texture_target),
      texture_is_overlay_candidate_(backing->overlay_candidate),
      mailbox_(backing->mailbox) {
#if defined(OS_CHROMEOS)
  // Only do this in Chrome OS with OOP-R because:
  // 1) We will use this timestamp to measure raster scheduling delay and we
  //    only need to collect that data to assess the impact of hardware
  //    acceleration of image decodes which works only on Chrome OS with
  //    OOP-R.
  // 2) We use CLOCK_MONOTONIC in that OS to get timestamps, so we can assert
  //    certain assumptions.
  if (client_->enable_oop_rasterization_)
    creation_time_ = base::TimeTicks::Now();
#endif
}
sunnyps5d6ff0d02016-06-28 00:40:11318
// Publishes the raster result back to the backing: records the sync token
// generated after raster and the (possibly newly created) mailbox.
GpuRasterBufferProvider::RasterBufferImpl::~RasterBufferImpl() {
  // This SyncToken was created on the worker context after rastering the
  // texture content.
  backing_->mailbox_sync_token = after_raster_sync_token_;
  if (after_raster_sync_token_.HasData()) {
    // The returned SyncToken was waited on in Playback. We know Playback
    // happened if the |after_raster_sync_token_| was set.
    backing_->returned_sync_token = gpu::SyncToken();
  }
  backing_->mailbox = mailbox_;
}
330
// Rasters |raster_source| on the current (worker) thread by delegating to the
// provider, recording the sync token to wait on before the result may be used.
void GpuRasterBufferProvider::RasterBufferImpl::Playback(
    const RasterSource* raster_source,
    const gfx::Rect& raster_full_rect,
    const gfx::Rect& raster_dirty_rect,
    uint64_t new_content_id,
    const gfx::AxisTransform2d& transform,
    const RasterSource::PlaybackSettings& playback_settings,
    const GURL& url) {
  TRACE_EVENT0("cc", "GpuRasterBuffer::Playback");
  // The |before_raster_sync_token_| passed in here was created on the
  // compositor thread, or given back with the texture for reuse. This call
  // returns another SyncToken generated on the worker thread to synchronize
  // with after the raster is complete.
  after_raster_sync_token_ = client_->PlaybackOnWorkerThread(
      &mailbox_, texture_target_, texture_is_overlay_candidate_,
      before_raster_sync_token_, resource_size_, resource_format_, color_space_,
      resource_has_previous_content_, raster_source, raster_full_rect,
      raster_dirty_rect, new_content_id, transform, playback_settings, url,
      creation_time_, depends_on_at_raster_decodes_,
      depends_on_hardware_accelerated_jpeg_candidates_,
      depends_on_hardware_accelerated_webp_candidates_);
}
353
prashant.nb4d4f492016-04-29 12:51:28354GpuRasterBufferProvider::GpuRasterBufferProvider(
Xu Xing32549162017-07-17 22:25:43355 viz::ContextProvider* compositor_context_provider,
Victor Miura29b7ea3d2017-12-19 20:23:59356 viz::RasterContextProvider* worker_context_provider,
danakjaf3170e2018-02-09 17:31:58357 bool use_gpu_memory_buffer_resources,
danakja32578c2018-04-25 21:18:36358 viz::ResourceFormat tile_format,
Eric Karl247f09c2018-03-15 02:06:36359 const gfx::Size& max_tile_size,
Eric Karla6ff8862018-04-16 20:21:06360 bool unpremultiply_and_dither_low_bit_depth_tiles,
Khushale898b992018-10-19 22:25:16361 bool enable_oop_rasterization,
Aaron Krajeski6392a86f2019-07-18 13:40:47362 float raster_metric_probability)
danakj0de0c95a2016-05-25 01:42:49363 : compositor_context_provider_(compositor_context_provider),
sunnyps5d6ff0d02016-06-28 00:40:11364 worker_context_provider_(worker_context_provider),
danakjaf3170e2018-02-09 17:31:58365 use_gpu_memory_buffer_resources_(use_gpu_memory_buffer_resources),
danakja32578c2018-04-25 21:18:36366 tile_format_(tile_format),
Eric Karl247f09c2018-03-15 02:06:36367 max_tile_size_(max_tile_size),
Eric Karla6ff8862018-04-16 20:21:06368 unpremultiply_and_dither_low_bit_depth_tiles_(
369 unpremultiply_and_dither_low_bit_depth_tiles),
Khushale898b992018-10-19 22:25:16370 enable_oop_rasterization_(enable_oop_rasterization),
Aaron Krajeski6392a86f2019-07-18 13:40:47371 random_generator_((uint32_t)base::RandUint64()),
372 bernoulli_distribution_(raster_metric_probability) {
sunnyps5d6ff0d02016-06-28 00:40:11373 DCHECK(compositor_context_provider);
374 DCHECK(worker_context_provider);
danakj0de0c95a2016-05-25 01:42:49375}
reveman@chromium.orgb5641b92014-02-15 14:21:58376
sunnyps5d6ff0d02016-06-28 00:40:11377GpuRasterBufferProvider::~GpuRasterBufferProvider() {
sunnyps5d6ff0d02016-06-28 00:40:11378}
reveman@chromium.orgb5641b92014-02-15 14:21:58379
// Returns a RasterBuffer for rastering into |resource|, lazily creating and
// attaching a GpuRasterBacking on first use. Partial raster is possible when
// |resource_content_id| matches |previous_content_id| (the resource still
// holds that content).
std::unique_ptr<RasterBuffer> GpuRasterBufferProvider::AcquireBufferForRaster(
    const ResourcePool::InUsePoolResource& resource,
    uint64_t resource_content_id,
    uint64_t previous_content_id,
    bool depends_on_at_raster_decodes,
    bool depends_on_hardware_accelerated_jpeg_candidates,
    bool depends_on_hardware_accelerated_webp_candidates) {
  if (!resource.gpu_backing()) {
    auto backing = std::make_unique<GpuRasterBacking>();
    backing->worker_context_provider = worker_context_provider_;
    backing->InitOverlayCandidateAndTextureTarget(
        resource.format(), compositor_context_provider_->ContextCapabilities(),
        use_gpu_memory_buffer_resources_);
    resource.set_gpu_backing(std::move(backing));
  }
  GpuRasterBacking* backing =
      static_cast<GpuRasterBacking*>(resource.gpu_backing());
  // A content id of 0 means "no content", so never treat that as a match.
  bool resource_has_previous_content =
      resource_content_id && resource_content_id == previous_content_id;
  return std::make_unique<RasterBufferImpl>(
      this, resource, backing, resource_has_previous_content,
      depends_on_at_raster_decodes,
      depends_on_hardware_accelerated_jpeg_candidates,
      depends_on_hardware_accelerated_webp_candidates);
}
405
// Flushes deferred work on the compositor context.
void GpuRasterBufferProvider::Flush() {
  compositor_context_provider_->ContextSupport()->FlushPendingWork();
}
409
// Returns the format used for tile resources.
viz::ResourceFormat GpuRasterBufferProvider::GetResourceFormat() const {
  return tile_format_;
}
413
// Tile contents are premultiplied unless the unpremultiply-and-dither path is
// used for the current tile format.
bool GpuRasterBufferProvider::IsResourcePremultiplied() const {
  return !ShouldUnpremultiplyAndDitherResource(GetResourceFormat());
}
417
// Returns true once the raster work for |resource| has completed on the GPU,
// i.e. its mailbox sync token has signaled.
bool GpuRasterBufferProvider::IsResourceReadyToDraw(
    const ResourcePool::InUsePoolResource& resource) const {
  const gpu::SyncToken& sync_token = resource.gpu_backing()->mailbox_sync_token;
  // This SyncToken() should have been set by calling OrderingBarrier() before
  // calling this.
  DCHECK(sync_token.HasData());

  // IsSyncTokenSignaled is thread-safe, no need for worker context lock.
  return worker_context_provider_->ContextSupport()->IsSyncTokenSignaled(
      sync_token);
}
429
// GPU raster can always reuse previous content and raster only the dirty rect.
bool GpuRasterBufferProvider::CanPartialRasterIntoProvidedResource() const {
  return true;
}
433
// Requests that |callback| run (on the compositor thread) once all of
// |resources| are ready to draw. Returns an id identifying this wait so the
// caller can detect redundant requests via |pending_callback_id|.
uint64_t GpuRasterBufferProvider::SetReadyToDrawCallback(
    const std::vector<const ResourcePool::InUsePoolResource*>& resources,
    base::OnceClosure callback,
    uint64_t pending_callback_id) const {
  // Find the most recent sync token among the resources; when it signals, all
  // of the earlier ones have signaled too.
  gpu::SyncToken latest_sync_token;
  for (const auto* in_use : resources) {
    const gpu::SyncToken& sync_token =
        in_use->gpu_backing()->mailbox_sync_token;
    if (sync_token.release_count() > latest_sync_token.release_count())
      latest_sync_token = sync_token;
  }
  uint64_t callback_id = latest_sync_token.release_count();
  DCHECK_NE(callback_id, 0u);

  // If the callback is different from the one the caller is already waiting on,
  // pass the callback through to SignalSyncToken. Otherwise the request is
  // redundant.
  if (callback_id != pending_callback_id) {
    // Use the compositor context because we want this callback on the
    // compositor thread.
    compositor_context_provider_->ContextSupport()->SignalSyncToken(
        latest_sync_token, std::move(callback));
  }

  return callback_id;
}
460
// No provider-specific shutdown work is needed for GPU raster.
void GpuRasterBufferProvider::Shutdown() {
}
463
// Runs raster for one tile on the worker thread and returns the sync token
// that signals raster completion. Also records a PendingRasterQuery when GPU
// timing queries were issued, so raster metrics can be collected later.
gpu::SyncToken GpuRasterBufferProvider::PlaybackOnWorkerThread(
    gpu::Mailbox* mailbox,
    GLenum texture_target,
    bool texture_is_overlay_candidate,
    const gpu::SyncToken& sync_token,
    const gfx::Size& resource_size,
    viz::ResourceFormat resource_format,
    const gfx::ColorSpace& color_space,
    bool resource_has_previous_content,
    const RasterSource* raster_source,
    const gfx::Rect& raster_full_rect,
    const gfx::Rect& raster_dirty_rect,
    uint64_t new_content_id,
    const gfx::AxisTransform2d& transform,
    const RasterSource::PlaybackSettings& playback_settings,
    const GURL& url,
    base::TimeTicks raster_buffer_creation_time,
    bool depends_on_at_raster_decodes,
    bool depends_on_hardware_accelerated_jpeg_candidates,
    bool depends_on_hardware_accelerated_webp_candidates) {
  PendingRasterQuery query;
  query.depends_on_hardware_accelerated_jpeg_candidates =
      depends_on_hardware_accelerated_jpeg_candidates;
  query.depends_on_hardware_accelerated_webp_candidates =
      depends_on_hardware_accelerated_webp_candidates;
  gpu::SyncToken raster_finished_token = PlaybackOnWorkerThreadInternal(
      mailbox, texture_target, texture_is_overlay_candidate, sync_token,
      resource_size, resource_format, color_space,
      resource_has_previous_content, raster_source, raster_full_rect,
      raster_dirty_rect, new_content_id, transform, playback_settings, url,
      depends_on_at_raster_decodes, &query);

  // A non-zero duration query id means metrics were sampled for this raster.
  if (query.raster_duration_query_id) {
    if (query.raster_start_query_id)
      query.raster_buffer_creation_time = raster_buffer_creation_time;

    // Note that it is important to scope the raster context lock to
    // PlaybackOnWorkerThreadInternal and release it before acquiring this lock
    // to avoid a deadlock in CheckRasterFinishedQueries which acquires the
    // raster context lock while holding this lock.
    base::AutoLock hold(pending_raster_queries_lock_);
    pending_raster_queries_.push_back(query);
  }
  // The start-timestamp query is only ever issued together with the duration
  // query.
  DCHECK(!query.raster_start_query_id || query.raster_duration_query_id);

  return raster_finished_token;
}
511
512gpu::SyncToken GpuRasterBufferProvider::PlaybackOnWorkerThreadInternal(
513 gpu::Mailbox* mailbox,
514 GLenum texture_target,
515 bool texture_is_overlay_candidate,
516 const gpu::SyncToken& sync_token,
517 const gfx::Size& resource_size,
518 viz::ResourceFormat resource_format,
519 const gfx::ColorSpace& color_space,
520 bool resource_has_previous_content,
521 const RasterSource* raster_source,
522 const gfx::Rect& raster_full_rect,
523 const gfx::Rect& raster_dirty_rect,
524 uint64_t new_content_id,
525 const gfx::AxisTransform2d& transform,
526 const RasterSource::PlaybackSettings& playback_settings,
527 const GURL& url,
Andres Calderon Jaramillob6b26dc2019-11-25 21:24:05528 bool depends_on_at_raster_decodes,
Khushal5d4e0962018-10-18 18:04:07529 PendingRasterQuery* query) {
Victor Miura29b7ea3d2017-12-19 20:23:59530 viz::RasterContextProvider::ScopedRasterContextLock scoped_context(
Khushal49836ab2018-07-25 02:08:45531 worker_context_provider_, url.possibly_invalid_spec().c_str());
Victor Miura29b7ea3d2017-12-19 20:23:59532 gpu::raster::RasterInterface* ri = scoped_context.RasterInterface();
Victor Miura3a4ad4f82017-12-13 06:03:45533 DCHECK(ri);
sunnyps5d6ff0d02016-06-28 00:40:11534
Aaron Krajeski6392a86f2019-07-18 13:40:47535 const bool measure_raster_metric = bernoulli_distribution_(random_generator_);
Khushale898b992018-10-19 22:25:16536
Adrienne Walker436a7752017-08-28 23:33:09537 gfx::Rect playback_rect = raster_full_rect;
538 if (resource_has_previous_content) {
539 playback_rect.Intersect(raster_dirty_rect);
540 }
541 DCHECK(!playback_rect.IsEmpty())
542 << "Why are we rastering a tile that's not dirty?";
543
544 // Log a histogram of the percentage of pixels that were saved due to
545 // partial raster.
546 const char* client_name = GetClientNameForMetrics();
547 float full_rect_size = raster_full_rect.size().GetArea();
548 if (full_rect_size > 0 && client_name) {
549 float fraction_partial_rastered =
550 static_cast<float>(playback_rect.size().GetArea()) / full_rect_size;
551 float fraction_saved = 1.0f - fraction_partial_rastered;
552 UMA_HISTOGRAM_PERCENTAGE(
553 base::StringPrintf("Renderer4.%s.PartialRasterPercentageSaved.Gpu",
554 client_name),
555 100.0f * fraction_saved);
556 }
557
Khushale898b992018-10-19 22:25:16558 if (measure_raster_metric) {
Andres Calderon Jaramillo01d329552019-08-23 17:14:30559#if defined(OS_CHROMEOS)
560 // Use a query to detect when the GPU side is ready to start issuing raster
561 // work to the driver. We will use the resulting timestamp to measure raster
562 // scheduling delay. We only care about this in Chrome OS and when OOP-R is
563 // enabled because we will use this timestamp to measure raster scheduling
564 // delay and we only need to collect that data to assess the impact of
565 // hardware acceleration of image decodes which work only in Chrome OS with
Andres Calderon Jaramillob6b26dc2019-11-25 21:24:05566 // OOP-R. Furthermore, we don't count raster work that depends on at-raster
567 // image decodes. This is because we want the delay to always include
568 // image decoding and uploading time, and at-raster decodes should be
569 // relatively rare.
570 if (enable_oop_rasterization_ && !depends_on_at_raster_decodes) {
Andres Calderon Jaramillo01d329552019-08-23 17:14:30571 ri->GenQueriesEXT(1, &query->raster_start_query_id);
572 DCHECK_GT(query->raster_start_query_id, 0u);
573 ri->QueryCounterEXT(query->raster_start_query_id,
574 GL_COMMANDS_ISSUED_TIMESTAMP_CHROMIUM);
575 }
576#endif
Andres Calderon Jaramillob805fa82019-08-24 05:58:19577
578 // Use a query to time the GPU side work for rasterizing this tile.
579 ri->GenQueriesEXT(1, &query->raster_duration_query_id);
580 DCHECK_GT(query->raster_duration_query_id, 0u);
581 ri->BeginQueryEXT(GL_COMMANDS_ISSUED_CHROMIUM,
582 query->raster_duration_query_id);
Khushale898b992018-10-19 22:25:16583 }
Khushalcd8fbb772018-10-16 22:46:14584
585 {
Khushale898b992018-10-19 22:25:16586 base::Optional<base::ElapsedTimer> timer;
587 if (measure_raster_metric)
588 timer.emplace();
Khushalcd8fbb772018-10-16 22:46:14589 if (enable_oop_rasterization_) {
Khushalec3ba5dc2019-11-04 22:30:21590 RasterizeSourceOOP(
591 raster_source, resource_has_previous_content, mailbox, sync_token,
592 texture_target, texture_is_overlay_candidate, resource_size,
593 resource_format, color_space, raster_full_rect, playback_rect,
594 transform, playback_settings, worker_context_provider_);
Khushalcd8fbb772018-10-16 22:46:14595 } else {
596 RasterizeSource(raster_source, resource_has_previous_content, mailbox,
597 sync_token, texture_target, texture_is_overlay_candidate,
598 resource_size, resource_format, color_space,
599 raster_full_rect, playback_rect, transform,
600 playback_settings, worker_context_provider_,
Khushalcd8fbb772018-10-16 22:46:14601 ShouldUnpremultiplyAndDitherResource(resource_format),
602 max_tile_size_);
603 }
Andres Calderon Jaramillo01d329552019-08-23 17:14:30604 if (measure_raster_metric) {
605 query->worker_raster_duration = timer->Elapsed();
606 ri->EndQueryEXT(GL_COMMANDS_ISSUED_CHROMIUM);
607 }
Adrienne Walker436a7752017-08-28 23:33:09608 }
sunnyps5d6ff0d02016-06-28 00:40:11609
Sunny Sachanandani0cb875e2017-08-15 02:54:52610 // Generate sync token for cross context synchronization.
danakj57baa772018-05-29 15:59:14611 return viz::ClientResourceProvider::GenerateSyncTokenHelper(ri);
sunnyps5d6ff0d02016-06-28 00:40:11612}
prashant.nb4d4f492016-04-29 12:51:28613
Eric Karla6ff8862018-04-16 20:21:06614bool GpuRasterBufferProvider::ShouldUnpremultiplyAndDitherResource(
615 viz::ResourceFormat format) const {
616 switch (format) {
617 case viz::RGBA_4444:
618 return unpremultiply_and_dither_low_bit_depth_tiles_;
619 default:
620 return false;
621 }
622}
623
Khushalcd8fbb772018-10-16 22:46:14624#define UMA_HISTOGRAM_RASTER_TIME_CUSTOM_MICROSECONDS(name, total_time) \
625 UMA_HISTOGRAM_CUSTOM_MICROSECONDS_TIMES( \
626 name, total_time, base::TimeDelta::FromMicroseconds(1), \
627 base::TimeDelta::FromMilliseconds(100), 100);
628
629bool GpuRasterBufferProvider::CheckRasterFinishedQueries() {
Khushal5d4e0962018-10-18 18:04:07630 base::AutoLock hold(pending_raster_queries_lock_);
631 if (pending_raster_queries_.empty())
632 return false;
633
Khushalcd8fbb772018-10-16 22:46:14634 viz::RasterContextProvider::ScopedRasterContextLock scoped_context(
635 worker_context_provider_);
636 auto* ri = scoped_context.RasterInterface();
637
638 auto it = pending_raster_queries_.begin();
639 while (it != pending_raster_queries_.end()) {
Andres Calderon Jaramillo01d329552019-08-23 17:14:30640 GLuint complete = 0;
641 ri->GetQueryObjectuivEXT(it->raster_duration_query_id,
Khushalcd8fbb772018-10-16 22:46:14642 GL_QUERY_RESULT_AVAILABLE_NO_FLUSH_CHROMIUM_EXT,
643 &complete);
644 if (!complete)
645 break;
646
Andres Calderon Jaramillo01d329552019-08-23 17:14:30647#if DCHECK_IS_ON()
648 if (it->raster_start_query_id) {
649 // We issued the GL_COMMANDS_ISSUED_TIMESTAMP_CHROMIUM query prior to the
650 // GL_COMMANDS_ISSUED_CHROMIUM query. Therefore, if the result of the
651 // latter is available, the result of the former should be too.
652 complete = 0;
653 ri->GetQueryObjectuivEXT(it->raster_start_query_id,
654 GL_QUERY_RESULT_AVAILABLE_NO_FLUSH_CHROMIUM_EXT,
655 &complete);
656 DCHECK(complete);
657 }
658#endif
Khushalcd8fbb772018-10-16 22:46:14659
Andres Calderon Jaramillo01d329552019-08-23 17:14:30660 GLuint gpu_raster_duration = 0u;
661 ri->GetQueryObjectuivEXT(it->raster_duration_query_id, GL_QUERY_RESULT_EXT,
662 &gpu_raster_duration);
663 ri->DeleteQueriesEXT(1, &it->raster_duration_query_id);
664
665 base::TimeDelta raster_duration =
666 it->worker_raster_duration +
667 base::TimeDelta::FromMicroseconds(gpu_raster_duration);
Khushalcd8fbb772018-10-16 22:46:14668
669 // It is safe to use the UMA macros here with runtime generated strings
670 // because the client name should be initialized once in the process, before
671 // recording any metrics here.
672 const char* client_name = GetClientNameForMetrics();
Andres Calderon Jaramillo01d329552019-08-23 17:14:30673
674 if (it->raster_start_query_id) {
675 GLuint64 gpu_raster_start_time = 0u;
676 ri->GetQueryObjectui64vEXT(it->raster_start_query_id, GL_QUERY_RESULT_EXT,
677 &gpu_raster_start_time);
678 ri->DeleteQueriesEXT(1, &it->raster_start_query_id);
679
680 // The base::checked_cast<int64_t> should not crash as long as the GPU
681 // process was not compromised: that's because the result of the query
682 // should have been generated using base::TimeDelta::InMicroseconds()
683 // there, so the result should fit in an int64_t.
684 base::TimeDelta raster_scheduling_delay =
685 base::TimeDelta::FromMicroseconds(
686 base::checked_cast<int64_t>(gpu_raster_start_time)) -
687 it->raster_buffer_creation_time.since_origin();
688
689 // We expect the clock we're using to be monotonic, so we shouldn't get a
690 // negative scheduling delay.
691 DCHECK_GE(raster_scheduling_delay.InMicroseconds(), 0u);
692 UMA_HISTOGRAM_RASTER_TIME_CUSTOM_MICROSECONDS(
Andres Calderon Jaramillob6b26dc2019-11-25 21:24:05693 base::StringPrintf(
694 "Renderer4.%s.RasterTaskSchedulingDelayNoAtRasterDecodes.All",
695 client_name),
Andres Calderon Jaramillo01d329552019-08-23 17:14:30696 raster_scheduling_delay);
Andres Calderon Jaramillo5057f232019-11-29 23:05:48697 if (it->depends_on_hardware_accelerated_jpeg_candidates) {
698 UMA_HISTOGRAM_RASTER_TIME_CUSTOM_MICROSECONDS(
699 base::StringPrintf(
700 "Renderer4.%s.RasterTaskSchedulingDelayNoAtRasterDecodes."
701 "TilesWithJpegHwDecodeCandidates",
702 client_name),
703 raster_scheduling_delay);
704 }
705 if (it->depends_on_hardware_accelerated_webp_candidates) {
706 UMA_HISTOGRAM_RASTER_TIME_CUSTOM_MICROSECONDS(
707 base::StringPrintf(
708 "Renderer4.%s.RasterTaskSchedulingDelayNoAtRasterDecodes."
709 "TilesWithWebPHwDecodeCandidates",
710 client_name),
711 raster_scheduling_delay);
712 }
Andres Calderon Jaramillo01d329552019-08-23 17:14:30713 }
714
Khushalcd8fbb772018-10-16 22:46:14715 if (enable_oop_rasterization_) {
716 UMA_HISTOGRAM_RASTER_TIME_CUSTOM_MICROSECONDS(
717 base::StringPrintf("Renderer4.%s.RasterTaskTotalDuration.Oop",
718 client_name),
Andres Calderon Jaramillo01d329552019-08-23 17:14:30719 raster_duration);
Khushalcd8fbb772018-10-16 22:46:14720 } else {
721 UMA_HISTOGRAM_RASTER_TIME_CUSTOM_MICROSECONDS(
722 base::StringPrintf("Renderer4.%s.RasterTaskTotalDuration.Gpu",
723 client_name),
Andres Calderon Jaramillo01d329552019-08-23 17:14:30724 raster_duration);
Khushalcd8fbb772018-10-16 22:46:14725 }
726
727 it = pending_raster_queries_.erase(it);
728 }
729
730 return pending_raster_queries_.size() > 0u;
731}
732
reveman@chromium.orgb5641b92014-02-15 14:21:58733} // namespace cc