Add decoding support for HEVC range extension profile on Windows.
This enables range extension support for 420/422/444 8-12 bits on Intel
platforms that support HW decoding of those HEVC profiles.
Bug: 1345568
Change-Id: I23fcab13056404b1ab4974605442504879826381
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/3779371
Reviewed-by: Dan Sanders <sandersd@chromium.org>
Reviewed-by: Dale Curtis <dalecurtis@chromium.org>
Reviewed-by: Jeffrey Kardatzke <jkardatzke@google.com>
Commit-Queue: Jianlin Qiu <jianlin.qiu@intel.com>
Cr-Commit-Position: refs/heads/main@{#1029606}
diff --git a/media/base/video_types.cc b/media/base/video_types.cc
index fc60c99..e1d35c4 100644
--- a/media/base/video_types.cc
+++ b/media/base/video_types.cc
@@ -90,6 +90,21 @@
return "";
}
+std::string VideoChromaSamplingToString(VideoChromaSampling chroma_sampling) {
+ switch (chroma_sampling) {
+ case VideoChromaSampling::kUnknown:
+ return "unknown chroma sampling";
+ case VideoChromaSampling::k400:
+ return "4:0:0";
+ case VideoChromaSampling::k420:
+ return "4:2:0";
+ case VideoChromaSampling::k444:
+ return "4:4:4";
+ case VideoChromaSampling::k422:
+ return "4:2:2";
+ }
+}
+
std::ostream& operator<<(std::ostream& os, VideoPixelFormat format) {
os << VideoPixelFormatToString(format);
return os;
diff --git a/media/base/video_types.h b/media/base/video_types.h
index 519502bb..bb99f60 100644
--- a/media/base/video_types.h
+++ b/media/base/video_types.h
@@ -93,6 +93,19 @@
PIXEL_FORMAT_YUV444AP10, // Must always be equal to largest entry logged.
};
+// Chroma sampling formats
+enum class VideoChromaSampling : uint8_t {
+ kUnknown = 0,
+ k420, // 4:2:0 chroma channel has 1/2 height/width of luma channel.
+ k422, // 4:2:2 chroma channel has same height & 1/2 width of luma channel.
+ k444, // 4:4:4 chroma channel has same height/width of luma channel.
+  k400,  // 4:0:0 monochrome without chroma subsampling.
+};
+
+// Returns the name of the chroma sampling format as a string.
+MEDIA_SHMEM_EXPORT std::string VideoChromaSamplingToString(
+ VideoChromaSampling chroma_sampling);
+
// Returns the name of a Format as a string.
MEDIA_SHMEM_EXPORT std::string VideoPixelFormatToString(
VideoPixelFormat format);
diff --git a/media/gpu/accelerated_video_decoder.h b/media/gpu/accelerated_video_decoder.h
index 6156931b..5cf9e48d 100644
--- a/media/gpu/accelerated_video_decoder.h
+++ b/media/gpu/accelerated_video_decoder.h
@@ -10,6 +10,7 @@
#include "media/base/decoder_buffer.h"
#include "media/base/video_codecs.h"
+#include "media/base/video_types.h"
#include "media/gpu/media_gpu_export.h"
#include "ui/gfx/geometry/rect.h"
#include "ui/gfx/geometry/size.h"
@@ -72,14 +73,16 @@
// we need a new set of them, or when an error occurs.
[[nodiscard]] virtual DecodeResult Decode() = 0;
- // Return dimensions/visible rectangle/profile/bit depth/required number of
- // pictures that client should be ready to provide for the decoder to function
- // properly (of which up to GetNumReferenceFrames() might be needed for
- // internal decoding). To be used after Decode() returns kConfigChange.
+ // Return dimensions/visible rectangle/profile/bit depth/chroma sampling
+ // format/required number of pictures that client should be ready to provide
+ // for the decoder to function properly (of which up to
+ // GetNumReferenceFrames() might be needed for internal decoding). To be used
+ // after Decode() returns kConfigChange.
virtual gfx::Size GetPicSize() const = 0;
virtual gfx::Rect GetVisibleRect() const = 0;
virtual VideoCodecProfile GetProfile() const = 0;
virtual uint8_t GetBitDepth() const = 0;
+ virtual VideoChromaSampling GetChromaSampling() const = 0;
virtual size_t GetRequiredNumOfPictures() const = 0;
virtual size_t GetNumReferenceFrames() const = 0;
diff --git a/media/gpu/av1_decoder.cc b/media/gpu/av1_decoder.cc
index cc4eb3e..d94e8f6b 100644
--- a/media/gpu/av1_decoder.cc
+++ b/media/gpu/av1_decoder.cc
@@ -510,6 +510,12 @@
return bit_depth_;
}
+VideoChromaSampling AV1Decoder::GetChromaSampling() const {
+ // AV1 decoder does not rely on chroma sampling format for creating or
+ // reconfiguring decoder, so return an unknown format.
+ return VideoChromaSampling::kUnknown;
+}
+
size_t AV1Decoder::GetRequiredNumOfPictures() const {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
constexpr size_t kPicsInPipeline = limits::kMaxVideoFrames + 1;
diff --git a/media/gpu/av1_decoder.h b/media/gpu/av1_decoder.h
index beade9c..52b6c38 100644
--- a/media/gpu/av1_decoder.h
+++ b/media/gpu/av1_decoder.h
@@ -124,6 +124,7 @@
gfx::Rect GetVisibleRect() const override;
VideoCodecProfile GetProfile() const override;
uint8_t GetBitDepth() const override;
+ VideoChromaSampling GetChromaSampling() const override;
size_t GetRequiredNumOfPictures() const override;
size_t GetNumReferenceFrames() const override;
diff --git a/media/gpu/h264_decoder.cc b/media/gpu/h264_decoder.cc
index 4754a1e..348232c 100644
--- a/media/gpu/h264_decoder.cc
+++ b/media/gpu/h264_decoder.cc
@@ -1668,6 +1668,12 @@
return bit_depth_;
}
+VideoChromaSampling H264Decoder::GetChromaSampling() const {
+ // H264 decoder does not rely on chroma sampling format for creating
+ // or reconfiguring decoder, so return an unknown format.
+ return VideoChromaSampling::kUnknown;
+}
+
size_t H264Decoder::GetRequiredNumOfPictures() const {
constexpr size_t kPicsInPipeline = limits::kMaxVideoFrames + 1;
return GetNumReferenceFrames() + kPicsInPipeline;
diff --git a/media/gpu/h264_decoder.h b/media/gpu/h264_decoder.h
index 716e8a1..3262065 100644
--- a/media/gpu/h264_decoder.h
+++ b/media/gpu/h264_decoder.h
@@ -191,6 +191,7 @@
gfx::Rect GetVisibleRect() const override;
VideoCodecProfile GetProfile() const override;
uint8_t GetBitDepth() const override;
+ VideoChromaSampling GetChromaSampling() const override;
size_t GetRequiredNumOfPictures() const override;
size_t GetNumReferenceFrames() const override;
diff --git a/media/gpu/h265_decoder.cc b/media/gpu/h265_decoder.cc
index 4280458..52aff20 100644
--- a/media/gpu/h265_decoder.cc
+++ b/media/gpu/h265_decoder.cc
@@ -72,11 +72,6 @@
return false;
}
}
-
-bool IsYUV420Sequence(const H265SPS& sps) {
- // Spec 6.2
- return sps.chroma_format_idc == 1;
-}
} // namespace
H265Decoder::H265Accelerator::H265Accelerator() = default;
@@ -392,6 +387,10 @@
return bit_depth_;
}
+VideoChromaSampling H265Decoder::GetChromaSampling() const {
+ return chroma_sampling_;
+}
+
size_t H265Decoder::GetRequiredNumOfPictures() const {
constexpr size_t kPicsInPipeline = limits::kMaxVideoFrames + 1;
return GetNumReferenceFrames() + kPicsInPipeline;
@@ -422,7 +421,9 @@
DVLOG(2) << "New visible rect: " << new_visible_rect.ToString();
visible_rect_ = new_visible_rect;
}
- if (!IsYUV420Sequence(*sps)) {
+
+ VideoChromaSampling new_chroma_sampling = sps->GetChromaSampling();
+ if (!accelerator_->IsChromaSamplingSupported(new_chroma_sampling)) {
DVLOG(1) << "Only YUV 4:2:0 is supported";
return false;
}
@@ -441,18 +442,23 @@
<< ", profile=" << GetProfileName(new_profile);
return false;
}
+
if (pic_size_ != new_pic_size || dpb_.max_num_pics() != sps->max_dpb_size ||
- profile_ != new_profile || bit_depth_ != new_bit_depth) {
+ profile_ != new_profile || bit_depth_ != new_bit_depth ||
+ chroma_sampling_ != new_chroma_sampling) {
if (!Flush())
return false;
DVLOG(1) << "Codec profile: " << GetProfileName(new_profile)
<< ", level(x30): " << sps->profile_tier_level.general_level_idc
<< ", DPB size: " << sps->max_dpb_size
<< ", Picture size: " << new_pic_size.ToString()
- << ", bit_depth: " << base::strict_cast<int>(new_bit_depth);
+ << ", bit_depth: " << base::strict_cast<int>(new_bit_depth)
+ << ", chroma_sampling_format: "
+ << VideoChromaSamplingToString(new_chroma_sampling);
profile_ = new_profile;
bit_depth_ = new_bit_depth;
pic_size_ = new_pic_size;
+ chroma_sampling_ = new_chroma_sampling;
dpb_.set_max_num_pics(sps->max_dpb_size);
if (need_new_buffers)
*need_new_buffers = true;
diff --git a/media/gpu/h265_decoder.h b/media/gpu/h265_decoder.h
index 7a1bce2..973b1301 100644
--- a/media/gpu/h265_decoder.h
+++ b/media/gpu/h265_decoder.h
@@ -150,6 +150,10 @@
// kNotSupported.
virtual Status SetStream(base::span<const uint8_t> stream,
const DecryptConfig* decrypt_config);
+
+ // Indicates whether the accelerator supports bitstreams with
+ // specific chroma subsampling format.
+ virtual bool IsChromaSamplingSupported(VideoChromaSampling format) = 0;
};
H265Decoder(std::unique_ptr<H265Accelerator> accelerator,
@@ -170,6 +174,7 @@
gfx::Rect GetVisibleRect() const override;
VideoCodecProfile GetProfile() const override;
uint8_t GetBitDepth() const override;
+ VideoChromaSampling GetChromaSampling() const override;
size_t GetRequiredNumOfPictures() const override;
size_t GetNumReferenceFrames() const override;
@@ -329,6 +334,8 @@
VideoCodecProfile profile_;
// Bit depth of input bitstream.
uint8_t bit_depth_ = 0;
+  // Chroma sampling format of the input bitstream.
+ VideoChromaSampling chroma_sampling_ = VideoChromaSampling::kUnknown;
const std::unique_ptr<H265Accelerator> accelerator_;
};
diff --git a/media/gpu/h265_decoder_fuzzertest.cc b/media/gpu/h265_decoder_fuzzertest.cc
index 2d3d9ac..fd25e61 100644
--- a/media/gpu/h265_decoder_fuzzertest.cc
+++ b/media/gpu/h265_decoder_fuzzertest.cc
@@ -7,6 +7,7 @@
#include "base/numerics/safe_conversions.h"
#include "media/base/decoder_buffer.h"
#include "media/base/video_codecs.h"
+#include "media/base/video_types.h"
#include "media/gpu/h265_decoder.h"
namespace {
@@ -58,6 +59,9 @@
const media::DecryptConfig* decrypt_config) override {
return Status::kOk;
}
+ bool IsChromaSamplingSupported(media::VideoChromaSampling format) override {
+ return format == media::VideoChromaSampling::k420;
+ }
};
} // namespace
diff --git a/media/gpu/h265_decoder_unittest.cc b/media/gpu/h265_decoder_unittest.cc
index 60b0e5e..3660b9b 100644
--- a/media/gpu/h265_decoder_unittest.cc
+++ b/media/gpu/h265_decoder_unittest.cc
@@ -118,7 +118,9 @@
MOCK_METHOD2(SetStream,
Status(base::span<const uint8_t> stream,
const DecryptConfig* decrypt_config));
-
+ bool IsChromaSamplingSupported(VideoChromaSampling format) override {
+ return format == VideoChromaSampling::k420;
+ }
void Reset() override {}
};
diff --git a/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.cc b/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.cc
index c923042..ed28370 100644
--- a/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.cc
+++ b/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.cc
@@ -59,6 +59,11 @@
return new VaapiH265Picture(std::move(va_surface));
}
+bool H265VaapiVideoDecoderDelegate::IsChromaSamplingSupported(
+ VideoChromaSampling chroma_sampling) {
+ return chroma_sampling == VideoChromaSampling::k420;
+}
+
DecodeStatus H265VaapiVideoDecoderDelegate::SubmitFrameMetadata(
const H265SPS* sps,
const H265PPS* pps,
diff --git a/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.h b/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.h
index 09adc1d0..9c699998 100644
--- a/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.h
+++ b/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.h
@@ -62,6 +62,7 @@
void Reset() override;
Status SetStream(base::span<const uint8_t> stream,
const DecryptConfig* decrypt_config) override;
+ bool IsChromaSamplingSupported(VideoChromaSampling chroma_sampling) override;
private:
void FillVAPicture(VAPictureHEVC* va_pic, scoped_refptr<H265Picture> pic);
diff --git a/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc b/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc
index b44f2e3..9f7c6900 100644
--- a/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc
+++ b/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc
@@ -64,6 +64,7 @@
MOCK_CONST_METHOD0(GetPicSize, gfx::Size());
MOCK_CONST_METHOD0(GetProfile, VideoCodecProfile());
MOCK_CONST_METHOD0(GetBitDepth, uint8_t());
+ MOCK_CONST_METHOD0(GetChromaSampling, VideoChromaSampling());
MOCK_CONST_METHOD0(GetVisibleRect, gfx::Rect());
MOCK_CONST_METHOD0(GetRequiredNumOfPictures, size_t());
MOCK_CONST_METHOD0(GetNumReferenceFrames, size_t());
diff --git a/media/gpu/vp8_decoder.cc b/media/gpu/vp8_decoder.cc
index 94e908e..c499357f 100644
--- a/media/gpu/vp8_decoder.cc
+++ b/media/gpu/vp8_decoder.cc
@@ -186,6 +186,12 @@
return 8u;
}
+VideoChromaSampling VP8Decoder::GetChromaSampling() const {
+ // VP8 decoder currently does not rely on chroma sampling format for
+ // creating/reconfiguring decoder, so return an unknown format.
+ return VideoChromaSampling::kUnknown;
+}
+
size_t VP8Decoder::GetRequiredNumOfPictures() const {
constexpr size_t kPicsInPipeline = limits::kMaxVideoFrames + 1;
return kNumVp8ReferenceBuffers + kPicsInPipeline;
diff --git a/media/gpu/vp8_decoder.h b/media/gpu/vp8_decoder.h
index 8b87e6b..a1acdcb 100644
--- a/media/gpu/vp8_decoder.h
+++ b/media/gpu/vp8_decoder.h
@@ -77,6 +77,7 @@
gfx::Rect GetVisibleRect() const override;
VideoCodecProfile GetProfile() const override;
uint8_t GetBitDepth() const override;
+ VideoChromaSampling GetChromaSampling() const override;
size_t GetRequiredNumOfPictures() const override;
size_t GetNumReferenceFrames() const override;
diff --git a/media/gpu/vp9_decoder.cc b/media/gpu/vp9_decoder.cc
index f396e03..60328ee 100644
--- a/media/gpu/vp9_decoder.cc
+++ b/media/gpu/vp9_decoder.cc
@@ -408,6 +408,12 @@
return bit_depth_;
}
+VideoChromaSampling VP9Decoder::GetChromaSampling() const {
+ // VP9 decoder currently does not rely on chroma sampling format for
+ // creating/reconfiguring decoder, so return an unknown format.
+ return VideoChromaSampling::kUnknown;
+}
+
size_t VP9Decoder::GetRequiredNumOfPictures() const {
constexpr size_t kPicsInPipeline = limits::kMaxVideoFrames + 1;
return kPicsInPipeline + GetNumReferenceFrames();
diff --git a/media/gpu/vp9_decoder.h b/media/gpu/vp9_decoder.h
index 4cbfdec..da1898a 100644
--- a/media/gpu/vp9_decoder.h
+++ b/media/gpu/vp9_decoder.h
@@ -134,6 +134,7 @@
gfx::Rect GetVisibleRect() const override;
VideoCodecProfile GetProfile() const override;
uint8_t GetBitDepth() const override;
+ VideoChromaSampling GetChromaSampling() const override;
size_t GetRequiredNumOfPictures() const override;
size_t GetNumReferenceFrames() const override;
diff --git a/media/gpu/windows/d3d11_decoder_configurator.cc b/media/gpu/windows/d3d11_decoder_configurator.cc
index 2fcc930..6151962 100644
--- a/media/gpu/windows/d3d11_decoder_configurator.cc
+++ b/media/gpu/windows/d3d11_decoder_configurator.cc
@@ -15,6 +15,7 @@
#include "media/gpu/windows/av1_guids.h"
#include "media/gpu/windows/d3d11_copying_texture_wrapper.h"
#include "media/gpu/windows/d3d11_status.h"
+#include "media/gpu/windows/supported_profile_helpers.h"
#include "ui/gfx/geometry/size.h"
#include "ui/gl/direct_composition_support.h"
@@ -40,6 +41,7 @@
const gpu::GpuDriverBugWorkarounds& workarounds,
const VideoDecoderConfig& config,
uint8_t bit_depth,
+ VideoChromaSampling chroma_sampling,
MediaLog* media_log,
bool use_shared_handle) {
// Decoder swap chains do not support shared resources. More info in
@@ -47,8 +49,24 @@
// handle, we disable decode swap chain support if shared handle is enabled.
const bool supports_nv12_decode_swap_chain =
gl::DirectCompositionDecodeSwapChainSupported() && !use_shared_handle;
- const auto decoder_dxgi_format =
- bit_depth == 8 ? DXGI_FORMAT_NV12 : DXGI_FORMAT_P010;
+
+ DXGI_FORMAT decoder_dxgi_format = DXGI_FORMAT_UNKNOWN;
+ // Assume YUV420 format.
+ switch (bit_depth) {
+ case 8:
+ decoder_dxgi_format = DXGI_FORMAT_NV12;
+ break;
+ case 10:
+ decoder_dxgi_format = DXGI_FORMAT_P010;
+ break;
+ case 12:
+ decoder_dxgi_format = DXGI_FORMAT_P016;
+ break;
+ default:
+ NOTREACHED() << "Unsupported bit depth: " << bit_depth;
+ return nullptr;
+ }
+
GUID decoder_guid = {};
if (config.codec() == VideoCodec::kH264) {
decoder_guid = D3D11_DECODER_PROFILE_H264_VLD_NOFGT;
@@ -68,6 +86,48 @@
decoder_guid = D3D11_DECODER_PROFILE_HEVC_VLD_MAIN;
} else if (config.profile() == HEVCPROFILE_MAIN10) {
decoder_guid = D3D11_DECODER_PROFILE_HEVC_VLD_MAIN10;
+ } else if (config.profile() == HEVCPROFILE_REXT) {
+ // TODO(crbug.com/1345568): Enable 8-bit 444 decoding when AYUV
+ // is added into video pixel format histogram enumerations.
+ if (bit_depth == 8) {
+ if (chroma_sampling == VideoChromaSampling::k420) {
+ decoder_guid = DXVA_ModeHEVC_VLD_Main_Intel;
+ decoder_dxgi_format = DXGI_FORMAT_NV12;
+ } else {
+ MEDIA_LOG(INFO, media_log)
+ << "D3D11VideoDecoder does not support HEVC range extension "
+ << config.codec() << " with chroma subsampling format "
+ << VideoChromaSamplingToString(chroma_sampling) << " and bit depth "
+ << base::strict_cast<int>(bit_depth);
+ return nullptr;
+ }
+ } else if (bit_depth == 10) {
+ if (chroma_sampling == VideoChromaSampling::k420) {
+ decoder_guid = DXVA_ModeHEVC_VLD_Main10_Intel;
+ decoder_dxgi_format = DXGI_FORMAT_P010;
+ } else if (chroma_sampling == VideoChromaSampling::k422) {
+ decoder_guid = DXVA_ModeHEVC_VLD_Main422_10_Intel;
+ decoder_dxgi_format = DXGI_FORMAT_Y210;
+ } else if (chroma_sampling == VideoChromaSampling::k444) {
+ decoder_guid = DXVA_ModeHEVC_VLD_Main444_10_Intel;
+ decoder_dxgi_format = DXGI_FORMAT_Y410;
+ }
+ } else if (bit_depth == 12) {
+ // TODO(crbug.com/1345568): Enable 12-bit 422/444 decoding.
+ // 12-bit decoding with 422 & 444 format does not work well
+ // on Intel platforms.
+ if (chroma_sampling == VideoChromaSampling::k420) {
+ decoder_guid = DXVA_ModeHEVC_VLD_Main12_Intel;
+ decoder_dxgi_format = DXGI_FORMAT_P016;
+ } else {
+ MEDIA_LOG(INFO, media_log)
+ << "D3D11VideoDecoder does not support HEVC range extension "
+ << config.codec() << " with chroma subsampling format "
+ << VideoChromaSamplingToString(chroma_sampling) << " and bit depth "
+ << base::strict_cast<int>(bit_depth);
+ return nullptr;
+ }
+ }
}
#endif // BUILDFLAG(ENABLE_HEVC_PARSER_AND_HW_DECODER)
else {
@@ -78,7 +138,7 @@
MEDIA_LOG(INFO, media_log)
<< "D3D11VideoDecoder is using " << GetProfileName(config.profile())
- << " / " << (decoder_dxgi_format == DXGI_FORMAT_NV12 ? "NV12" : "P010");
+ << " / " << VideoChromaSamplingToString(chroma_sampling);
return std::make_unique<D3D11DecoderConfigurator>(
decoder_dxgi_format, decoder_guid, config.coded_size(),
diff --git a/media/gpu/windows/d3d11_decoder_configurator.h b/media/gpu/windows/d3d11_decoder_configurator.h
index 8e5adfc..7814bf5 100644
--- a/media/gpu/windows/d3d11_decoder_configurator.h
+++ b/media/gpu/windows/d3d11_decoder_configurator.h
@@ -37,6 +37,7 @@
const gpu::GpuDriverBugWorkarounds& workarounds,
const VideoDecoderConfig& config,
uint8_t bit_depth,
+ VideoChromaSampling chroma_sampling,
MediaLog* media_log,
bool use_shared_handle);
diff --git a/media/gpu/windows/d3d11_decoder_configurator_unittest.cc b/media/gpu/windows/d3d11_decoder_configurator_unittest.cc
index bf56ca1..a75f6e12 100644
--- a/media/gpu/windows/d3d11_decoder_configurator_unittest.cc
+++ b/media/gpu/windows/d3d11_decoder_configurator_unittest.cc
@@ -43,10 +43,11 @@
prefs.enable_zero_copy_dxgi_video = zero_copy_enabled;
gpu::GpuDriverBugWorkarounds workarounds;
workarounds.disable_dxgi_zero_copy_video = false;
+ VideoChromaSampling chroma_sampling = VideoChromaSampling::k420;
auto media_log = std::make_unique<NullMediaLog>();
- return D3D11DecoderConfigurator::Create(prefs, workarounds, config,
- bit_depth, media_log.get(),
- false /*use_shared_handle*/);
+ return D3D11DecoderConfigurator::Create(
+ prefs, workarounds, config, bit_depth, chroma_sampling, media_log.get(),
+ false /*use_shared_handle*/);
}
};
diff --git a/media/gpu/windows/d3d11_h265_accelerator.cc b/media/gpu/windows/d3d11_h265_accelerator.cc
index 3a20e9a..d0ba121 100644
--- a/media/gpu/windows/d3d11_h265_accelerator.cc
+++ b/media/gpu/windows/d3d11_h265_accelerator.cc
@@ -92,6 +92,13 @@
return base::MakeRefCounted<D3D11H265Picture>(picture);
}
+bool D3D11H265Accelerator::IsChromaSamplingSupported(
+ VideoChromaSampling chroma_sampling) {
+ return chroma_sampling == VideoChromaSampling::k420 ||
+ chroma_sampling == VideoChromaSampling::k422 ||
+ chroma_sampling == VideoChromaSampling::k444;
+}
+
DecoderStatus D3D11H265Accelerator::SubmitFrameMetadata(
const H265SPS* sps,
const H265PPS* pps,
@@ -191,58 +198,59 @@
}
void D3D11H265Accelerator::FillPicParamsWithConstants(
- DXVA_PicParams_HEVC* pic) {
+ DXVA_PicParams_HEVC_Rext* pic) {
// According to DXVA spec section 2.2, this optional 1-bit flag
// has no meaning when used for CurrPic so always configure to 0.
- pic->CurrPic.AssociatedFlag = 0;
+ pic->main.CurrPic.AssociatedFlag = 0;
// num_tile_columns_minus1 and num_tile_rows_minus1 will only
// be set if tiles are enabled. Set to 0 by default.
- pic->num_tile_columns_minus1 = 0;
- pic->num_tile_rows_minus1 = 0;
+ pic->main.num_tile_columns_minus1 = 0;
+ pic->main.num_tile_rows_minus1 = 0;
// Host decoder may set this to 1 if sps_max_num_reorder_pics is 0,
// but there is no requirement that NoPicReorderingFlag must be
// derived from it. So we always set it to 0 here.
- pic->NoPicReorderingFlag = 0;
+ pic->main.NoPicReorderingFlag = 0;
// Must be set to 0 in absence of indication whether B slices are used
// or not, and it does not affect the decoding process.
- pic->NoBiPredFlag = 0;
+ pic->main.NoBiPredFlag = 0;
// Shall be set to 0 and accelerators shall ignore its value.
- pic->ReservedBits1 = 0;
+ pic->main.ReservedBits1 = 0;
// Bit field added to enable DWORD alignment and should be set to 0.
- pic->ReservedBits2 = 0;
+ pic->main.ReservedBits2 = 0;
// Should always be set to 0.
- pic->ReservedBits3 = 0;
+ pic->main.ReservedBits3 = 0;
// Should be set to 0 and ignored by accelerators
- pic->ReservedBits4 = 0;
+ pic->main.ReservedBits4 = 0;
// Should always be set to 0.
- pic->ReservedBits5 = 0;
+ pic->main.ReservedBits5 = 0;
// Should always be set to 0.
- pic->ReservedBits6 = 0;
+ pic->main.ReservedBits6 = 0;
// Should always be set to 0.
- pic->ReservedBits7 = 0;
+ pic->main.ReservedBits7 = 0;
}
#define ARG_SEL(_1, _2, NAME, ...) NAME
-#define SPS_TO_PP1(a) pic_param->a = sps->a;
-#define SPS_TO_PP2(a, b) pic_param->a = sps->b;
+#define SPS_TO_PP1(a) (pic_param->main).a = sps->a;
+#define SPS_TO_PPEXT(a) pic_param->a = sps->a;
+#define SPS_TO_PP2(a, b) (pic_param->main).a = sps->b;
#define SPS_TO_PP(...) ARG_SEL(__VA_ARGS__, SPS_TO_PP2, SPS_TO_PP1)(__VA_ARGS__)
-void D3D11H265Accelerator::PicParamsFromSPS(DXVA_PicParams_HEVC* pic_param,
+void D3D11H265Accelerator::PicParamsFromSPS(DXVA_PicParams_HEVC_Rext* pic_param,
const H265SPS* sps) {
// Refer to formula 7-14 and 7-16 of HEVC spec.
int min_cb_log2_size_y = sps->log2_min_luma_coding_block_size_minus3 + 3;
- pic_param->PicWidthInMinCbsY =
+ (pic_param->main).PicWidthInMinCbsY =
sps->pic_width_in_luma_samples >> min_cb_log2_size_y;
- pic_param->PicHeightInMinCbsY =
+ (pic_param->main).PicHeightInMinCbsY =
sps->pic_height_in_luma_samples >> min_cb_log2_size_y;
// wFormatAndSequenceInfoFlags from SPS
SPS_TO_PP(chroma_format_idc);
@@ -251,6 +259,9 @@
SPS_TO_PP(bit_depth_chroma_minus8);
SPS_TO_PP(log2_max_pic_order_cnt_lsb_minus4);
+ if (sps->profile_tier_level.general_profile_idc == 4) {
+ is_rext_ = true;
+ }
// HEVC DXVA spec does not clearly state which slot
// in sps->sps_max_dec_pic_buffering_minus1 should
// be used here. However section A4.1 of HEVC spec
@@ -258,7 +269,7 @@
// indicating the maximum DPB size if level is not
// 8.5.
int highest_tid = sps->sps_max_sub_layers_minus1;
- pic_param->sps_max_dec_pic_buffering_minus1 =
+ (pic_param->main).sps_max_dec_pic_buffering_minus1 =
sps->sps_max_dec_pic_buffering_minus1[highest_tid];
SPS_TO_PP(log2_min_luma_coding_block_size_minus3);
@@ -291,15 +302,29 @@
SPS_TO_PP(long_term_ref_pics_present_flag);
SPS_TO_PP(sps_temporal_mvp_enabled_flag);
SPS_TO_PP(strong_intra_smoothing_enabled_flag);
+
+ if (sps->sps_range_extension_flag) {
+ SPS_TO_PPEXT(transform_skip_rotation_enabled_flag);
+ SPS_TO_PPEXT(transform_skip_context_enabled_flag);
+ SPS_TO_PPEXT(implicit_rdpcm_enabled_flag);
+ SPS_TO_PPEXT(explicit_rdpcm_enabled_flag);
+ SPS_TO_PPEXT(extended_precision_processing_flag);
+ SPS_TO_PPEXT(intra_smoothing_disabled_flag);
+ SPS_TO_PPEXT(high_precision_offsets_enabled_flag);
+ SPS_TO_PPEXT(persistent_rice_adaptation_enabled_flag);
+ SPS_TO_PPEXT(cabac_bypass_alignment_enabled_flag);
+ }
}
#undef SPS_TO_PP
+#undef SPS_TO_PPEXT
#undef SPS_TO_PP2
#undef SPS_TO_PP1
-#define PPS_TO_PP1(a) pic_param->a = pps->a;
-#define PPS_TO_PP2(a, b) pic_param->a = pps->b;
+#define PPS_TO_PPEXT(a) pic_param->a = pps->a;
+#define PPS_TO_PP1(a) (pic_param->main).a = pps->a;
+#define PPS_TO_PP2(a, b) (pic_param->main).a = pps->b;
#define PPS_TO_PP(...) ARG_SEL(__VA_ARGS__, PPS_TO_PP2, PPS_TO_PP1)(__VA_ARGS__)
-void D3D11H265Accelerator::PicParamsFromPPS(DXVA_PicParams_HEVC* pic_param,
+void D3D11H265Accelerator::PicParamsFromPPS(DXVA_PicParams_HEVC_Rext* pic_param,
const H265PPS* pps) {
PPS_TO_PP(num_ref_idx_l0_default_active_minus1);
PPS_TO_PP(num_ref_idx_l1_default_active_minus1);
@@ -350,57 +375,75 @@
PPS_TO_PP(pps_tc_offset_div2);
PPS_TO_PP(log2_parallel_merge_level_minus2);
+ if (pps->pps_range_extension_flag) {
+ PPS_TO_PPEXT(cross_component_prediction_enabled_flag);
+ PPS_TO_PPEXT(chroma_qp_offset_list_enabled_flag);
+ if (pps->chroma_qp_offset_list_enabled_flag) {
+ PPS_TO_PPEXT(diff_cu_chroma_qp_offset_depth);
+ PPS_TO_PPEXT(chroma_qp_offset_list_len_minus1);
+ for (int i = 0; i <= pps->chroma_qp_offset_list_len_minus1; i++) {
+ PPS_TO_PPEXT(cb_qp_offset_list[i]);
+ PPS_TO_PPEXT(cr_qp_offset_list[i]);
+ }
+ }
+ PPS_TO_PPEXT(log2_sao_offset_scale_luma);
+ PPS_TO_PPEXT(log2_sao_offset_scale_chroma);
+ if (pps->transform_skip_enabled_flag) {
+ PPS_TO_PPEXT(log2_max_transform_skip_block_size_minus2);
+ }
+ }
return;
}
+#undef PPS_TO_PPEXT
#undef PPS_TO_PP
#undef PPS_TO_PP2
#undef PPS_TO_PP1
#undef ARG_SEL
void D3D11H265Accelerator::PicParamsFromSliceHeader(
- DXVA_PicParams_HEVC* pic_param,
+ DXVA_PicParams_HEVC_Rext* pic_param,
const H265SPS* sps,
const H265SliceHeader* slice_hdr) {
// IDR_W_RADL and IDR_N_LP NALUs do not contain st_rps in slice header.
// Otherwise if short_term_ref_pic_set_sps_flag is 1, host decoder
// shall set ucNumDeltaPocsOfRefRpsIdx to 0.
if (slice_hdr->short_term_ref_pic_set_sps_flag || !slice_hdr->st_rps_bits) {
- pic_param->ucNumDeltaPocsOfRefRpsIdx = 0;
- pic_param->wNumBitsForShortTermRPSInSlice = 0;
+ pic_param->main.ucNumDeltaPocsOfRefRpsIdx = 0;
+ pic_param->main.wNumBitsForShortTermRPSInSlice = 0;
} else {
- pic_param->ucNumDeltaPocsOfRefRpsIdx =
+ pic_param->main.ucNumDeltaPocsOfRefRpsIdx =
slice_hdr->GetStRefPicSet(sps).num_delta_pocs;
- pic_param->wNumBitsForShortTermRPSInSlice = slice_hdr->st_rps_bits;
+ pic_param->main.wNumBitsForShortTermRPSInSlice = slice_hdr->st_rps_bits;
}
- pic_param->IrapPicFlag = slice_hdr->irap_pic;
+ pic_param->main.IrapPicFlag = slice_hdr->irap_pic;
auto nal_unit_type = slice_hdr->nal_unit_type;
- pic_param->IdrPicFlag = (nal_unit_type == H265NALU::IDR_W_RADL ||
- nal_unit_type == H265NALU::IDR_N_LP);
- pic_param->IntraPicFlag = slice_hdr->irap_pic;
+ pic_param->main.IdrPicFlag = (nal_unit_type == H265NALU::IDR_W_RADL ||
+ nal_unit_type == H265NALU::IDR_N_LP);
+ pic_param->main.IntraPicFlag = slice_hdr->irap_pic;
}
-void D3D11H265Accelerator::PicParamsFromPic(DXVA_PicParams_HEVC* pic_param,
+void D3D11H265Accelerator::PicParamsFromPic(DXVA_PicParams_HEVC_Rext* pic_param,
D3D11H265Picture* pic) {
- pic_param->CurrPicOrderCntVal = pic->pic_order_cnt_val_;
- pic_param->CurrPic.Index7Bits = pic->picture_index_;
+ pic_param->main.CurrPicOrderCntVal = pic->pic_order_cnt_val_;
+ pic_param->main.CurrPic.Index7Bits = pic->picture_index_;
}
bool D3D11H265Accelerator::PicParamsFromRefLists(
- DXVA_PicParams_HEVC* pic_param,
+ DXVA_PicParams_HEVC_Rext* pic_param,
const H265Picture::Vector& ref_pic_set_lt_curr,
const H265Picture::Vector& ref_pic_set_st_curr_after,
const H265Picture::Vector& ref_pic_set_st_curr_before) {
constexpr int kDxvaInvalidRefPicIndex = 0xFF;
constexpr unsigned kStLtRpsSize = 8;
- std::fill_n(pic_param->RefPicSetStCurrBefore, kStLtRpsSize,
+ std::fill_n(pic_param->main.RefPicSetStCurrBefore, kStLtRpsSize,
kDxvaInvalidRefPicIndex);
- std::fill_n(pic_param->RefPicSetStCurrAfter, kStLtRpsSize,
+ std::fill_n(pic_param->main.RefPicSetStCurrAfter, kStLtRpsSize,
kDxvaInvalidRefPicIndex);
- std::fill_n(pic_param->RefPicSetLtCurr, kStLtRpsSize,
+ std::fill_n(pic_param->main.RefPicSetLtCurr, kStLtRpsSize,
kDxvaInvalidRefPicIndex);
std::copy(ref_frame_pocs_, ref_frame_pocs_ + kMaxRefPicListSize - 1,
- pic_param->PicOrderCntValList);
+ pic_param->main.PicOrderCntValList);
size_t idx = 0;
for (auto& it : ref_pic_set_st_curr_before) {
@@ -416,7 +459,7 @@
DLOG(ERROR) << "Invalid RefPicSetStCurrBefore size.";
return false;
}
- pic_param->RefPicSetStCurrBefore[idx++] = poc_index;
+ pic_param->main.RefPicSetStCurrBefore[idx++] = poc_index;
}
idx = 0;
for (auto& it : ref_pic_set_st_curr_after) {
@@ -432,7 +475,7 @@
DLOG(ERROR) << "Invalid RefPicSetStCurrAfter size.";
return false;
}
- pic_param->RefPicSetStCurrAfter[idx++] = poc_index;
+ pic_param->main.RefPicSetStCurrAfter[idx++] = poc_index;
}
idx = 0;
for (auto& it : ref_pic_set_lt_curr) {
@@ -448,7 +491,7 @@
DLOG(ERROR) << "Invalid RefPicSetLtCurr size.";
return false;
}
- pic_param->RefPicSetLtCurr[idx++] = poc_index;
+ pic_param->main.RefPicSetLtCurr[idx++] = poc_index;
}
return true;
@@ -467,7 +510,7 @@
const uint8_t* data,
size_t size,
const std::vector<SubsampleEntry>& subsamples) {
- DXVA_PicParams_HEVC pic_param = {};
+ DXVA_PicParams_HEVC_Rext pic_param = {};
D3D11H265Picture* d3d11_pic = pic->AsD3D11H265Picture();
if (!d3d11_pic) {
@@ -479,7 +522,8 @@
PicParamsFromPPS(&pic_param, pps);
PicParamsFromSliceHeader(&pic_param, sps, slice_hdr);
PicParamsFromPic(&pic_param, d3d11_pic);
- memcpy(pic_param.RefPicList, ref_frame_list_, sizeof pic_param.RefPicList);
+ memcpy(pic_param.main.RefPicList, ref_frame_list_,
+ sizeof pic_param.main.RefPicList);
if (!PicParamsFromRefLists(&pic_param, ref_pic_set_lt_curr,
ref_pic_set_st_curr_after,
@@ -487,7 +531,8 @@
return DecoderStatus::kFail;
}
- pic_param.StatusReportFeedbackNumber = current_status_report_feedback_num_++;
+ pic_param.main.StatusReportFeedbackNumber =
+ current_status_report_feedback_num_++;
UINT buffer_size;
void* buffer;
@@ -679,7 +724,11 @@
VideoContextWrapper::VideoBufferWrapper buffers[4] = {};
buffers[0].BufferType = D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS;
buffers[0].DataOffset = 0;
- buffers[0].DataSize = sizeof(DXVA_PicParams_HEVC);
+ if (is_rext_) {
+ buffers[0].DataSize = sizeof(DXVA_PicParams_HEVC_Rext);
+ } else {
+ buffers[0].DataSize = sizeof(DXVA_PicParams_HEVC);
+ }
buffers[1].BufferType = D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL;
buffers[1].DataOffset = 0;
buffers[1].DataSize = sizeof(slice_info_[0]) * slice_info_.size();
diff --git a/media/gpu/windows/d3d11_h265_accelerator.h b/media/gpu/windows/d3d11_h265_accelerator.h
index 8397622..f094b9c 100644
--- a/media/gpu/windows/d3d11_h265_accelerator.h
+++ b/media/gpu/windows/d3d11_h265_accelerator.h
@@ -34,6 +34,77 @@
class D3D11H265Accelerator;
class MediaLog;
+// Picture Parameters DXVA buffer struct for Rext/Scc is not specified in DXVA
+// spec. The below structures come from Intel platform DDI definition, so they
+// are currently Intel specific.
+// For NVidia and AMD platforms supporting HEVC Rext & Scc, it is expected
+// the picture param information included in below structures is sufficient
+// for underlying drivers supporting range extension/Scc.
+#pragma pack(push, 1)
+typedef struct {
+ DXVA_PicParams_HEVC main;
+
+ // HEVC Range Extension. Fields are named the same as in HEVC spec.
+ union {
+ struct {
+ UINT32 transform_skip_rotation_enabled_flag : 1;
+ UINT32 transform_skip_context_enabled_flag : 1;
+ UINT32 implicit_rdpcm_enabled_flag : 1;
+ UINT32 explicit_rdpcm_enabled_flag : 1;
+ UINT32 extended_precision_processing_flag : 1;
+ UINT32 intra_smoothing_disabled_flag : 1;
+ UINT32 high_precision_offsets_enabled_flag : 1;
+ UINT32 persistent_rice_adaptation_enabled_flag : 1;
+ UINT32 cabac_bypass_alignment_enabled_flag : 1;
+ UINT32 cross_component_prediction_enabled_flag : 1;
+ UINT32 chroma_qp_offset_list_enabled_flag : 1;
+ // Indicates if luma bit depth equals to 16. If its value is 1, the
+ // corresponding bit_depth_luma_minus8 must be set to 0.
+ UINT32 BitDepthLuma16 : 1;
+ // Indicates if chroma bit depth equals to 16. If its value is 1, the
+ // corresponding bit_depth_chroma_minus8 must be set to 0.
+ UINT32 BitDepthChroma16 : 1;
+ UINT32 ReservedBits8 : 19;
+ };
+ UINT32 dwRangeExtensionFlags;
+ };
+
+ UCHAR diff_cu_chroma_qp_offset_depth; // [0..3]
+ UCHAR chroma_qp_offset_list_len_minus1; // [0..5]
+ UCHAR log2_sao_offset_scale_luma; // [0..6]
+ UCHAR log2_sao_offset_scale_chroma; // [0..6]
+ UCHAR log2_max_transform_skip_block_size_minus2;
+ CHAR cb_qp_offset_list[6]; // [-12..12]
+ CHAR cr_qp_offset_list[6]; // [-12..12]
+} DXVA_PicParams_HEVC_Rext;
+
+typedef struct {
+ DXVA_PicParams_HEVC_Rext main_rext;
+
+ // HEVC Screen Content Coding. Fields are named the same as in HEVC spec.
+ union {
+ struct {
+ UINT32 pps_curr_pic_ref_enabled_flag : 1;
+ UINT32 palette_mode_enabled_flag : 1;
+ UINT32 motion_vector_resolution_control_idc : 2;
+ UINT32 intra_boundary_filtering_disabled_flag : 1;
+ UINT32 residual_adaptive_colour_transform_enabled_flag : 1;
+ UINT32 pps_slice_act_qp_offsets_present_flag : 1;
+ UINT32 ReservedBits9 : 25;
+ };
+ UINT32 dwSccExtensionFlags;
+ };
+
+ UCHAR palette_max_size; // [0..64]
+ UCHAR delta_palette_max_predictor_size; // [0..128]
+ UCHAR PredictorPaletteSize; // [0..127]
+ USHORT PredictorPaletteEntries[3][128];
+ CHAR pps_act_y_qp_offset_plus5; // [-7..17]
+ CHAR pps_act_cb_qp_offset_plus5; // [-7..17]
+ CHAR pps_act_cr_qp_offset_plus3; // [-9..15]
+} DXVA_PicParams_HEVC_SCC;
+#pragma pack(pop)
+
class D3D11H265Accelerator : public H265Decoder::H265Accelerator {
public:
D3D11H265Accelerator(D3D11VideoDecoderClient* client,
@@ -68,32 +139,36 @@
Status SubmitDecode(scoped_refptr<H265Picture> pic) override;
void Reset() override;
bool OutputPicture(scoped_refptr<H265Picture> pic) override;
+ bool IsChromaSamplingSupported(VideoChromaSampling chroma_sampling) override;
private:
bool SubmitSliceData();
bool RetrieveBitstreamBuffer();
// Gets a pic params struct with the constant fields set.
- void FillPicParamsWithConstants(DXVA_PicParams_HEVC* pic_param);
+ void FillPicParamsWithConstants(DXVA_PicParams_HEVC_Rext* pic_param);
// Populate the pic params with fields from the SPS structure.
- void PicParamsFromSPS(DXVA_PicParams_HEVC* pic_param, const H265SPS* sps);
+ void PicParamsFromSPS(DXVA_PicParams_HEVC_Rext* pic_param,
+ const H265SPS* sps);
// Populate the pic params with fields from the PPS structure.
- void PicParamsFromPPS(DXVA_PicParams_HEVC* pic_param, const H265PPS* pps);
+ void PicParamsFromPPS(DXVA_PicParams_HEVC_Rext* pic_param,
+ const H265PPS* pps);
// Populate the pic params with fields from the slice header structure.
- void PicParamsFromSliceHeader(DXVA_PicParams_HEVC* pic_param,
+ void PicParamsFromSliceHeader(DXVA_PicParams_HEVC_Rext* pic_param,
const H265SPS* sps,
const H265SliceHeader* slice_hdr);
// Populate the pic params with fields from the picture passed in.
- void PicParamsFromPic(DXVA_PicParams_HEVC* pic_param, D3D11H265Picture* pic);
+ void PicParamsFromPic(DXVA_PicParams_HEVC_Rext* pic_param,
+ D3D11H265Picture* pic);
// Populate the pic params with fields from ref_pic_set_lt_curr,
// ref_pic_set_st_curr_after and ref_pic_set_st_curr_before
bool PicParamsFromRefLists(
- DXVA_PicParams_HEVC* pic_param,
+ DXVA_PicParams_HEVC_Rext* pic_param,
const H265Picture::Vector& ref_pic_set_lt_curr,
const H265Picture::Vector& ref_pic_set_st_curr_after,
const H265Picture::Vector& ref_pic_set_st_curr_before);
@@ -119,6 +194,8 @@
int ref_frame_pocs_[kMaxRefPicListSize];
base::flat_map<int, int> poc_index_into_ref_pic_list_;
bool use_scaling_lists_ = false;
+ // If current stream is encoded with range extension profile.
+ bool is_rext_ = false;
// Information that's accumulated during slices and submitted at the end
std::vector<DXVA_Slice_HEVC_Short> slice_info_;
diff --git a/media/gpu/windows/d3d11_texture_selector.cc b/media/gpu/windows/d3d11_texture_selector.cc
index fbf38c08..7c9d135 100644
--- a/media/gpu/windows/d3d11_texture_selector.cc
+++ b/media/gpu/windows/d3d11_texture_selector.cc
@@ -40,6 +40,31 @@
return true;
}
+const char* DxgiFormatToString(DXGI_FORMAT format) {
+ switch (format) {
+ case DXGI_FORMAT_Y416:
+ return "Y416";
+ case DXGI_FORMAT_Y216:
+ return "Y216";
+ case DXGI_FORMAT_P016:
+ return "P016";
+ case DXGI_FORMAT_NV12:
+ return "NV12";
+ case DXGI_FORMAT_P010:
+ return "P010";
+ case DXGI_FORMAT_Y210:
+ return "Y210";
+ case DXGI_FORMAT_AYUV:
+ return "AYUV";
+ case DXGI_FORMAT_Y410:
+ return "Y410";
+ case DXGI_FORMAT_YUY2:
+ return "YUY2";
+ default:
+ return "UNKNOWN";
+ }
+}
+
// static
std::unique_ptr<TextureSelector> TextureSelector::Create(
const gpu::GpuPreferences& gpu_preferences,
@@ -84,8 +109,14 @@
}
break;
}
- case DXGI_FORMAT_P010: {
- MEDIA_LOG(INFO, media_log) << "D3D11VideoDecoder producing P010";
+ case DXGI_FORMAT_P010:
+ case DXGI_FORMAT_Y416:
+ case DXGI_FORMAT_Y216:
+ case DXGI_FORMAT_P016:
+ case DXGI_FORMAT_Y410:
+ case DXGI_FORMAT_Y210: {
+ MEDIA_LOG(INFO, media_log) << "D3D11VideoDecoder producing "
+ << DxgiFormatToString(decoder_output_format);
if (hdr_output_mode == HDRMode::kSDROnly &&
supports_fmt(DXGI_FORMAT_B8G8R8A8_UNORM)) {
output_dxgi_format = DXGI_FORMAT_B8G8R8A8_UNORM;
diff --git a/media/gpu/windows/d3d11_texture_wrapper.cc b/media/gpu/windows/d3d11_texture_wrapper.cc
index 09366552..fa6989e 100644
--- a/media/gpu/windows/d3d11_texture_wrapper.cc
+++ b/media/gpu/windows/d3d11_texture_wrapper.cc
@@ -27,6 +27,11 @@
switch (dxgi_format) {
case DXGI_FORMAT_NV12:
case DXGI_FORMAT_P010:
+ case DXGI_FORMAT_Y210:
+ case DXGI_FORMAT_Y410:
+ case DXGI_FORMAT_P016:
+ case DXGI_FORMAT_Y216:
+ case DXGI_FORMAT_Y416:
case DXGI_FORMAT_B8G8R8A8_UNORM:
case DXGI_FORMAT_R10G10B10A2_UNORM:
case DXGI_FORMAT_R16G16B16A16_FLOAT:
@@ -38,8 +43,14 @@
size_t NumPlanes(DXGI_FORMAT dxgi_format) {
switch (dxgi_format) {
+ case DXGI_FORMAT_Y210:
+ case DXGI_FORMAT_Y410:
+ case DXGI_FORMAT_Y216:
+ case DXGI_FORMAT_Y416:
+ return 1;
case DXGI_FORMAT_NV12:
case DXGI_FORMAT_P010:
+ case DXGI_FORMAT_P016:
return 2;
case DXGI_FORMAT_B8G8R8A8_UNORM:
case DXGI_FORMAT_R10G10B10A2_UNORM:
diff --git a/media/gpu/windows/d3d11_video_decoder.cc b/media/gpu/windows/d3d11_video_decoder.cc
index 114c080f..abcdff84 100644
--- a/media/gpu/windows/d3d11_video_decoder.cc
+++ b/media/gpu/windows/d3d11_video_decoder.cc
@@ -213,13 +213,15 @@
D3D11Status::Or<ComD3D11VideoDecoder> D3D11VideoDecoder::CreateD3D11Decoder() {
// By default we assume outputs are 8-bit for SDR color spaces and 10 bit for
- // HDR color spaces (or VP9.2, or HEVC Main10) with HBD capable codecs (the
- // decoder doesn't support H264PROFILE_HIGH10PROFILE). We'll get a config
- // change once we know the real bit depth if this turns out to be wrong.
+ // HDR color spaces (or VP9.2, or HEVC Main10, or HEVC Rext) with HBD capable
+ // codecs (the decoder doesn't support H264PROFILE_HIGH10PROFILE). We'll get
+ // a config change once we know the real bit depth if this turns out to be
+ // wrong.
bit_depth_ =
accelerated_video_decoder_
? accelerated_video_decoder_->GetBitDepth()
: (config_.profile() == VP9PROFILE_PROFILE2 ||
+ config_.profile() == HEVCPROFILE_REXT ||
config_.profile() == HEVCPROFILE_MAIN10 ||
(config_.color_space_info().ToGfxColorSpace().IsHDR() &&
config_.codec() != VideoCodec::kH264)
@@ -231,8 +233,8 @@
// TODO: supported check?
decoder_configurator_ = D3D11DecoderConfigurator::Create(
- gpu_preferences_, gpu_workarounds_, config_, bit_depth_, media_log_.get(),
- use_shared_handle);
+ gpu_preferences_, gpu_workarounds_, config_, bit_depth_, chroma_sampling_,
+ media_log_.get(), use_shared_handle);
if (!decoder_configurator_)
return D3D11Status::Codes::kDecoderUnsupportedProfile;
@@ -293,6 +295,7 @@
break;
}
}
+
if (!found)
return D3D11Status::Codes::kDecoderUnsupportedConfig;
@@ -626,20 +629,26 @@
const auto new_bit_depth = accelerated_video_decoder_->GetBitDepth();
const auto new_profile = accelerated_video_decoder_->GetProfile();
const auto new_coded_size = accelerated_video_decoder_->GetPicSize();
+ const auto new_chroma_sampling =
+ accelerated_video_decoder_->GetChromaSampling();
if (new_profile == config_.profile() &&
new_coded_size == config_.coded_size() &&
- new_bit_depth == bit_depth_ && !picture_buffers_.size()) {
+ new_bit_depth == bit_depth_ && !picture_buffers_.size() &&
+ new_chroma_sampling == chroma_sampling_) {
continue;
}
// Update the config.
MEDIA_LOG(INFO, media_log_)
<< "D3D11VideoDecoder config change: profile: "
- << static_cast<int>(new_profile) << " coded_size: ("
- << new_coded_size.width() << ", " << new_coded_size.height() << ")";
+ << static_cast<int>(new_profile) << " chroma_sampling_format: "
+ << VideoChromaSamplingToString(new_chroma_sampling)
+ << " coded_size: (" << new_coded_size.width() << ", "
+ << new_coded_size.height() << ")";
profile_ = new_profile;
config_.set_profile(profile_);
config_.set_coded_size(new_coded_size);
+ chroma_sampling_ = new_chroma_sampling;
// Replace the decoder, and clear any picture buffers we have. It's okay
// if we don't have any picture buffer yet; this might be before the
diff --git a/media/gpu/windows/d3d11_video_decoder.h b/media/gpu/windows/d3d11_video_decoder.h
index a3ec976..c838c9a 100644
--- a/media/gpu/windows/d3d11_video_decoder.h
+++ b/media/gpu/windows/d3d11_video_decoder.h
@@ -23,6 +23,7 @@
#include "media/base/status.h"
#include "media/base/supported_video_decoder_config.h"
#include "media/base/video_decoder.h"
+#include "media/base/video_types.h"
#include "media/gpu/command_buffer_helper.h"
#include "media/gpu/media_gpu_export.h"
#include "media/gpu/windows/d3d11_com_defs.h"
@@ -312,6 +313,10 @@
// need to recreate the decoder.
uint8_t bit_depth_ = 8u;
+ // The currently configured chroma sampling format on the accelerator. When
+ // this changes we need to recreate the decoder.
+ VideoChromaSampling chroma_sampling_ = VideoChromaSampling::k420;
+
base::WeakPtrFactory<D3D11VideoDecoder> weak_factory_{this};
};
diff --git a/media/gpu/windows/d3d11_video_device_format_support.h b/media/gpu/windows/d3d11_video_device_format_support.h
index 01dae897..8472044 100644
--- a/media/gpu/windows/d3d11_video_device_format_support.h
+++ b/media/gpu/windows/d3d11_video_device_format_support.h
@@ -14,7 +14,7 @@
namespace media {
// Helper class for Checking whether a video can be processed in any given
-// DXVI_FORMAT.
+// DXGI_FORMAT.
class MEDIA_GPU_EXPORT FormatSupportChecker {
public:
// |device| may be null, mostly for tests.
diff --git a/media/gpu/windows/supported_profile_helpers.cc b/media/gpu/windows/supported_profile_helpers.cc
index b8c799d..47cbb84d 100644
--- a/media/gpu/windows/supported_profile_helpers.cc
+++ b/media/gpu/windows/supported_profile_helpers.cc
@@ -230,6 +230,16 @@
video_device.Get(), profile_id, kModernResolutions);
continue;
}
+ // For range extensions only test main10_422 with P010, and apply
+ // the same resolution range to main420 & main10_YUV420. Ideally we
+ // should also be testing against NV12 & Y210 for YUV422, and Y410 for
+ // YUV444 8/10/12 bit.
+ if (profile_id == DXVA_ModeHEVC_VLD_Main422_10_Intel) {
+ supported_resolutions[HEVCPROFILE_REXT] =
+ GetResolutionsForGUID(video_device.Get(), profile_id,
+ kModernResolutions, DXGI_FORMAT_P010);
+ continue;
+ }
if (profile_id == D3D11_DECODER_PROFILE_HEVC_VLD_MAIN10) {
supported_resolutions[HEVCPROFILE_MAIN10] =
GetResolutionsForGUID(video_device.Get(), profile_id,
diff --git a/media/gpu/windows/supported_profile_helpers.h b/media/gpu/windows/supported_profile_helpers.h
index 6e521d0..e99379b 100644
--- a/media/gpu/windows/supported_profile_helpers.h
+++ b/media/gpu/windows/supported_profile_helpers.h
@@ -5,6 +5,8 @@
#ifndef MEDIA_GPU_WINDOWS_SUPPORTED_PROFILE_HELPERS_H_
#define MEDIA_GPU_WINDOWS_SUPPORTED_PROFILE_HELPERS_H_
+#include <initguid.h>
+
#include "base/containers/flat_map.h"
#include "gpu/config/gpu_driver_bug_workarounds.h"
#include "media/base/video_codecs.h"
@@ -14,6 +16,107 @@
namespace media {
+#if BUILDFLAG(ENABLE_HEVC_PARSER_AND_HW_DECODER)
+// Vendor defined GUIDs for video decoder devices.
+// Intel specific HEVC decoders. SCC decoders not added here.
+DEFINE_GUID(DXVA_ModeHEVC_VLD_Main_Intel,
+ 0x8c56eb1e,
+ 0x2b47,
+ 0x466f,
+ 0x8d,
+ 0x33,
+ 0x7d,
+ 0xbc,
+ 0xd6,
+ 0x3f,
+ 0x3d,
+ 0xf2);
+DEFINE_GUID(DXVA_ModeHEVC_VLD_Main10_Intel,
+ 0x75fc75f7,
+ 0xc589,
+ 0x4a07,
+ 0xa2,
+ 0x5b,
+ 0x72,
+ 0xe0,
+ 0x3b,
+ 0x03,
+ 0x83,
+ 0xb3);
+DEFINE_GUID(DXVA_ModeHEVC_VLD_Main12_Intel,
+ 0x8ff8a3aa,
+ 0xc456,
+ 0x4132,
+ 0xb6,
+ 0xef,
+ 0x69,
+ 0xd9,
+ 0xdd,
+ 0x72,
+ 0x57,
+ 0x1d);
+DEFINE_GUID(DXVA_ModeHEVC_VLD_Main422_10_Intel,
+ 0xe484dcb8,
+ 0xcac9,
+ 0x4859,
+ 0x99,
+ 0xf5,
+ 0x5c,
+ 0x0d,
+ 0x45,
+ 0x06,
+ 0x90,
+ 0x89);
+DEFINE_GUID(DXVA_ModeHEVC_VLD_Main422_12_Intel,
+ 0xc23dd857,
+ 0x874b,
+ 0x423c,
+ 0xb6,
+ 0xe0,
+ 0x82,
+ 0xce,
+ 0xaa,
+ 0x9b,
+ 0x11,
+ 0x8a);
+DEFINE_GUID(DXVA_ModeHEVC_VLD_Main444_Intel,
+ 0x41a5af96,
+ 0xe415,
+ 0x4b0c,
+ 0x9d,
+ 0x03,
+ 0x90,
+ 0x78,
+ 0x58,
+ 0xe2,
+ 0x3e,
+ 0x78);
+DEFINE_GUID(DXVA_ModeHEVC_VLD_Main444_10_Intel,
+ 0x6a6a81ba,
+ 0x912a,
+ 0x485d,
+ 0xb5,
+ 0x7f,
+ 0xcc,
+ 0xd2,
+ 0xd3,
+ 0x7b,
+ 0x8d,
+ 0x94);
+DEFINE_GUID(DXVA_ModeHEVC_VLD_Main444_12_Intel,
+ 0x5b08e35d,
+ 0x0c66,
+ 0x4c51,
+ 0xa6,
+ 0xf1,
+ 0x89,
+ 0xd0,
+ 0x0c,
+ 0xb2,
+ 0xc1,
+ 0x97);
+#endif // BUILDFLAG(ENABLE_HEVC_PARSER_AND_HW_DECODER)
+
struct SupportedResolutionRange {
gfx::Size min_resolution;
gfx::Size max_landscape_resolution;
diff --git a/media/test/data/README.md b/media/test/data/README.md
index 48e983f0..134625d 100644
--- a/media/test/data/README.md
+++ b/media/test/data/README.md
@@ -1194,6 +1194,36 @@
ffmpeg -i bear-1280x720.mp4 -vcodec hevc -pix_fmt yuv420p10le bear-1280x720-hevc-10bit.mp4
```
+#### bear-1280x720-hevc-10bit-422.mp4
+HEVC video stream with 10-bit 422 range extension profile, generated with
+```
+ffmpeg -i bear-1280x720.mp4 -vcodec libx265 -pix_fmt yuv422p10le bear-1280x720-hevc-10bit-422.mp4
+```
+
+#### bear-1280x720-hevc-10bit-444.mp4
+HEVC video stream with 10-bit 444 range extension profile, generated with
+```
+ffmpeg -i bear-1280x720.mp4 -vcodec libx265 -pix_fmt yuv444p10le bear-1280x720-hevc-10bit-444.mp4
+```
+
+#### bear-1280x720-hevc-12bit-420.mp4
+HEVC video stream with 12-bit 420 range extension profile, generated with
+```
+ffmpeg -i bear-1280x720.mp4 -vcodec libx265 -pix_fmt yuv420p12le bear-1280x720-hevc-12bit-420.mp4
+```
+
+#### bear-1280x720-hevc-12bit-422.mp4
+HEVC video stream with 12-bit 422 range extension profile, generated with
+```
+ffmpeg -i bear-1280x720.mp4 -vcodec libx265 -pix_fmt yuv422p12le bear-1280x720-hevc-12bit-422.mp4
+```
+
+#### bear-1280x720-hevc-12bit-444.mp4
+HEVC video stream with 12-bit 444 range extension profile, generated with
+```
+ffmpeg -i bear-1280x720.mp4 -vcodec libx265 -pix_fmt yuv444p12le bear-1280x720-hevc-12bit-444.mp4
+```
+
### Multi-track MP4 file
(c) copyright 2008, Blender Foundation / www.bigbuckbunny.org
diff --git a/media/test/data/bear-1280x720-hevc-10bit-422.mp4 b/media/test/data/bear-1280x720-hevc-10bit-422.mp4
new file mode 100644
index 0000000..4c198dd
--- /dev/null
+++ b/media/test/data/bear-1280x720-hevc-10bit-422.mp4
Binary files differ
diff --git a/media/test/data/bear-1280x720-hevc-10bit-444.mp4 b/media/test/data/bear-1280x720-hevc-10bit-444.mp4
new file mode 100644
index 0000000..0af30a2
--- /dev/null
+++ b/media/test/data/bear-1280x720-hevc-10bit-444.mp4
Binary files differ
diff --git a/media/test/data/bear-1280x720-hevc-12bit-420.mp4 b/media/test/data/bear-1280x720-hevc-12bit-420.mp4
new file mode 100644
index 0000000..baec739
--- /dev/null
+++ b/media/test/data/bear-1280x720-hevc-12bit-420.mp4
Binary files differ
diff --git a/media/test/data/bear-1280x720-hevc-12bit-422.mp4 b/media/test/data/bear-1280x720-hevc-12bit-422.mp4
new file mode 100644
index 0000000..4334d032
--- /dev/null
+++ b/media/test/data/bear-1280x720-hevc-12bit-422.mp4
Binary files differ
diff --git a/media/test/data/bear-1280x720-hevc-12bit-444.mp4 b/media/test/data/bear-1280x720-hevc-12bit-444.mp4
new file mode 100644
index 0000000..bd25fa2
--- /dev/null
+++ b/media/test/data/bear-1280x720-hevc-12bit-444.mp4
Binary files differ
diff --git a/media/video/h265_parser.cc b/media/video/h265_parser.cc
index 46e840f..29ded0b3 100644
--- a/media/video/h265_parser.cc
+++ b/media/video/h265_parser.cc
@@ -12,6 +12,7 @@
#include "base/bits.h"
#include "base/logging.h"
+#include "base/notreached.h"
#include "base/numerics/safe_conversions.h"
#include "media/base/decrypt_config.h"
#include "media/base/video_codecs.h"
@@ -269,6 +270,22 @@
: gfx::ColorSpace::RangeID::LIMITED);
}
+VideoChromaSampling H265SPS::GetChromaSampling() const {
+ switch (chroma_format_idc) {
+ case 0:
+ return VideoChromaSampling::k400;
+ case 1:
+ return VideoChromaSampling::k420;
+ case 2:
+ return VideoChromaSampling::k422;
+ case 3:
+ return VideoChromaSampling::k444;
+ default:
+ NOTREACHED();
+ return VideoChromaSampling::kUnknown;
+ }
+}
+
bool H265SliceHeader::IsISlice() const {
return slice_type == kSliceTypeI;
}
diff --git a/media/video/h265_parser.h b/media/video/h265_parser.h
index 0dce696..20b7a0a1 100644
--- a/media/video/h265_parser.h
+++ b/media/video/h265_parser.h
@@ -16,6 +16,7 @@
#include "media/base/media_export.h"
#include "media/base/ranges.h"
#include "media/base/video_color_space.h"
+#include "media/base/video_types.h"
#include "media/video/h264_bit_reader.h"
#include "media/video/h264_parser.h"
#include "media/video/h265_nalu_parser.h"
@@ -218,6 +219,7 @@
gfx::Size GetCodedSize() const;
gfx::Rect GetVisibleRect() const;
VideoColorSpace GetColorSpace() const;
+ VideoChromaSampling GetChromaSampling() const;
};
struct MEDIA_EXPORT H265PPS {