Switch AudioCodec and VideoCodec enumerations to using kMaxValue.
kMaxValue is the pattern chosen by the metrics team,
so we should switch to it so we can stop writing kAudioCodecMax + 1 etc.
Bug: 1209006
Change-Id: If7383903ba751438cc87128513bb64801305301e
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2924562
Reviewed-by: Dale Curtis <dalecurtis@chromium.org>
Reviewed-by: Sergey Volk <servolk@chromium.org>
Reviewed-by: Avi Drissman <avi@chromium.org>
Reviewed-by: Jun Mukai <mukai@chromium.org>
Reviewed-by: Ahmed Fakhry <afakhry@chromium.org>
Reviewed-by: Will Harris <wfh@chromium.org>
Reviewed-by: Sergey Ulanov <sergeyu@chromium.org>
Reviewed-by: Guido Urdaneta <guidou@chromium.org>
Reviewed-by: Chrome Cunningham <chcunningham@chromium.org>
Reviewed-by: Colin Blundell <blundell@chromium.org>
Commit-Queue: Dale Curtis <dalecurtis@chromium.org>
Cr-Commit-Position: refs/heads/master@{#911608}
diff --git a/ash/services/recording/recording_encoder_muxer.cc b/ash/services/recording/recording_encoder_muxer.cc
index 3130403..60d30ed 100644
--- a/ash/services/recording/recording_encoder_muxer.cc
+++ b/ash/services/recording/recording_encoder_muxer.cc
@@ -225,7 +225,7 @@
const base::FilePath& webm_file_path,
OnFailureCallback on_failure_callback)
: webm_muxer_(
- media::kCodecOpus,
+ media::AudioCodec::kOpus,
/*has_video_=*/true,
/*has_audio_=*/!!audio_input_params,
std::make_unique<RecordingMuxerDelegate>(webm_file_path, this)),
@@ -332,9 +332,9 @@
absl::optional<media::VideoEncoder::CodecDescription> codec_description) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- media::WebmMuxer::VideoParameters params(video_visible_rect_sizes_.front(),
- kMaxFrameRate, media::kCodecVP8,
- kColorSpace);
+ media::WebmMuxer::VideoParameters params(
+ video_visible_rect_sizes_.front(), kMaxFrameRate, media::VideoCodec::kVP8,
+ kColorSpace);
video_visible_rect_sizes_.pop();
// TODO(crbug.com/1143798): Explore changing the WebmMuxer so it doesn't work
diff --git a/chrome/common/media/cdm_registration.cc b/chrome/common/media/cdm_registration.cc
index 28107be..912cee9 100644
--- a/chrome/common/media/cdm_registration.cc
+++ b/chrome/common/media/cdm_registration.cc
@@ -120,11 +120,11 @@
// Not specifying any profiles to indicate that all relevant profiles
// should be considered supported.
const std::vector<media::VideoCodecProfile> kAllProfiles = {};
- capability.video_codecs.emplace(media::VideoCodec::kCodecVP8, kAllProfiles);
- capability.video_codecs.emplace(media::VideoCodec::kCodecVP9, kAllProfiles);
- capability.video_codecs.emplace(media::VideoCodec::kCodecAV1, kAllProfiles);
+ capability.video_codecs.emplace(media::VideoCodec::kVP8, kAllProfiles);
+ capability.video_codecs.emplace(media::VideoCodec::kVP9, kAllProfiles);
+ capability.video_codecs.emplace(media::VideoCodec::kAV1, kAllProfiles);
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- capability.video_codecs.emplace(media::VideoCodec::kCodecH264, kAllProfiles);
+ capability.video_codecs.emplace(media::VideoCodec::kH264, kAllProfiles);
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
// Both encryption schemes are supported on ChromeOS.
@@ -250,19 +250,18 @@
// decrypt-and-decode. Not specifying any profiles to indicate that all
// relevant profiles should be considered supported.
const std::vector<media::VideoCodecProfile> kAllProfiles = {};
- capability.video_codecs.emplace(media::VideoCodec::kCodecVP9, kAllProfiles);
+ capability.video_codecs.emplace(media::VideoCodec::kVP9, kAllProfiles);
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- capability.video_codecs.emplace(media::VideoCodec::kCodecH264, kAllProfiles);
+ capability.video_codecs.emplace(media::VideoCodec::kH264, kAllProfiles);
#endif
#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
#if BUILDFLAG(IS_CHROMEOS_LACROS)
if (base::CommandLine::ForCurrentProcess()->HasSwitch(
switches::kLacrosEnablePlatformHevc)) {
- capability.video_codecs.emplace(media::VideoCodec::kCodecHEVC,
- kAllProfiles);
+ capability.video_codecs.emplace(media::VideoCodec::kHEVC, kAllProfiles);
}
#else
- capability.video_codecs.emplace(media::VideoCodec::kCodecHEVC, kAllProfiles);
+ capability.video_codecs.emplace(media::VideoCodec::kHEVC, kAllProfiles);
#endif // BUILDFLAG(IS_CHROMEOS_LACROS)
#endif
diff --git a/chrome/renderer/media/chrome_key_systems.cc b/chrome/renderer/media/chrome_key_systems.cc
index 56364b0..cd32161 100644
--- a/chrome/renderer/media/chrome_key_systems.cc
+++ b/chrome/renderer/media/chrome_key_systems.cc
@@ -161,7 +161,7 @@
supported_vp9_codecs |= media::EME_CODEC_VP9_PROFILE2;
break;
default:
- DVLOG(1) << "Unexpected " << GetCodecName(media::VideoCodec::kCodecVP9)
+ DVLOG(1) << "Unexpected " << GetCodecName(media::VideoCodec::kVP9)
<< " profile: " << GetProfileName(profile);
break;
}
@@ -196,7 +196,7 @@
supported_hevc_codecs |= media::EME_CODEC_HEVC_PROFILE_MAIN10;
break;
default:
- DVLOG(1) << "Unexpected " << GetCodecName(media::VideoCodec::kCodecHEVC)
+ DVLOG(1) << "Unexpected " << GetCodecName(media::VideoCodec::kHEVC)
<< " profile: " << GetProfileName(profile);
break;
}
@@ -211,17 +211,17 @@
for (const auto& codec : capability.audio_codecs) {
switch (codec) {
- case media::AudioCodec::kCodecOpus:
+ case media::AudioCodec::kOpus:
supported_codecs |= media::EME_CODEC_OPUS;
break;
- case media::AudioCodec::kCodecVorbis:
+ case media::AudioCodec::kVorbis:
supported_codecs |= media::EME_CODEC_VORBIS;
break;
- case media::AudioCodec::kCodecFLAC:
+ case media::AudioCodec::kFLAC:
supported_codecs |= media::EME_CODEC_FLAC;
break;
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- case media::AudioCodec::kCodecAAC:
+ case media::AudioCodec::kAAC:
supported_codecs |= media::EME_CODEC_AAC;
break;
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
@@ -235,23 +235,23 @@
// with some video codecs.
for (const auto& codec : capability.video_codecs) {
switch (codec.first) {
- case media::VideoCodec::kCodecVP8:
+ case media::VideoCodec::kVP8:
supported_codecs |= media::EME_CODEC_VP8;
break;
- case media::VideoCodec::kCodecVP9:
+ case media::VideoCodec::kVP9:
supported_codecs |= GetVP9Codecs(codec.second);
      break;
- case media::VideoCodec::kCodecAV1:
+ case media::VideoCodec::kAV1:
supported_codecs |= media::EME_CODEC_AV1;
break;
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- case media::VideoCodec::kCodecH264:
+ case media::VideoCodec::kH264:
supported_codecs |= media::EME_CODEC_AVC1;
break;
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
- case media::VideoCodec::kCodecHEVC:
+ case media::VideoCodec::kHEVC:
supported_codecs |= GetHevcCodecs(codec.second);
break;
#endif // BUILDFLAG(ENABLE_PLATFORM_HEVC)
diff --git a/chrome/services/media_gallery_util/video_thumbnail_parser.cc b/chrome/services/media_gallery_util/video_thumbnail_parser.cc
index 8fab6e0e..316ea51b 100644
--- a/chrome/services/media_gallery_util/video_thumbnail_parser.cc
+++ b/chrome/services/media_gallery_util/video_thumbnail_parser.cc
@@ -61,7 +61,7 @@
!BUILDFLAG(ENABLE_FFMPEG_VIDEO_DECODERS)
// H264 currently needs to be decoded in GPU process when no software decoder
// is provided.
- if (config.codec() == media::VideoCodec::kCodecH264) {
+ if (config.codec() == media::VideoCodec::kH264) {
std::move(video_frame_callback)
.Run(success,
chrome::mojom::VideoFrameData::NewEncodedData(std::move(data)),
@@ -70,8 +70,8 @@
}
#endif
- if (config.codec() != media::VideoCodec::kCodecVP8 &&
- config.codec() != media::VideoCodec::kCodecVP9) {
+ if (config.codec() != media::VideoCodec::kVP8 &&
+ config.codec() != media::VideoCodec::kVP9) {
std::move(video_frame_callback)
.Run(false, chrome::mojom::VideoFrameData::New(), absl::nullopt);
return;
diff --git a/chromecast/media/base/media_codec_support.cc b/chromecast/media/base/media_codec_support.cc
index 056e66c..edd24f1 100644
--- a/chromecast/media/base/media_codec_support.cc
+++ b/chromecast/media/base/media_codec_support.cc
@@ -13,25 +13,25 @@
AudioCodec ToCastAudioCodec(const ::media::AudioCodec codec) {
switch (codec) {
- case ::media::kCodecAAC:
+ case ::media::AudioCodec::kAAC:
return kCodecAAC;
- case ::media::kCodecMP3:
+ case ::media::AudioCodec::kMP3:
return kCodecMP3;
- case ::media::kCodecPCM:
+ case ::media::AudioCodec::kPCM:
return kCodecPCM;
- case ::media::kCodecPCM_S16BE:
+ case ::media::AudioCodec::kPCM_S16BE:
return kCodecPCM_S16BE;
- case ::media::kCodecVorbis:
+ case ::media::AudioCodec::kVorbis:
return kCodecVorbis;
- case ::media::kCodecOpus:
+ case ::media::AudioCodec::kOpus:
return kCodecOpus;
- case ::media::kCodecEAC3:
+ case ::media::AudioCodec::kEAC3:
return kCodecEAC3;
- case ::media::kCodecAC3:
+ case ::media::AudioCodec::kAC3:
return kCodecAC3;
- case ::media::kCodecFLAC:
+ case ::media::AudioCodec::kFLAC:
return kCodecFLAC;
- case ::media::kCodecMpegHAudio:
+ case ::media::AudioCodec::kMpegHAudio:
return kCodecMpegHAudio;
default:
LOG(ERROR) << "Unsupported audio codec " << codec;
@@ -42,15 +42,15 @@
VideoCodec ToCastVideoCodec(const ::media::VideoCodec video_codec,
const ::media::VideoCodecProfile codec_profile) {
switch (video_codec) {
- case ::media::kCodecH264:
+ case ::media::VideoCodec::kH264:
return kCodecH264;
- case ::media::kCodecVP8:
+ case ::media::VideoCodec::kVP8:
return kCodecVP8;
- case ::media::kCodecVP9:
+ case ::media::VideoCodec::kVP9:
return kCodecVP9;
- case ::media::kCodecHEVC:
+ case ::media::VideoCodec::kHEVC:
return kCodecHEVC;
- case ::media::kCodecDolbyVision:
+ case ::media::VideoCodec::kDolbyVision:
if (codec_profile == ::media::DOLBYVISION_PROFILE0 ||
codec_profile == ::media::DOLBYVISION_PROFILE9) {
return kCodecDolbyVisionH264;
@@ -62,7 +62,7 @@
}
LOG(ERROR) << "Unsupported video codec profile " << codec_profile;
break;
- case ::media::kCodecAV1:
+ case ::media::VideoCodec::kAV1:
return kCodecAV1;
default:
LOG(ERROR) << "Unsupported video codec " << video_codec;
diff --git a/chromecast/media/cma/base/decoder_config_adapter.cc b/chromecast/media/cma/base/decoder_config_adapter.cc
index 3e14af0..f0e9d133 100644
--- a/chromecast/media/cma/base/decoder_config_adapter.cc
+++ b/chromecast/media/cma/base/decoder_config_adapter.cc
@@ -21,25 +21,25 @@
// unsupported codec will be converted to chromecast::media::kCodecUnknown.
AudioCodec ToAudioCodec(const ::media::AudioCodec audio_codec) {
switch (audio_codec) {
- case ::media::kCodecAAC:
+ case ::media::AudioCodec::kAAC:
return kCodecAAC;
- case ::media::kCodecMP3:
+ case ::media::AudioCodec::kMP3:
return kCodecMP3;
- case ::media::kCodecPCM:
+ case ::media::AudioCodec::kPCM:
return kCodecPCM;
- case ::media::kCodecPCM_S16BE:
+ case ::media::AudioCodec::kPCM_S16BE:
return kCodecPCM_S16BE;
- case ::media::kCodecVorbis:
+ case ::media::AudioCodec::kVorbis:
return kCodecVorbis;
- case ::media::kCodecOpus:
+ case ::media::AudioCodec::kOpus:
return kCodecOpus;
- case ::media::kCodecFLAC:
+ case ::media::AudioCodec::kFLAC:
return kCodecFLAC;
- case ::media::kCodecEAC3:
+ case ::media::AudioCodec::kEAC3:
return kCodecEAC3;
- case ::media::kCodecAC3:
+ case ::media::AudioCodec::kAC3:
return kCodecAC3;
- case ::media::kCodecMpegHAudio:
+ case ::media::AudioCodec::kMpegHAudio:
return kCodecMpegHAudio;
default:
LOG(ERROR) << "Unsupported audio codec " << audio_codec;
@@ -109,29 +109,29 @@
const chromecast::media::AudioCodec codec) {
switch (codec) {
case kAudioCodecUnknown:
- return ::media::kUnknownAudioCodec;
+ return ::media::AudioCodec::kUnknown;
case kCodecAAC:
- return ::media::kCodecAAC;
+ return ::media::AudioCodec::kAAC;
case kCodecMP3:
- return ::media::kCodecMP3;
+ return ::media::AudioCodec::kMP3;
case kCodecPCM:
- return ::media::kCodecPCM;
+ return ::media::AudioCodec::kPCM;
case kCodecPCM_S16BE:
- return ::media::kCodecPCM_S16BE;
+ return ::media::AudioCodec::kPCM_S16BE;
case kCodecVorbis:
- return ::media::kCodecVorbis;
+ return ::media::AudioCodec::kVorbis;
case kCodecOpus:
- return ::media::kCodecOpus;
+ return ::media::AudioCodec::kOpus;
case kCodecFLAC:
- return ::media::kCodecFLAC;
+ return ::media::AudioCodec::kFLAC;
case kCodecEAC3:
- return ::media::kCodecEAC3;
+ return ::media::AudioCodec::kEAC3;
case kCodecAC3:
- return ::media::kCodecAC3;
+ return ::media::AudioCodec::kAC3;
case kCodecMpegHAudio:
- return ::media::kCodecMpegHAudio;
+ return ::media::AudioCodec::kMpegHAudio;
default:
- return ::media::kUnknownAudioCodec;
+ return ::media::AudioCodec::kUnknown;
}
}
diff --git a/chromecast/media/cma/base/demuxer_stream_for_test.cc b/chromecast/media/cma/base/demuxer_stream_for_test.cc
index 73240257..cb52ecb 100644
--- a/chromecast/media/cma/base/demuxer_stream_for_test.cc
+++ b/chromecast/media/cma/base/demuxer_stream_for_test.cc
@@ -54,7 +54,7 @@
gfx::Rect visible_rect(640, 480);
gfx::Size natural_size(640, 480);
return ::media::VideoDecoderConfig(
- ::media::kCodecH264, ::media::VIDEO_CODEC_PROFILE_UNKNOWN,
+ ::media::VideoCodec::kH264, ::media::VIDEO_CODEC_PROFILE_UNKNOWN,
::media::VideoDecoderConfig::AlphaMode::kIsOpaque,
::media::VideoColorSpace(), ::media::kNoTransformation, coded_size,
visible_rect, natural_size, ::media::EmptyExtraData(),
diff --git a/chromecast/media/cma/pipeline/audio_video_pipeline_impl_unittest.cc b/chromecast/media/cma/pipeline/audio_video_pipeline_impl_unittest.cc
index 413974b..767ef6b0 100644
--- a/chromecast/media/cma/pipeline/audio_video_pipeline_impl_unittest.cc
+++ b/chromecast/media/cma/pipeline/audio_video_pipeline_impl_unittest.cc
@@ -132,7 +132,7 @@
if (have_audio_) {
::media::AudioDecoderConfig audio_config(
- ::media::kCodecMP3, ::media::kSampleFormatS16,
+ ::media::AudioCodec::kMP3, ::media::kSampleFormatS16,
::media::CHANNEL_LAYOUT_STEREO, 44100, ::media::EmptyExtraData(),
::media::EncryptionScheme::kUnencrypted);
AvPipelineClient client;
@@ -151,7 +151,7 @@
if (have_video_) {
std::vector<::media::VideoDecoderConfig> video_configs;
video_configs.push_back(::media::VideoDecoderConfig(
- ::media::kCodecH264, ::media::H264PROFILE_MAIN,
+ ::media::VideoCodec::kH264, ::media::H264PROFILE_MAIN,
::media::VideoDecoderConfig::AlphaMode::kIsOpaque,
::media::VideoColorSpace(), ::media::kNoTransformation,
gfx::Size(640, 480), gfx::Rect(0, 0, 640, 480), gfx::Size(640, 480),
diff --git a/chromecast/media/cma/test/mock_frame_provider.cc b/chromecast/media/cma/test/mock_frame_provider.cc
index 4a96c764..4b58a6b 100644
--- a/chromecast/media/cma/test/mock_frame_provider.cc
+++ b/chromecast/media/cma/test/mock_frame_provider.cc
@@ -80,14 +80,14 @@
gfx::Rect visible_rect(640, 480);
gfx::Size natural_size(640, 480);
video_config = ::media::VideoDecoderConfig(
- ::media::kCodecH264, ::media::VIDEO_CODEC_PROFILE_UNKNOWN,
+ ::media::VideoCodec::kH264, ::media::VIDEO_CODEC_PROFILE_UNKNOWN,
::media::VideoDecoderConfig::AlphaMode::kIsOpaque,
::media::VideoColorSpace(), ::media::kNoTransformation, coded_size,
visible_rect, natural_size, ::media::EmptyExtraData(),
::media::EncryptionScheme::kUnencrypted);
audio_config = ::media::AudioDecoderConfig(
- ::media::kCodecAAC, ::media::kSampleFormatS16,
+ ::media::AudioCodec::kAAC, ::media::kSampleFormatS16,
::media::CHANNEL_LAYOUT_STEREO, 44100, ::media::EmptyExtraData(),
::media::EncryptionScheme::kUnencrypted);
}
diff --git a/chromecast/media/gpu/cast_gpu_factory_impl.cc b/chromecast/media/gpu/cast_gpu_factory_impl.cc
index 383362a..0a90dff 100644
--- a/chromecast/media/gpu/cast_gpu_factory_impl.cc
+++ b/chromecast/media/gpu/cast_gpu_factory_impl.cc
@@ -139,7 +139,7 @@
::media::GpuVideoAcceleratorFactories::Supported
CastGpuFactoryImpl::IsDecoderConfigSupported(
const ::media::VideoDecoderConfig& config) {
- if (config.codec() == ::media::VideoCodec::kCodecH264) {
+ if (config.codec() == ::media::VideoCodec::kH264) {
return Supported::kTrue;
}
return Supported::kFalse;
diff --git a/chromecast/renderer/cast_content_renderer_client.cc b/chromecast/renderer/cast_content_renderer_client.cc
index 52b1487e..f79a18a 100644
--- a/chromecast/renderer/cast_content_renderer_client.cc
+++ b/chromecast/renderer/cast_content_renderer_client.cc
@@ -66,9 +66,11 @@
namespace shell {
namespace {
bool IsSupportedBitstreamAudioCodecHelper(::media::AudioCodec codec, int mask) {
- return (codec == ::media::kCodecAC3 && (kBitstreamAudioCodecAc3 & mask)) ||
- (codec == ::media::kCodecEAC3 && (kBitstreamAudioCodecEac3 & mask)) ||
- (codec == ::media::kCodecMpegHAudio &&
+ return (codec == ::media::AudioCodec::kAC3 &&
+ (kBitstreamAudioCodecAc3 & mask)) ||
+ (codec == ::media::AudioCodec::kEAC3 &&
+ (kBitstreamAudioCodecEac3 & mask)) ||
+ (codec == ::media::AudioCodec::kMpegHAudio &&
(kBitstreamAudioCodecMpegHAudio & mask));
}
} // namespace
@@ -256,15 +258,15 @@
// No ATV device we know of has (E)AC3 decoder, so it relies on the audio sink
// device.
- if (type.codec == ::media::kCodecEAC3) {
+ if (type.codec == ::media::AudioCodec::kEAC3) {
return kBitstreamAudioCodecEac3 &
supported_bitstream_audio_codecs_info_.codecs;
}
- if (type.codec == ::media::kCodecAC3) {
+ if (type.codec == ::media::AudioCodec::kAC3) {
return kBitstreamAudioCodecAc3 &
supported_bitstream_audio_codecs_info_.codecs;
}
- if (type.codec == ::media::kCodecMpegHAudio) {
+ if (type.codec == ::media::AudioCodec::kMpegHAudio) {
return kBitstreamAudioCodecMpegHAudio &
supported_bitstream_audio_codecs_info_.codecs;
}
diff --git a/chromecast/ui/media_overlay_impl.cc b/chromecast/ui/media_overlay_impl.cc
index f6d9140..372b9f4 100644
--- a/chromecast/ui/media_overlay_impl.cc
+++ b/chromecast/ui/media_overlay_impl.cc
@@ -193,9 +193,9 @@
void MediaOverlayImpl::OnAudioPipelineInitialized(
media::MediaPipelineImpl* pipeline,
const ::media::AudioDecoderConfig& config) {
- if (config.codec() == ::media::AudioCodec::kCodecAC3 ||
- config.codec() == ::media::AudioCodec::kCodecEAC3 ||
- config.codec() == ::media::AudioCodec::kCodecMpegHAudio) {
+ if (config.codec() == ::media::AudioCodec::kAC3 ||
+ config.codec() == ::media::AudioCodec::kEAC3 ||
+ config.codec() == ::media::AudioCodec::kMpegHAudio) {
passthrough_pipelines_.insert(pipeline);
}
diff --git a/components/cast_streaming/browser/cast_streaming_session_unittest.cc b/components/cast_streaming/browser/cast_streaming_session_unittest.cc
index 3cdf0739..159b20a 100644
--- a/components/cast_streaming/browser/cast_streaming_session_unittest.cc
+++ b/components/cast_streaming/browser/cast_streaming_session_unittest.cc
@@ -19,7 +19,7 @@
media::AudioDecoderConfig GetDefaultAudioConfig() {
return media::AudioDecoderConfig(
- media::AudioCodec::kCodecOpus, media::SampleFormat::kSampleFormatF32,
+ media::AudioCodec::kOpus, media::SampleFormat::kSampleFormatF32,
media::ChannelLayout::CHANNEL_LAYOUT_STEREO,
48000 /* samples_per_second */, media::EmptyExtraData(),
media::EncryptionScheme::kUnencrypted);
@@ -30,7 +30,7 @@
const gfx::Rect kVideoRect(kVideoSize);
return media::VideoDecoderConfig(
- media::VideoCodec::kCodecVP8, media::VideoCodecProfile::VP8PROFILE_MIN,
+ media::VideoCodec::kVP8, media::VideoCodecProfile::VP8PROFILE_MIN,
media::VideoDecoderConfig::AlphaMode::kIsOpaque, media::VideoColorSpace(),
media::VideoTransformation(), kVideoSize, kVideoRect, kVideoSize,
media::EmptyExtraData(), media::EncryptionScheme::kUnencrypted);
diff --git a/components/cast_streaming/browser/test/cast_streaming_test_receiver.cc b/components/cast_streaming/browser/test/cast_streaming_test_receiver.cc
index cfc927f..3282e88 100644
--- a/components/cast_streaming/browser/test/cast_streaming_test_receiver.cc
+++ b/components/cast_streaming/browser/test/cast_streaming_test_receiver.cc
@@ -19,10 +19,10 @@
VLOG(1) << __func__;
auto stream_config =
std::make_unique<cast_streaming::ReceiverSession::AVConstraints>(
- ToVideoCaptureConfigCodecs(media::VideoCodec::kCodecH264,
- media::VideoCodec::kCodecVP8),
- ToAudioCaptureConfigCodecs(media::AudioCodec::kCodecAAC,
- media::AudioCodec::kCodecOpus));
+ ToVideoCaptureConfigCodecs(media::VideoCodec::kH264,
+ media::VideoCodec::kVP8),
+ ToAudioCaptureConfigCodecs(media::AudioCodec::kAAC,
+ media::AudioCodec::kOpus));
receiver_session_.Start(this, std::move(stream_config),
std::move(message_port),
base::SequencedTaskRunnerHandle::Get());
diff --git a/components/cast_streaming/public/config_conversions.cc b/components/cast_streaming/public/config_conversions.cc
index 77cc948..246230f 100644
--- a/components/cast_streaming/public/config_conversions.cc
+++ b/components/cast_streaming/public/config_conversions.cc
@@ -39,36 +39,36 @@
openscreen::cast::AudioCodec codec) {
switch (codec) {
case openscreen::cast::AudioCodec::kAac:
- return media::AudioCodec::kCodecAAC;
+ return media::AudioCodec::kAAC;
case openscreen::cast::AudioCodec::kOpus:
- return media::AudioCodec::kCodecOpus;
+ return media::AudioCodec::kOpus;
case openscreen::cast::AudioCodec::kNotSpecified:
break;
}
NOTREACHED();
- return media::AudioCodec::kUnknownAudioCodec;
+ return media::AudioCodec::kUnknown;
}
media::VideoCodec ToVideoDecoderConfigCodec(
openscreen::cast::VideoCodec codec) {
switch (codec) {
case openscreen::cast::VideoCodec::kH264:
- return media::VideoCodec::kCodecH264;
+ return media::VideoCodec::kH264;
case openscreen::cast::VideoCodec::kVp8:
- return media::VideoCodec::kCodecVP8;
+ return media::VideoCodec::kVP8;
case openscreen::cast::VideoCodec::kHevc:
- return media::VideoCodec::kCodecHEVC;
+ return media::VideoCodec::kHEVC;
case openscreen::cast::VideoCodec::kVp9:
- return media::VideoCodec::kCodecVP9;
+ return media::VideoCodec::kVP9;
case openscreen::cast::VideoCodec::kAv1:
- return media::VideoCodec::kCodecAV1;
+ return media::VideoCodec::kAV1;
case openscreen::cast::VideoCodec::kNotSpecified:
break;
}
NOTREACHED();
- return media::VideoCodec::kUnknownVideoCodec;
+ return media::VideoCodec::kUnknown;
}
} // namespace
@@ -76,9 +76,9 @@
openscreen::cast::AudioCodec ToAudioCaptureConfigCodec(
media::AudioCodec codec) {
switch (codec) {
- case media::AudioCodec::kCodecAAC:
+ case media::AudioCodec::kAAC:
return openscreen::cast::AudioCodec::kAac;
- case media::AudioCodec::kCodecOpus:
+ case media::AudioCodec::kOpus:
return openscreen::cast::AudioCodec::kOpus;
default:
break;
@@ -91,13 +91,13 @@
openscreen::cast::VideoCodec ToVideoCaptureConfigCodec(
media::VideoCodec codec) {
switch (codec) {
- case media::VideoCodec::kCodecH264:
+ case media::VideoCodec::kH264:
return openscreen::cast::VideoCodec::kH264;
- case media::VideoCodec::kCodecVP8:
+ case media::VideoCodec::kVP8:
return openscreen::cast::VideoCodec::kVp8;
- case media::VideoCodec::kCodecHEVC:
+ case media::VideoCodec::kHEVC:
return openscreen::cast::VideoCodec::kHevc;
- case media::VideoCodec::kCodecVP9:
+ case media::VideoCodec::kVP9:
return openscreen::cast::VideoCodec::kVp9;
default:
break;
diff --git a/components/cast_streaming/public/config_conversions_unittest.cc b/components/cast_streaming/public/config_conversions_unittest.cc
index b8fd2faf..122bf0ac 100644
--- a/components/cast_streaming/public/config_conversions_unittest.cc
+++ b/components/cast_streaming/public/config_conversions_unittest.cc
@@ -109,15 +109,14 @@
TEST(ConfigConversionsTest, AudioConfigCodecConversion) {
auto capture_config = CreateAudioCaptureConfig();
- auto decoder_config =
- CreateAudioDecoderConfig(media::AudioCodec::kCodecAAC,
- media::ChannelLayout::CHANNEL_LAYOUT_STEREO, 42);
+ auto decoder_config = CreateAudioDecoderConfig(
+ media::AudioCodec::kAAC, media::ChannelLayout::CHANNEL_LAYOUT_STEREO, 42);
ValidateAudioConfig(ToAudioDecoderConfig(capture_config), decoder_config);
ValidateAudioConfig(ToAudioCaptureConfig(decoder_config), capture_config);
capture_config.codec = openscreen::cast::AudioCodec::kOpus;
decoder_config =
- CreateAudioDecoderConfig(media::AudioCodec::kCodecOpus,
+ CreateAudioDecoderConfig(media::AudioCodec::kOpus,
media::ChannelLayout::CHANNEL_LAYOUT_STEREO, 42);
ValidateAudioConfig(ToAudioDecoderConfig(capture_config), decoder_config);
ValidateAudioConfig(ToAudioCaptureConfig(decoder_config), capture_config);
@@ -125,16 +124,14 @@
TEST(ConfigConversionsTest, AudioConfigChannelsConversion) {
auto capture_config = CreateAudioCaptureConfig();
- auto decoder_config =
- CreateAudioDecoderConfig(media::AudioCodec::kCodecAAC,
- media::ChannelLayout::CHANNEL_LAYOUT_STEREO, 42);
+ auto decoder_config = CreateAudioDecoderConfig(
+ media::AudioCodec::kAAC, media::ChannelLayout::CHANNEL_LAYOUT_STEREO, 42);
ValidateAudioConfig(ToAudioDecoderConfig(capture_config), decoder_config);
ValidateAudioConfig(ToAudioCaptureConfig(decoder_config), capture_config);
capture_config.channels = 1;
- decoder_config =
- CreateAudioDecoderConfig(media::AudioCodec::kCodecAAC,
- media::ChannelLayout::CHANNEL_LAYOUT_MONO, 42);
+ decoder_config = CreateAudioDecoderConfig(
+ media::AudioCodec::kAAC, media::ChannelLayout::CHANNEL_LAYOUT_MONO, 42);
ValidateAudioConfig(ToAudioDecoderConfig(capture_config), decoder_config);
ValidateAudioConfig(ToAudioCaptureConfig(decoder_config), capture_config);
@@ -143,30 +140,27 @@
TEST(ConfigConversionsTest, AudioConfigSampleRateConversion) {
auto capture_config = CreateAudioCaptureConfig();
- auto decoder_config =
- CreateAudioDecoderConfig(media::AudioCodec::kCodecAAC,
- media::ChannelLayout::CHANNEL_LAYOUT_STEREO, 42);
+ auto decoder_config = CreateAudioDecoderConfig(
+ media::AudioCodec::kAAC, media::ChannelLayout::CHANNEL_LAYOUT_STEREO, 42);
ValidateAudioConfig(ToAudioDecoderConfig(capture_config), decoder_config);
ValidateAudioConfig(ToAudioCaptureConfig(decoder_config), capture_config);
capture_config.sample_rate = 1234;
decoder_config = CreateAudioDecoderConfig(
- media::AudioCodec::kCodecAAC, media::ChannelLayout::CHANNEL_LAYOUT_STEREO,
+ media::AudioCodec::kAAC, media::ChannelLayout::CHANNEL_LAYOUT_STEREO,
1234);
ValidateAudioConfig(ToAudioDecoderConfig(capture_config), decoder_config);
ValidateAudioConfig(ToAudioCaptureConfig(decoder_config), capture_config);
capture_config.sample_rate = -1;
- decoder_config =
- CreateAudioDecoderConfig(media::AudioCodec::kCodecAAC,
- media::ChannelLayout::CHANNEL_LAYOUT_STEREO, -1);
+ decoder_config = CreateAudioDecoderConfig(
+ media::AudioCodec::kAAC, media::ChannelLayout::CHANNEL_LAYOUT_STEREO, -1);
ValidateAudioConfig(ToAudioDecoderConfig(capture_config), decoder_config);
ValidateAudioConfig(ToAudioCaptureConfig(decoder_config), capture_config);
capture_config.sample_rate = 0;
- decoder_config =
- CreateAudioDecoderConfig(media::AudioCodec::kCodecAAC,
- media::ChannelLayout::CHANNEL_LAYOUT_STEREO, 0);
+ decoder_config = CreateAudioDecoderConfig(
+ media::AudioCodec::kAAC, media::ChannelLayout::CHANNEL_LAYOUT_STEREO, 0);
ValidateAudioConfig(ToAudioDecoderConfig(capture_config), decoder_config);
ValidateAudioConfig(ToAudioCaptureConfig(decoder_config), capture_config);
}
@@ -176,29 +170,29 @@
const int height = 720;
auto capture_config = CreateVideoCaptureConfig();
auto decoder_config = CreateVideoDecoderConfig(
- media::VideoCodec::kCodecH264,
- media::VideoCodecProfile::H264PROFILE_BASELINE, width, height);
+ media::VideoCodec::kH264, media::VideoCodecProfile::H264PROFILE_BASELINE,
+ width, height);
ValidateVideoConfig(ToVideoDecoderConfig(capture_config), decoder_config);
ValidateVideoConfig(ToVideoCaptureConfig(decoder_config), capture_config);
capture_config.codec = openscreen::cast::VideoCodec::kVp8;
decoder_config = CreateVideoDecoderConfig(
- media::VideoCodec::kCodecVP8, media::VideoCodecProfile::VP8PROFILE_MIN,
- width, height);
+ media::VideoCodec::kVP8, media::VideoCodecProfile::VP8PROFILE_MIN, width,
+ height);
ValidateVideoConfig(ToVideoDecoderConfig(capture_config), decoder_config);
ValidateVideoConfig(ToVideoCaptureConfig(decoder_config), capture_config);
capture_config.codec = openscreen::cast::VideoCodec::kHevc;
decoder_config = CreateVideoDecoderConfig(
- media::VideoCodec::kCodecHEVC, media::VideoCodecProfile::HEVCPROFILE_MAIN,
+ media::VideoCodec::kHEVC, media::VideoCodecProfile::HEVCPROFILE_MAIN,
width, height);
ValidateVideoConfig(ToVideoDecoderConfig(capture_config), decoder_config);
ValidateVideoConfig(ToVideoCaptureConfig(decoder_config), capture_config);
capture_config.codec = openscreen::cast::VideoCodec::kVp9;
decoder_config = CreateVideoDecoderConfig(
- media::VideoCodec::kCodecVP9,
- media::VideoCodecProfile::VP9PROFILE_PROFILE0, width, height);
+ media::VideoCodec::kVP9, media::VideoCodecProfile::VP9PROFILE_PROFILE0,
+ width, height);
ValidateVideoConfig(ToVideoDecoderConfig(capture_config), decoder_config);
ValidateVideoConfig(ToVideoCaptureConfig(decoder_config), capture_config);
}
@@ -206,8 +200,8 @@
TEST(ConfigConversionsTest, VideoConfigResolutionConversion) {
auto capture_config = CreateVideoCaptureConfig();
auto decoder_config = CreateVideoDecoderConfig(
- media::VideoCodec::kCodecH264,
- media::VideoCodecProfile::H264PROFILE_BASELINE, 1080, 720);
+ media::VideoCodec::kH264, media::VideoCodecProfile::H264PROFILE_BASELINE,
+ 1080, 720);
ValidateVideoConfig(ToVideoDecoderConfig(capture_config), decoder_config);
ValidateVideoConfig(ToVideoCaptureConfig(decoder_config), capture_config);
@@ -216,24 +210,24 @@
capture_config.resolutions[0].width = 42;
capture_config.resolutions[0].height = 16;
decoder_config = CreateVideoDecoderConfig(
- media::VideoCodec::kCodecH264,
- media::VideoCodecProfile::H264PROFILE_BASELINE, 42, 16);
+ media::VideoCodec::kH264, media::VideoCodecProfile::H264PROFILE_BASELINE,
+ 42, 16);
ValidateVideoConfig(ToVideoDecoderConfig(capture_config), decoder_config);
ValidateVideoConfig(ToVideoCaptureConfig(decoder_config), capture_config);
capture_config.resolutions[0].width = 1;
capture_config.resolutions[0].height = 2;
decoder_config = CreateVideoDecoderConfig(
- media::VideoCodec::kCodecH264,
- media::VideoCodecProfile::H264PROFILE_BASELINE, 1, 2);
+ media::VideoCodec::kH264, media::VideoCodecProfile::H264PROFILE_BASELINE,
+ 1, 2);
ValidateVideoConfig(ToVideoDecoderConfig(capture_config), decoder_config);
ValidateVideoConfig(ToVideoCaptureConfig(decoder_config), capture_config);
capture_config.resolutions[0].width = 0;
capture_config.resolutions[0].height = 0;
decoder_config = CreateVideoDecoderConfig(
- media::VideoCodec::kCodecH264,
- media::VideoCodecProfile::H264PROFILE_BASELINE, 0, 0);
+ media::VideoCodec::kH264, media::VideoCodecProfile::H264PROFILE_BASELINE,
+ 0, 0);
ValidateVideoConfig(ToVideoDecoderConfig(capture_config), decoder_config);
ValidateVideoConfig(ToVideoCaptureConfig(decoder_config), capture_config);
}
diff --git a/components/cdm/browser/cdm_message_filter_android.cc b/components/cdm/browser/cdm_message_filter_android.cc
index 84ca02d..9028c50 100644
--- a/components/cdm/browser/cdm_message_filter_android.cc
+++ b/components/cdm/browser/cdm_message_filter_android.cc
@@ -40,22 +40,22 @@
};
const CodecInfo<media::VideoCodec> kWebMVideoCodecsToQuery[] = {
- {media::EME_CODEC_VP8, media::kCodecVP8},
- {media::EME_CODEC_VP9_PROFILE0, media::kCodecVP9},
+ {media::EME_CODEC_VP8, media::VideoCodec::kVP8},
+ {media::EME_CODEC_VP9_PROFILE0, media::VideoCodec::kVP9},
// Checking for EME_CODEC_VP9_PROFILE2 is handled in code below.
};
const CodecInfo<media::VideoCodec> kMP4VideoCodecsToQuery[] = {
- {media::EME_CODEC_VP9_PROFILE0, media::kCodecVP9},
+ {media::EME_CODEC_VP9_PROFILE0, media::VideoCodec::kVP9},
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- {media::EME_CODEC_AVC1, media::kCodecH264},
+ {media::EME_CODEC_AVC1, media::VideoCodec::kH264},
#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
- {media::EME_CODEC_HEVC_PROFILE_MAIN, media::kCodecHEVC},
+ {media::EME_CODEC_HEVC_PROFILE_MAIN, media::VideoCodec::kHEVC},
#endif
#if BUILDFLAG(ENABLE_PLATFORM_DOLBY_VISION)
- {media::EME_CODEC_DOLBY_VISION_AVC, media::kCodecDolbyVision},
+ {media::EME_CODEC_DOLBY_VISION_AVC, media::VideoCodec::kDolbyVision},
#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
- {media::EME_CODEC_DOLBY_VISION_HEVC, media::kCodecDolbyVision},
+ {media::EME_CODEC_DOLBY_VISION_HEVC, media::VideoCodec::kDolbyVision},
#endif
#endif
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
@@ -64,16 +64,16 @@
// Vorbis is not supported. See http://crbug.com/710924 for details.
const CodecInfo<media::AudioCodec> kWebMAudioCodecsToQuery[] = {
- {media::EME_CODEC_OPUS, media::kCodecOpus},
+ {media::EME_CODEC_OPUS, media::AudioCodec::kOpus},
};
const CodecInfo<media::AudioCodec> kMP4AudioCodecsToQuery[] = {
- {media::EME_CODEC_FLAC, media::kCodecFLAC},
+ {media::EME_CODEC_FLAC, media::AudioCodec::kFLAC},
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- {media::EME_CODEC_AAC, media::kCodecAAC},
+ {media::EME_CODEC_AAC, media::AudioCodec::kAAC},
#if BUILDFLAG(ENABLE_PLATFORM_AC3_EAC3_AUDIO)
- {media::EME_CODEC_AC3, media::kCodecAC3},
- {media::EME_CODEC_EAC3, media::kCodecEAC3},
+ {media::EME_CODEC_AC3, media::AudioCodec::kAC3},
+ {media::EME_CODEC_EAC3, media::AudioCodec::kEAC3},
#endif
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
};
@@ -128,7 +128,7 @@
auto iter =
std::find_if(profiles.begin(), profiles.end(),
[](const media::CodecProfileLevel& profile) {
- return profile.codec == media::kCodecVP9 &&
+ return profile.codec == media::VideoCodec::kVP9 &&
profile.profile == media::VP9PROFILE_PROFILE2;
});
if (iter != profiles.end()) {
@@ -143,7 +143,7 @@
auto iter =
std::find_if(profiles.begin(), profiles.end(),
[](const media::CodecProfileLevel& profile) {
- return profile.codec == media::kCodecHEVC &&
+ return profile.codec == media::VideoCodec::kHEVC &&
profile.profile == media::HEVCPROFILE_MAIN10;
});
if (iter != profiles.end()) {
diff --git a/components/cdm/common/cdm_manifest.cc b/components/cdm/common/cdm_manifest.cc
index 4b2fe24..2378886c 100644
--- a/components/cdm/common/cdm_manifest.cc
+++ b/components/cdm/common/cdm_manifest.cc
@@ -232,14 +232,14 @@
const std::vector<media::VideoCodecProfile> kAllProfiles = {};
for (const auto& codec : supported_codecs) {
if (codec == kCdmSupportedCodecVp8) {
- result.emplace(media::VideoCodec::kCodecVP8, kAllProfiles);
+ result.emplace(media::VideoCodec::kVP8, kAllProfiles);
} else if (codec == kCdmSupportedCodecVp9) {
- result.emplace(media::VideoCodec::kCodecVP9, kAllProfiles);
+ result.emplace(media::VideoCodec::kVP9, kAllProfiles);
} else if (codec == kCdmSupportedCodecAv1) {
- result.emplace(media::VideoCodec::kCodecAV1, kAllProfiles);
+ result.emplace(media::VideoCodec::kAV1, kAllProfiles);
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
} else if (codec == kCdmSupportedCodecAvc1) {
- result.emplace(media::VideoCodec::kCodecH264, kAllProfiles);
+ result.emplace(media::VideoCodec::kH264, kAllProfiles);
#endif
}
}
diff --git a/components/cdm/common/cdm_manifest_unittest.cc b/components/cdm/common/cdm_manifest_unittest.cc
index 50602dde..18087bc 100644
--- a/components/cdm/common/cdm_manifest_unittest.cc
+++ b/components/cdm/common/cdm_manifest_unittest.cc
@@ -186,13 +186,13 @@
CdmCapability capability;
EXPECT_TRUE(ParseCdmManifest(manifest, &capability));
CheckVideoCodecs(capability.video_codecs,
- {media::VideoCodec::kCodecVP8, media::VideoCodec::kCodecVP9,
- media::VideoCodec::kCodecAV1});
+ {media::VideoCodec::kVP8, media::VideoCodec::kVP9,
+ media::VideoCodec::kAV1});
CheckAudioCodecs(capability.audio_codecs, {
- media::AudioCodec::kCodecOpus, media::AudioCodec::kCodecVorbis,
- media::AudioCodec::kCodecFLAC,
+ media::AudioCodec::kOpus, media::AudioCodec::kVorbis,
+ media::AudioCodec::kFLAC,
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- media::AudioCodec::kCodecAAC,
+ media::AudioCodec::kAAC,
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
});
CheckEncryptionSchemes(
@@ -209,10 +209,10 @@
EXPECT_TRUE(ParseCdmManifest(manifest, &capability));
CheckVideoCodecs(capability.video_codecs, {});
CheckAudioCodecs(capability.audio_codecs, {
- media::AudioCodec::kCodecOpus, media::AudioCodec::kCodecVorbis,
- media::AudioCodec::kCodecFLAC,
+ media::AudioCodec::kOpus, media::AudioCodec::kVorbis,
+ media::AudioCodec::kFLAC,
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- media::AudioCodec::kCodecAAC,
+ media::AudioCodec::kAAC,
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
});
CheckEncryptionSchemes(capability.encryption_schemes,
@@ -229,26 +229,26 @@
CdmCapability capability;
manifest.SetStringKey(kCdmCodecsListName, "vp8");
EXPECT_TRUE(ParseCdmManifest(manifest, &capability));
- CheckVideoCodecs(capability.video_codecs, {media::VideoCodec::kCodecVP8});
+ CheckVideoCodecs(capability.video_codecs, {media::VideoCodec::kVP8});
}
{
CdmCapability capability;
manifest.SetStringKey(kCdmCodecsListName, "vp09");
EXPECT_TRUE(ParseCdmManifest(manifest, &capability));
- CheckVideoCodecs(capability.video_codecs, {media::VideoCodec::kCodecVP9});
+ CheckVideoCodecs(capability.video_codecs, {media::VideoCodec::kVP9});
}
{
CdmCapability capability;
manifest.SetStringKey(kCdmCodecsListName, "av01");
EXPECT_TRUE(ParseCdmManifest(manifest, &capability));
- CheckVideoCodecs(capability.video_codecs, {media::VideoCodec::kCodecAV1});
+ CheckVideoCodecs(capability.video_codecs, {media::VideoCodec::kAV1});
}
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
{
CdmCapability capability;
manifest.SetStringKey(kCdmCodecsListName, "avc1");
EXPECT_TRUE(ParseCdmManifest(manifest, &capability));
- CheckVideoCodecs(capability.video_codecs, {media::VideoCodec::kCodecH264});
+ CheckVideoCodecs(capability.video_codecs, {media::VideoCodec::kH264});
}
#endif
{
@@ -256,9 +256,9 @@
CdmCapability capability;
manifest.SetStringKey(kCdmCodecsListName, "vp8,vp09,av01");
EXPECT_TRUE(ParseCdmManifest(manifest, &capability));
- CheckVideoCodecs(capability.video_codecs, {media::VideoCodec::kCodecVP8,
- media::VideoCodec::kCodecVP9,
- media::VideoCodec::kCodecAV1});
+ CheckVideoCodecs(capability.video_codecs,
+ {media::VideoCodec::kVP8, media::VideoCodec::kVP9,
+ media::VideoCodec::kAV1});
}
{
// Empty codecs list result in empty list.
@@ -272,7 +272,7 @@
CdmCapability capability;
manifest.SetStringKey(kCdmCodecsListName, "invalid,av01");
EXPECT_TRUE(ParseCdmManifest(manifest, &capability));
- CheckVideoCodecs(capability.video_codecs, {media::VideoCodec::kCodecAV1});
+ CheckVideoCodecs(capability.video_codecs, {media::VideoCodec::kAV1});
}
{
// Legacy: "vp9.0" was used to support VP9 profile 0 (no profile 2 support).
@@ -411,8 +411,8 @@
EXPECT_TRUE(version.IsValid());
EXPECT_EQ(version.GetString(), kVersion);
CheckVideoCodecs(capability.video_codecs,
- {media::VideoCodec::kCodecVP8, media::VideoCodec::kCodecVP9,
- media::VideoCodec::kCodecAV1});
+ {media::VideoCodec::kVP8, media::VideoCodec::kVP9,
+ media::VideoCodec::kAV1});
CheckEncryptionSchemes(
capability.encryption_schemes,
{media::EncryptionScheme::kCenc, media::EncryptionScheme::kCbcs});
diff --git a/content/browser/media/cdm_registry_impl_unittest.cc b/content/browser/media/cdm_registry_impl_unittest.cc
index a1d47c9..bf2d22a 100644
--- a/content/browser/media/cdm_registry_impl_unittest.cc
+++ b/content/browser/media/cdm_registry_impl_unittest.cc
@@ -80,7 +80,8 @@
protected:
media::CdmCapability GetTestCdmCapability() {
return media::CdmCapability(
- {media::kCodecVorbis}, {{media::kCodecVP8, {}}, {media::kCodecVP9, {}}},
+ {media::AudioCodec::kVorbis},
+ {{media::VideoCodec::kVP8, {}}, {media::VideoCodec::kVP9, {}}},
{EncryptionScheme::kCenc},
{CdmSessionType::kTemporary, CdmSessionType::kPersistentLicense});
}
@@ -139,8 +140,8 @@
EXPECT_EQ(kVersion1, cdm.version.GetString());
EXPECT_EQ(kTestPath, cdm.path.MaybeAsASCII());
EXPECT_EQ(kTestFileSystemId, cdm.file_system_id);
- EXPECT_AUDIO_CODECS(AudioCodec::kCodecVorbis);
- EXPECT_VIDEO_CODECS(VideoCodec::kCodecVP8, VideoCodec::kCodecVP9);
+ EXPECT_AUDIO_CODECS(AudioCodec::kVorbis);
+ EXPECT_VIDEO_CODECS(VideoCodec::kVP8, VideoCodec::kVP9);
EXPECT_ENCRYPTION_SCHEMES(EncryptionScheme::kCenc);
EXPECT_SESSION_TYPES(CdmSessionType::kTemporary,
CdmSessionType::kPersistentLicense);
@@ -217,7 +218,7 @@
EXPECT_EQ(kVersion1, cdm.version.GetString());
EXPECT_EQ(kTestPath, cdm.path.MaybeAsASCII());
EXPECT_EQ(kTestFileSystemId, cdm.file_system_id);
- EXPECT_VIDEO_CODECS(VideoCodec::kCodecVP8, VideoCodec::kCodecVP9);
+ EXPECT_VIDEO_CODECS(VideoCodec::kVP8, VideoCodec::kVP9);
EXPECT_ENCRYPTION_SCHEMES(EncryptionScheme::kCenc);
EXPECT_SESSION_TYPES(CdmSessionType::kTemporary,
CdmSessionType::kPersistentLicense);
diff --git a/content/browser/media/key_system_support_impl.cc b/content/browser/media/key_system_support_impl.cc
index 680f6e1..fede644 100644
--- a/content/browser/media/key_system_support_impl.cc
+++ b/content/browser/media/key_system_support_impl.cc
@@ -67,17 +67,17 @@
media::CdmCapability::VideoCodecMap video_codecs;
for (const auto& codec : overridden_codecs) {
if (codec == "vp8")
- video_codecs[media::VideoCodec::kCodecVP8] = {};
+ video_codecs[media::VideoCodec::kVP8] = {};
else if (codec == "vp9")
- video_codecs[media::VideoCodec::kCodecVP9] = {};
+ video_codecs[media::VideoCodec::kVP9] = {};
else if (codec == "avc1")
- video_codecs[media::VideoCodec::kCodecH264] = {};
+ video_codecs[media::VideoCodec::kH264] = {};
else if (codec == "hevc")
- video_codecs[media::VideoCodec::kCodecHEVC] = {};
+ video_codecs[media::VideoCodec::kHEVC] = {};
else if (codec == "mp4a")
- audio_codecs.push_back(media::AudioCodec::kCodecAAC);
+ audio_codecs.push_back(media::AudioCodec::kAAC);
else if (codec == "vorbis")
- audio_codecs.push_back(media::AudioCodec::kCodecVorbis);
+ audio_codecs.push_back(media::AudioCodec::kVorbis);
else
DVLOG(1) << "Unsupported codec specified on command line: " << codec;
}
diff --git a/content/browser/media/key_system_support_impl_unittest.cc b/content/browser/media/key_system_support_impl_unittest.cc
index 5a5684a..608b4803 100644
--- a/content/browser/media/key_system_support_impl_unittest.cc
+++ b/content/browser/media/key_system_support_impl_unittest.cc
@@ -114,8 +114,7 @@
media::CdmCapability TestCdmCapability() {
return media::CdmCapability(
- {AudioCodec::kCodecVorbis},
- {{VideoCodec::kCodecVP8, {}}, {VideoCodec::kCodecVP9, {}}},
+ {AudioCodec::kVorbis}, {{VideoCodec::kVP8, {}}, {VideoCodec::kVP9, {}}},
{EncryptionScheme::kCenc, EncryptionScheme::kCbcs},
{CdmSessionType::kTemporary, CdmSessionType::kPersistentLicense});
}
@@ -195,8 +194,8 @@
EXPECT_TRUE(IsSupported("KeySystem"));
EXPECT_TRUE(capability_->sw_secure_capability);
EXPECT_FALSE(capability_->hw_secure_capability);
- EXPECT_AUDIO_CODECS(AudioCodec::kCodecVorbis);
- EXPECT_VIDEO_CODECS(VideoCodec::kCodecVP8, VideoCodec::kCodecVP9);
+ EXPECT_AUDIO_CODECS(AudioCodec::kVorbis);
+ EXPECT_VIDEO_CODECS(VideoCodec::kVP8, VideoCodec::kVP9);
EXPECT_ENCRYPTION_SCHEMES(EncryptionScheme::kCenc, EncryptionScheme::kCbcs);
EXPECT_SESSION_TYPES(CdmSessionType::kTemporary,
CdmSessionType::kPersistentLicense);
@@ -223,8 +222,8 @@
EXPECT_TRUE(IsSupported("KeySystem"));
EXPECT_FALSE(capability_->sw_secure_capability);
EXPECT_TRUE(capability_->hw_secure_capability);
- EXPECT_HW_SECURE_AUDIO_CODECS(AudioCodec::kCodecVorbis);
- EXPECT_HW_SECURE_VIDEO_CODECS(VideoCodec::kCodecVP8, VideoCodec::kCodecVP9);
+ EXPECT_HW_SECURE_AUDIO_CODECS(AudioCodec::kVorbis);
+ EXPECT_HW_SECURE_VIDEO_CODECS(VideoCodec::kVP8, VideoCodec::kVP9);
EXPECT_HW_SECURE_ENCRYPTION_SCHEMES(EncryptionScheme::kCenc,
EncryptionScheme::kCbcs);
EXPECT_HW_SECURE_SESSION_TYPES(CdmSessionType::kTemporary,
@@ -234,19 +233,19 @@
TEST_F(KeySystemSupportImplTest, Profiles) {
Register("KeySystem",
media::CdmCapability(
- {AudioCodec::kCodecVorbis},
- {{VideoCodec::kCodecVP9,
+ {AudioCodec::kVorbis},
+ {{VideoCodec::kVP9,
{media::VP9PROFILE_PROFILE0, media::VP9PROFILE_PROFILE2}}},
{EncryptionScheme::kCenc}, {CdmSessionType::kTemporary}));
EXPECT_TRUE(IsSupported("KeySystem"));
EXPECT_TRUE(capability_->sw_secure_capability);
- EXPECT_VIDEO_CODECS(VideoCodec::kCodecVP9);
+ EXPECT_VIDEO_CODECS(VideoCodec::kVP9);
EXPECT_TRUE(base::Contains(
- capability_->sw_secure_capability->video_codecs[VideoCodec::kCodecVP9],
+ capability_->sw_secure_capability->video_codecs[VideoCodec::kVP9],
media::VP9PROFILE_PROFILE0));
EXPECT_TRUE(base::Contains(
- capability_->sw_secure_capability->video_codecs[VideoCodec::kCodecVP9],
+ capability_->sw_secure_capability->video_codecs[VideoCodec::kVP9],
media::VP9PROFILE_PROFILE2));
}
diff --git a/content/browser/media/media_browsertest.cc b/content/browser/media/media_browsertest.cc
index 2d031af..5436a4f 100644
--- a/content/browser/media/media_browsertest.cc
+++ b/content/browser/media/media_browsertest.cc
@@ -398,7 +398,7 @@
IN_PROC_BROWSER_TEST_P(MediaTest, AudioOnly_XHE_AAC_MP4) {
if (media::IsSupportedAudioType(
- {media::kCodecAAC, media::AudioCodecProfile::kXHE_AAC})) {
+ {media::AudioCodec::kAAC, media::AudioCodecProfile::kXHE_AAC})) {
PlayAudio("noise-xhe-aac.mp4", GetParam());
}
}
diff --git a/content/browser/media/media_source_browsertest.cc b/content/browser/media/media_source_browsertest.cc
index 27d92d4..8b18047 100644
--- a/content/browser/media/media_source_browsertest.cc
+++ b/content/browser/media/media_source_browsertest.cc
@@ -125,7 +125,7 @@
IN_PROC_BROWSER_TEST_F(MediaSourceTest, Playback_AudioOnly_XHE_AAC_MP4) {
if (media::IsSupportedAudioType(
- {media::kCodecAAC, media::AudioCodecProfile::kXHE_AAC})) {
+ {media::AudioCodec::kAAC, media::AudioCodecProfile::kXHE_AAC})) {
TestSimplePlayback("noise-xhe-aac.mp4", media::kEnded);
}
}
diff --git a/content/renderer/pepper/video_decoder_shim.cc b/content/renderer/pepper/video_decoder_shim.cc
index a3ad7bf2..ad98cba0 100644
--- a/content/renderer/pepper/video_decoder_shim.cc
+++ b/content/renderer/pepper/video_decoder_shim.cc
@@ -48,7 +48,7 @@
bool IsCodecSupported(media::VideoCodec codec) {
#if BUILDFLAG(ENABLE_LIBVPX)
- if (codec == media::kCodecVP9)
+ if (codec == media::VideoCodec::kVP9)
return true;
#endif
@@ -158,7 +158,7 @@
DCHECK(!decoder_);
#if BUILDFLAG(ENABLE_LIBVPX) || BUILDFLAG(ENABLE_FFMPEG_VIDEO_DECODERS)
#if BUILDFLAG(ENABLE_LIBVPX)
- if (config.codec() == media::kCodecVP9) {
+ if (config.codec() == media::VideoCodec::kVP9) {
decoder_ = std::make_unique<media::VpxVideoDecoder>();
} else
#endif // BUILDFLAG(ENABLE_LIBVPX)
@@ -341,14 +341,14 @@
return false;
}
- media::VideoCodec codec = media::kUnknownVideoCodec;
+ media::VideoCodec codec = media::VideoCodec::kUnknown;
if (vda_config.profile <= media::H264PROFILE_MAX)
- codec = media::kCodecH264;
+ codec = media::VideoCodec::kH264;
else if (vda_config.profile <= media::VP8PROFILE_MAX)
- codec = media::kCodecVP8;
+ codec = media::VideoCodec::kVP8;
else if (vda_config.profile <= media::VP9PROFILE_MAX)
- codec = media::kCodecVP9;
- DCHECK_NE(codec, media::kUnknownVideoCodec);
+ codec = media::VideoCodec::kVP9;
+ DCHECK_NE(codec, media::VideoCodec::kUnknown);
if (!IsCodecSupported(codec))
return false;
diff --git a/fuchsia/engine/browser/cast_streaming_browsertest.cc b/fuchsia/engine/browser/cast_streaming_browsertest.cc
index 209fe405b..f756b1e 100644
--- a/fuchsia/engine/browser/cast_streaming_browsertest.cc
+++ b/fuchsia/engine/browser/cast_streaming_browsertest.cc
@@ -26,7 +26,7 @@
media::AudioDecoderConfig GetDefaultAudioConfig() {
return media::AudioDecoderConfig(
- media::AudioCodec::kCodecOpus, media::SampleFormat::kSampleFormatF32,
+ media::AudioCodec::kOpus, media::SampleFormat::kSampleFormatF32,
media::ChannelLayout::CHANNEL_LAYOUT_STEREO,
48000 /* samples_per_second */, media::EmptyExtraData(),
media::EncryptionScheme::kUnencrypted);
@@ -37,7 +37,7 @@
const gfx::Rect kVideoRect(kVideoSize);
return media::VideoDecoderConfig(
- media::VideoCodec::kCodecVP8, media::VideoCodecProfile::VP8PROFILE_MIN,
+ media::VideoCodec::kVP8, media::VideoCodecProfile::VP8PROFILE_MIN,
media::VideoDecoderConfig::AlphaMode::kIsOpaque, media::VideoColorSpace(),
media::VideoTransformation(), kVideoSize, kVideoRect, kVideoSize,
media::EmptyExtraData(), media::EncryptionScheme::kUnencrypted);
diff --git a/fuchsia/engine/browser/receiver_session_client.cc b/fuchsia/engine/browser/receiver_session_client.cc
index 8ce8b53..5b68032 100644
--- a/fuchsia/engine/browser/receiver_session_client.cc
+++ b/fuchsia/engine/browser/receiver_session_client.cc
@@ -29,10 +29,10 @@
// out by build flags.
auto stream_config =
std::make_unique<cast_streaming::ReceiverSession::AVConstraints>(
- cast_streaming::ToVideoCaptureConfigCodecs(
- media::VideoCodec::kCodecH264, media::VideoCodec::kCodecVP8),
- cast_streaming::ToAudioCaptureConfigCodecs(
- media::AudioCodec::kCodecAAC, media::AudioCodec::kCodecOpus));
+ cast_streaming::ToVideoCaptureConfigCodecs(media::VideoCodec::kH264,
+ media::VideoCodec::kVP8),
+ cast_streaming::ToAudioCaptureConfigCodecs(media::AudioCodec::kAAC,
+ media::AudioCodec::kOpus));
receiver_session_ = cast_streaming::ReceiverSession::Create(
std::move(stream_config),
diff --git a/fuchsia/engine/renderer/web_engine_content_renderer_client.cc b/fuchsia/engine/renderer/web_engine_content_renderer_client.cc
index 23e828f0..6a6b011 100644
--- a/fuchsia/engine/renderer/web_engine_content_renderer_client.cc
+++ b/fuchsia/engine/renderer/web_engine_content_renderer_client.cc
@@ -34,10 +34,10 @@
bool IsSupportedHardwareVideoCodec(const media::VideoType& type) {
// TODO(crbug.com/1013412): Replace these hardcoded checks with a query to the
// fuchsia.mediacodec FIDL service.
- if (type.codec == media::kCodecH264 && type.level <= 41)
+ if (type.codec == media::VideoCodec::kH264 && type.level <= 41)
return true;
- if (type.codec == media::kCodecVP9 && type.level <= 40)
+ if (type.codec == media::VideoCodec::kVP9 && type.level <= 40)
return true;
return false;
@@ -175,19 +175,19 @@
media::SupportedCodecs supported_video_codecs = 0;
constexpr uint8_t kUnknownCodecLevel = 0;
if (IsSupportedHardwareVideoCodec(media::VideoType{
- media::kCodecVP9, media::VP9PROFILE_PROFILE0, kUnknownCodecLevel,
- media::VideoColorSpace::REC709()})) {
+ media::VideoCodec::kVP9, media::VP9PROFILE_PROFILE0,
+ kUnknownCodecLevel, media::VideoColorSpace::REC709()})) {
supported_video_codecs |= media::EME_CODEC_VP9_PROFILE0;
}
if (IsSupportedHardwareVideoCodec(media::VideoType{
- media::kCodecVP9, media::VP9PROFILE_PROFILE2, kUnknownCodecLevel,
- media::VideoColorSpace::REC709()})) {
+ media::VideoCodec::kVP9, media::VP9PROFILE_PROFILE2,
+ kUnknownCodecLevel, media::VideoColorSpace::REC709()})) {
supported_video_codecs |= media::EME_CODEC_VP9_PROFILE2;
}
if (IsSupportedHardwareVideoCodec(media::VideoType{
- media::kCodecH264, media::H264PROFILE_MAIN, kUnknownCodecLevel,
+ media::VideoCodec::kH264, media::H264PROFILE_MAIN, kUnknownCodecLevel,
media::VideoColorSpace::REC709()})) {
supported_video_codecs |= media::EME_CODEC_AVC1;
}
diff --git a/media/base/android/java/src/org/chromium/media/CodecProfileLevelList.java b/media/base/android/java/src/org/chromium/media/CodecProfileLevelList.java
index 5ab85d3..b88c644b 100644
--- a/media/base/android/java/src/org/chromium/media/CodecProfileLevelList.java
+++ b/media/base/android/java/src/org/chromium/media/CodecProfileLevelList.java
@@ -75,17 +75,17 @@
private static class UnsupportedCodecProfileException extends RuntimeException {}
private static int getCodecFromMime(String mime) {
- if (mime.endsWith("vp9")) return VideoCodec.CODEC_VP9;
- if (mime.endsWith("vp8")) return VideoCodec.CODEC_VP8;
- if (mime.endsWith("avc")) return VideoCodec.CODEC_H264;
- if (mime.endsWith("hevc")) return VideoCodec.CODEC_HEVC;
- if (mime.endsWith("dolby-vision")) return VideoCodec.CODEC_DOLBY_VISION;
+ if (mime.endsWith("vp9")) return VideoCodec.VP9;
+ if (mime.endsWith("vp8")) return VideoCodec.VP8;
+ if (mime.endsWith("avc")) return VideoCodec.H264;
+ if (mime.endsWith("hevc")) return VideoCodec.HEVC;
+ if (mime.endsWith("dolby-vision")) return VideoCodec.DOLBY_VISION;
throw new UnsupportedCodecProfileException();
}
private static int mediaCodecProfileToChromiumMediaProfile(int codec, int profile) {
switch (codec) {
- case VideoCodec.CODEC_H264:
+ case VideoCodec.H264:
switch (profile) {
case CodecProfileLevel.AVCProfileBaseline:
return VideoCodecProfile.H264PROFILE_BASELINE;
@@ -104,14 +104,14 @@
default:
throw new UnsupportedCodecProfileException();
}
- case VideoCodec.CODEC_VP8:
+ case VideoCodec.VP8:
switch (profile) {
case CodecProfileLevel.VP8ProfileMain:
return VideoCodecProfile.VP8PROFILE_ANY;
default:
throw new UnsupportedCodecProfileException();
}
- case VideoCodec.CODEC_VP9:
+ case VideoCodec.VP9:
switch (profile) {
case CodecProfileLevel.VP9Profile0:
return VideoCodecProfile.VP9PROFILE_PROFILE0;
@@ -126,7 +126,7 @@
default:
throw new UnsupportedCodecProfileException();
}
- case VideoCodec.CODEC_HEVC:
+ case VideoCodec.HEVC:
switch (profile) {
case CodecProfileLevel.HEVCProfileMain:
return VideoCodecProfile.HEVCPROFILE_MAIN;
@@ -136,7 +136,7 @@
default:
throw new UnsupportedCodecProfileException();
}
- case VideoCodec.CODEC_DOLBY_VISION:
+ case VideoCodec.DOLBY_VISION:
switch (profile) {
// Profile 0, 1, 2, 3, 6 are not supported for new applications.
case CodecProfileLevel.DolbyVisionProfileDvheDtr:
@@ -159,7 +159,7 @@
private static int mediaCodecLevelToChromiumMediaLevel(int codec, int level) {
switch (codec) {
- case VideoCodec.CODEC_H264:
+ case VideoCodec.H264:
switch (level) {
case CodecProfileLevel.AVCLevel1:
return 10;
@@ -196,7 +196,7 @@
default:
throw new UnsupportedCodecProfileException();
}
- case VideoCodec.CODEC_VP8:
+ case VideoCodec.VP8:
switch (level) {
case CodecProfileLevel.VP8Level_Version0:
return 0;
@@ -209,7 +209,7 @@
default:
throw new UnsupportedCodecProfileException();
}
- case VideoCodec.CODEC_VP9:
+ case VideoCodec.VP9:
switch (level) {
case CodecProfileLevel.VP9Level1:
return 10;
@@ -242,7 +242,7 @@
default:
throw new UnsupportedCodecProfileException();
}
- case VideoCodec.CODEC_HEVC:
+ case VideoCodec.HEVC:
switch (level) {
case CodecProfileLevel.HEVCHighTierLevel1:
case CodecProfileLevel.HEVCMainTierLevel1:
@@ -286,7 +286,7 @@
default:
throw new UnsupportedCodecProfileException();
}
- case VideoCodec.CODEC_DOLBY_VISION:
+ case VideoCodec.DOLBY_VISION:
switch (level) {
case CodecProfileLevel.DolbyVisionLevelHd24:
return 1;
diff --git a/media/base/android/java/src/org/chromium/media/MediaCodecUtil.java b/media/base/android/java/src/org/chromium/media/MediaCodecUtil.java
index 2b1f745..0c241d25 100644
--- a/media/base/android/java/src/org/chromium/media/MediaCodecUtil.java
+++ b/media/base/android/java/src/org/chromium/media/MediaCodecUtil.java
@@ -278,7 +278,7 @@
if (videoCapabilities.getBitrateRange().contains(bitrate)) {
// Assume all platforms before N only support VP9 profile 0.
profileLevels.addCodecProfileLevel(
- VideoCodec.CODEC_VP9, VideoCodecProfile.VP9PROFILE_PROFILE0, level);
+ VideoCodec.VP9, VideoCodecProfile.VP9PROFILE_PROFILE0, level);
}
}
}
diff --git a/media/base/android/media_codec_bridge_impl.cc b/media/base/android/media_codec_bridge_impl.cc
index 54d9ede..0470083 100644
--- a/media/base/android/media_codec_bridge_impl.cc
+++ b/media/base/android/media_codec_bridge_impl.cc
@@ -73,11 +73,11 @@
const size_t extra_data_size = config.extra_data().size();
*output_frame_has_adts_header = false;
- if (extra_data_size == 0 && config.codec() != kCodecOpus)
+ if (extra_data_size == 0 && config.codec() != AudioCodec::kOpus)
return true;
switch (config.codec()) {
- case kCodecVorbis: {
+ case AudioCodec::kVorbis: {
if (extra_data[0] != 2) {
LOG(ERROR) << "Invalid number of vorbis headers before the codec "
<< "header: " << extra_data[0];
@@ -118,7 +118,7 @@
extra_data + extra_data_size);
break;
}
- case kCodecFLAC: {
+ case AudioCodec::kFLAC: {
// According to MediaCodec spec, CSB buffer #0 for FLAC should be:
// "fLaC", the FLAC stream marker in ASCII, followed by the STREAMINFO
// block (the mandatory metadata block), optionally followed by any number
@@ -131,13 +131,13 @@
extra_data + extra_data_size);
break;
}
- case kCodecAAC: {
+ case AudioCodec::kAAC: {
output_csd0->assign(extra_data, extra_data + extra_data_size);
*output_frame_has_adts_header =
config.profile() != AudioCodecProfile::kXHE_AAC;
break;
}
- case kCodecOpus: {
+ case AudioCodec::kOpus: {
if (!extra_data || extra_data_size == 0 || codec_delay_ns < 0 ||
seek_preroll_ns < 0) {
LOG(ERROR) << "Invalid Opus Header";
diff --git a/media/base/android/media_codec_bridge_impl.h b/media/base/android/media_codec_bridge_impl.h
index 5b306ba..31306f3 100644
--- a/media/base/android/media_codec_bridge_impl.h
+++ b/media/base/android/media_codec_bridge_impl.h
@@ -32,7 +32,7 @@
VideoCodecConfig();
~VideoCodecConfig();
- VideoCodec codec = kUnknownVideoCodec;
+ VideoCodec codec = VideoCodec::kUnknown;
CodecType codec_type = CodecType::kAny;
@@ -78,7 +78,7 @@
// Creates and starts a new MediaCodec configured for encoding. Returns
// nullptr on failure.
static std::unique_ptr<MediaCodecBridge> CreateVideoEncoder(
- VideoCodec codec, // e.g. media::kCodecVP8
+ VideoCodec codec, // e.g. media::VideoCodec::kVP8
const gfx::Size& size, // input frame size
int bit_rate, // bits/second
int frame_rate, // frames/second
diff --git a/media/base/android/media_codec_bridge_impl_unittest.cc b/media/base/android/media_codec_bridge_impl_unittest.cc
index 60328e5..7814f4f 100644
--- a/media/base/android/media_codec_bridge_impl_unittest.cc
+++ b/media/base/android/media_codec_bridge_impl_unittest.cc
@@ -290,7 +290,7 @@
SKIP_TEST_IF_MEDIA_CODEC_IS_NOT_AVAILABLE();
VideoCodecConfig config;
- config.codec = kCodecH264;
+ config.codec = VideoCodec::kH264;
config.codec_type = CodecType::kAny;
config.initial_expected_coded_size = gfx::Size(640, 480);
@@ -301,7 +301,7 @@
SKIP_TEST_IF_MEDIA_CODEC_IS_NOT_AVAILABLE();
std::unique_ptr<media::MediaCodecBridge> media_codec =
- MediaCodecBridgeImpl::CreateAudioDecoder(NewAudioConfig(kCodecMP3),
+ MediaCodecBridgeImpl::CreateAudioDecoder(NewAudioConfig(AudioCodec::kMP3),
nullptr);
ASSERT_THAT(media_codec, NotNull());
@@ -360,9 +360,10 @@
// The first byte of the header is not 0x02.
std::vector<uint8_t> invalid_first_byte = {{0x00, 0xff, 0xff, 0xff, 0xff}};
- ASSERT_THAT(MediaCodecBridgeImpl::CreateAudioDecoder(
- NewAudioConfig(kCodecVorbis, invalid_first_byte), nullptr),
- IsNull());
+ ASSERT_THAT(
+ MediaCodecBridgeImpl::CreateAudioDecoder(
+ NewAudioConfig(AudioCodec::kVorbis, invalid_first_byte), nullptr),
+ IsNull());
// Size of the header is too large.
size_t large_size = 8 * 1024 * 1024 + 2;
@@ -370,7 +371,7 @@
large_header.front() = 0x02;
large_header.back() = 0xfe;
ASSERT_THAT(MediaCodecBridgeImpl::CreateAudioDecoder(
- NewAudioConfig(kCodecVorbis, large_header), nullptr),
+ NewAudioConfig(AudioCodec::kVorbis, large_header), nullptr),
IsNull());
}
@@ -380,15 +381,15 @@
std::vector<uint8_t> dummy_extra_data = {{0, 0}};
// Codec Delay is < 0.
- ASSERT_THAT(
- MediaCodecBridgeImpl::CreateAudioDecoder(
- NewAudioConfig(kCodecOpus, dummy_extra_data, base::TimeDelta(), -1),
- nullptr),
- IsNull());
+ ASSERT_THAT(MediaCodecBridgeImpl::CreateAudioDecoder(
+ NewAudioConfig(AudioCodec::kOpus, dummy_extra_data,
+ base::TimeDelta(), -1),
+ nullptr),
+ IsNull());
// Seek Preroll is < 0.
ASSERT_THAT(MediaCodecBridgeImpl::CreateAudioDecoder(
- NewAudioConfig(kCodecOpus, dummy_extra_data,
+ NewAudioConfig(AudioCodec::kOpus, dummy_extra_data,
base::TimeDelta::FromMicroseconds(-1)),
nullptr),
IsNull());
@@ -401,7 +402,7 @@
}
VideoCodecConfig config;
- config.codec = kCodecVP8;
+ config.codec = VideoCodec::kVP8;
config.codec_type = CodecType::kAny;
config.initial_expected_coded_size = gfx::Size(320, 240);
@@ -430,11 +431,11 @@
TEST(MediaCodecBridgeTest, CreateUnsupportedCodec) {
EXPECT_THAT(MediaCodecBridgeImpl::CreateAudioDecoder(
- NewAudioConfig(kUnknownAudioCodec), nullptr),
+ NewAudioConfig(AudioCodec::kUnknown), nullptr),
IsNull());
VideoCodecConfig config;
- config.codec = kUnknownVideoCodec;
+ config.codec = VideoCodec::kUnknown;
config.codec_type = CodecType::kAny;
config.initial_expected_coded_size = gfx::Size(320, 240);
EXPECT_THAT(MediaCodecBridgeImpl::CreateVideoDecoder(config), IsNull());
@@ -465,7 +466,7 @@
std::unique_ptr<MediaCodecBridge> media_codec(
MediaCodecBridgeImpl::CreateVideoEncoder(
- kCodecH264, gfx::Size(width, height), bit_rate, frame_rate,
+ VideoCodec::kH264, gfx::Size(width, height), bit_rate, frame_rate,
i_frame_interval, color_format));
ASSERT_THAT(media_codec, NotNull());
diff --git a/media/base/android/media_codec_util.cc b/media/base/android/media_codec_util.cc
index 84e4c1b..d9bbb10 100644
--- a/media/base/android/media_codec_util.cc
+++ b/media/base/android/media_codec_util.cc
@@ -127,19 +127,19 @@
return kBitstreamAudioMimeType;
switch (codec) {
- case kCodecMP3:
+ case AudioCodec::kMP3:
return kMp3MimeType;
- case kCodecVorbis:
+ case AudioCodec::kVorbis:
return kVorbisMimeType;
- case kCodecFLAC:
+ case AudioCodec::kFLAC:
return kFLACMimeType;
- case kCodecOpus:
+ case AudioCodec::kOpus:
return kOpusMimeType;
- case kCodecAAC:
+ case AudioCodec::kAAC:
return kAacMimeType;
- case kCodecAC3:
+ case AudioCodec::kAC3:
return kAc3MimeType;
- case kCodecEAC3:
+ case AudioCodec::kEAC3:
return kEac3MimeType;
default:
return std::string();
@@ -149,17 +149,17 @@
// static
std::string MediaCodecUtil::CodecToAndroidMimeType(VideoCodec codec) {
switch (codec) {
- case kCodecH264:
+ case VideoCodec::kH264:
return kAvcMimeType;
- case kCodecHEVC:
+ case VideoCodec::kHEVC:
return kHevcMimeType;
- case kCodecVP8:
+ case VideoCodec::kVP8:
return kVp8MimeType;
- case kCodecVP9:
+ case VideoCodec::kVP9:
return kVp9MimeType;
- case kCodecDolbyVision:
+ case VideoCodec::kDolbyVision:
return kDolbyVisionMimeType;
- case kCodecAV1:
+ case VideoCodec::kAV1:
return kAv1MimeType;
default:
return std::string();
@@ -309,7 +309,7 @@
// static
bool MediaCodecUtil::IsPassthroughAudioFormat(AudioCodec codec) {
- return codec == kCodecAC3 || codec == kCodecEAC3;
+ return codec == AudioCodec::kAC3 || codec == AudioCodec::kEAC3;
}
// static
@@ -376,7 +376,7 @@
// MediaTek hardware vp8 is known slower than the software implementation.
if (base::StartsWith(codec_name, "OMX.MTK.", base::CompareCase::SENSITIVE)) {
- if (codec == kCodecVP8) {
+ if (codec == VideoCodec::kVP8) {
// We may still reject VP8 hardware decoding later on certain chipsets,
// see isDecoderSupportedForDevice(). We don't have the the chipset ID
// here to check now though.
diff --git a/media/base/audio_codecs.cc b/media/base/audio_codecs.cc
index 06a9e73c..9010981 100644
--- a/media/base/audio_codecs.cc
+++ b/media/base/audio_codecs.cc
@@ -4,6 +4,8 @@
#include "media/base/audio_codecs.h"
+#include <ostream>
+
#include "base/strings/string_util.h"
namespace media {
@@ -11,39 +13,39 @@
// These names come from src/third_party/ffmpeg/libavcodec/codec_desc.c
std::string GetCodecName(AudioCodec codec) {
switch (codec) {
- case kUnknownAudioCodec:
+ case AudioCodec::kUnknown:
return "unknown";
- case kCodecAAC:
+ case AudioCodec::kAAC:
return "aac";
- case kCodecMP3:
+ case AudioCodec::kMP3:
return "mp3";
- case kCodecPCM:
- case kCodecPCM_S16BE:
- case kCodecPCM_S24BE:
+ case AudioCodec::kPCM:
+ case AudioCodec::kPCM_S16BE:
+ case AudioCodec::kPCM_S24BE:
return "pcm";
- case kCodecVorbis:
+ case AudioCodec::kVorbis:
return "vorbis";
- case kCodecFLAC:
+ case AudioCodec::kFLAC:
return "flac";
- case kCodecAMR_NB:
+ case AudioCodec::kAMR_NB:
return "amr_nb";
- case kCodecAMR_WB:
+ case AudioCodec::kAMR_WB:
return "amr_wb";
- case kCodecPCM_MULAW:
+ case AudioCodec::kPCM_MULAW:
return "pcm_mulaw";
- case kCodecGSM_MS:
+ case AudioCodec::kGSM_MS:
return "gsm_ms";
- case kCodecOpus:
+ case AudioCodec::kOpus:
return "opus";
- case kCodecPCM_ALAW:
+ case AudioCodec::kPCM_ALAW:
return "pcm_alaw";
- case kCodecEAC3:
+ case AudioCodec::kEAC3:
return "eac3";
- case kCodecALAC:
+ case AudioCodec::kALAC:
return "alac";
- case kCodecAC3:
+ case AudioCodec::kAC3:
return "ac3";
- case kCodecMpegHAudio:
+ case AudioCodec::kMpegHAudio:
return "mpeg-h-audio";
}
}
@@ -59,28 +61,32 @@
AudioCodec StringToAudioCodec(const std::string& codec_id) {
if (codec_id == "aac")
- return kCodecAAC;
+ return AudioCodec::kAAC;
if (codec_id == "ac-3" || codec_id == "mp4a.A5" || codec_id == "mp4a.a5")
- return kCodecAC3;
+ return AudioCodec::kAC3;
if (codec_id == "ec-3" || codec_id == "mp4a.A6" || codec_id == "mp4a.a6")
- return kCodecEAC3;
+ return AudioCodec::kEAC3;
if (codec_id == "mp3" || codec_id == "mp4a.69" || codec_id == "mp4a.6B")
- return kCodecMP3;
+ return AudioCodec::kMP3;
if (codec_id == "alac")
- return kCodecALAC;
+ return AudioCodec::kALAC;
if (codec_id == "flac")
- return kCodecFLAC;
+ return AudioCodec::kFLAC;
if (base::StartsWith(codec_id, "mhm1.", base::CompareCase::SENSITIVE) ||
base::StartsWith(codec_id, "mha1.", base::CompareCase::SENSITIVE)) {
- return kCodecMpegHAudio;
+ return AudioCodec::kMpegHAudio;
}
if (codec_id == "opus")
- return kCodecOpus;
+ return AudioCodec::kOpus;
if (codec_id == "vorbis")
- return kCodecVorbis;
+ return AudioCodec::kVorbis;
if (base::StartsWith(codec_id, "mp4a.40.", base::CompareCase::SENSITIVE))
- return kCodecAAC;
- return kUnknownAudioCodec;
+ return AudioCodec::kAAC;
+ return AudioCodec::kUnknown;
+}
+
+std::ostream& operator<<(std::ostream& os, const AudioCodec& codec) {
+ return os << GetCodecName(codec);
}
} // namespace media
diff --git a/media/base/audio_codecs.h b/media/base/audio_codecs.h
index 5eb5ddc..f679513 100644
--- a/media/base/audio_codecs.h
+++ b/media/base/audio_codecs.h
@@ -10,36 +10,36 @@
namespace media {
-enum AudioCodec {
+enum class AudioCodec {
// These values are histogrammed over time; do not change their ordinal
// values. When deleting a codec replace it with a dummy value; when adding a
- // codec, do so at the bottom before kAudioCodecMax, and update the value of
- // kAudioCodecMax to equal the new codec.
- kUnknownAudioCodec = 0,
- kCodecAAC = 1,
- kCodecMP3 = 2,
- kCodecPCM = 3,
- kCodecVorbis = 4,
- kCodecFLAC = 5,
- kCodecAMR_NB = 6,
- kCodecAMR_WB = 7,
- kCodecPCM_MULAW = 8,
- kCodecGSM_MS = 9,
- kCodecPCM_S16BE = 10,
- kCodecPCM_S24BE = 11,
- kCodecOpus = 12,
- kCodecEAC3 = 13,
- kCodecPCM_ALAW = 14,
- kCodecALAC = 15,
- kCodecAC3 = 16,
- kCodecMpegHAudio = 17,
+ // codec, do so at the bottom before kMaxValue, and update the value of
+ // kMaxValue to equal the new codec.
+ kUnknown = 0,
+ kAAC = 1,
+ kMP3 = 2,
+ kPCM = 3,
+ kVorbis = 4,
+ kFLAC = 5,
+ kAMR_NB = 6,
+ kAMR_WB = 7,
+ kPCM_MULAW = 8,
+ kGSM_MS = 9,
+ kPCM_S16BE = 10,
+ kPCM_S24BE = 11,
+ kOpus = 12,
+ kEAC3 = 13,
+ kPCM_ALAW = 14,
+ kALAC = 15,
+ kAC3 = 16,
+ kMpegHAudio = 17,
// DO NOT ADD RANDOM AUDIO CODECS!
//
// The only acceptable time to add a new codec is if there is production code
// that uses said codec in the same CL.
// Must always be equal to the largest entry ever logged.
- kAudioCodecMax = kCodecMpegHAudio,
+ kMaxValue = kMpegHAudio,
};
enum class AudioCodecProfile {
@@ -55,6 +55,8 @@
std::string MEDIA_EXPORT GetCodecName(AudioCodec codec);
std::string MEDIA_EXPORT GetProfileName(AudioCodecProfile profile);
+MEDIA_EXPORT std::ostream& operator<<(std::ostream& os,
+ const AudioCodec& codec);
MEDIA_EXPORT AudioCodec StringToAudioCodec(const std::string& codec_id);
} // namespace media
diff --git a/media/base/audio_decoder_config.cc b/media/base/audio_decoder_config.cc
index 1fcbb41..9aa7d036 100644
--- a/media/base/audio_decoder_config.cc
+++ b/media/base/audio_decoder_config.cc
@@ -54,7 +54,7 @@
AudioDecoderConfig::~AudioDecoderConfig() = default;
bool AudioDecoderConfig::IsValidConfig() const {
- return codec_ != kUnknownAudioCodec &&
+ return codec_ != AudioCodec::kUnknown &&
channel_layout_ != CHANNEL_LAYOUT_UNSUPPORTED &&
bytes_per_channel_ > 0 &&
bytes_per_channel_ <= limits::kMaxBytesPerSample &&
diff --git a/media/base/audio_decoder_config.h b/media/base/audio_decoder_config.h
index 1ec4ed0..4032049d 100644
--- a/media/base/audio_decoder_config.h
+++ b/media/base/audio_decoder_config.h
@@ -126,7 +126,7 @@
// Mandatory parameters passed in constructor:
- AudioCodec codec_ = kUnknownAudioCodec;
+ AudioCodec codec_ = AudioCodec::kUnknown;
SampleFormat sample_format_ = kUnknownSampleFormat;
ChannelLayout channel_layout_ = CHANNEL_LAYOUT_UNSUPPORTED;
int samples_per_second_ = 0;
diff --git a/media/base/demuxer_memory_limit_cast.cc b/media/base/demuxer_memory_limit_cast.cc
index a4c2775..3020f48 100644
--- a/media/base/demuxer_memory_limit_cast.cc
+++ b/media/base/demuxer_memory_limit_cast.cc
@@ -17,10 +17,10 @@
DCHECK(audio_config->IsValidConfig());
switch (audio_config->codec()) {
- case kCodecEAC3:
- case kCodecAC3:
+ case AudioCodec::kEAC3:
+ case AudioCodec::kAC3:
return internal::kDemuxerStreamAudioMemoryLimitMedium;
- case kCodecAAC:
+ case AudioCodec::kAAC:
if (ChannelLayoutToChannelCount(audio_config->channel_layout()) >= 5) {
return internal::kDemuxerStreamAudioMemoryLimitMedium;
}
@@ -42,9 +42,9 @@
}
DCHECK(video_config->IsValidConfig());
switch (video_config->codec()) {
- case kCodecVP9:
- case kCodecHEVC:
- case kCodecDolbyVision:
+ case VideoCodec::kVP9:
+ case VideoCodec::kHEVC:
+ case VideoCodec::kDolbyVision:
return internal::kDemuxerStreamVideoMemoryLimitMedium;
default:
return internal::kDemuxerStreamVideoMemoryLimitLow;
diff --git a/media/base/demuxer_memory_limit_cast_unittest.cc b/media/base/demuxer_memory_limit_cast_unittest.cc
index 4dbe6d43..2f150da 100644
--- a/media/base/demuxer_memory_limit_cast_unittest.cc
+++ b/media/base/demuxer_memory_limit_cast_unittest.cc
@@ -21,28 +21,28 @@
internal::kDemuxerStreamAudioMemoryLimitLow);
AudioDecoderConfig audio_config_opus(
- AudioCodec::kCodecOpus, SampleFormat::kSampleFormatS16,
+ AudioCodec::kOpus, SampleFormat::kSampleFormatS16,
ChannelLayout::CHANNEL_LAYOUT_STEREO, 5000 /* samples_per_second */,
EmptyExtraData(), EncryptionScheme::kUnencrypted);
EXPECT_EQ(GetDemuxerStreamAudioMemoryLimit(&audio_config_opus),
internal::kDemuxerStreamAudioMemoryLimitLow);
AudioDecoderConfig audio_config_ac3(
- AudioCodec::kCodecAC3, SampleFormat::kSampleFormatS16,
+ AudioCodec::kAC3, SampleFormat::kSampleFormatS16,
ChannelLayout::CHANNEL_LAYOUT_STEREO, 5000 /* samples_per_second */,
EmptyExtraData(), EncryptionScheme::kUnencrypted);
EXPECT_EQ(GetDemuxerStreamAudioMemoryLimit(&audio_config_ac3),
internal::kDemuxerStreamAudioMemoryLimitMedium);
AudioDecoderConfig audio_config_aac_1(
- AudioCodec::kCodecAAC, SampleFormat::kSampleFormatS16,
+ AudioCodec::kAAC, SampleFormat::kSampleFormatS16,
ChannelLayout::CHANNEL_LAYOUT_5_0, 5000 /* samples_per_second */,
EmptyExtraData(), EncryptionScheme::kUnencrypted);
EXPECT_EQ(GetDemuxerStreamAudioMemoryLimit(&audio_config_aac_1),
internal::kDemuxerStreamAudioMemoryLimitMedium);
AudioDecoderConfig audio_config_aac_2(
- AudioCodec::kCodecAAC, SampleFormat::kSampleFormatS16,
+ AudioCodec::kAAC, SampleFormat::kSampleFormatS16,
ChannelLayout::CHANNEL_LAYOUT_STEREO, 5000 /* samples_per_second */,
EmptyExtraData(), EncryptionScheme::kUnencrypted);
EXPECT_EQ(GetDemuxerStreamAudioMemoryLimit(&audio_config_aac_2),
@@ -61,7 +61,7 @@
internal::kDemuxerStreamVideoMemoryLimitLow);
VideoDecoderConfig video_config(
- kCodecVP8, VIDEO_CODEC_PROFILE_UNKNOWN,
+ VideoCodec::kVP8, VIDEO_CODEC_PROFILE_UNKNOWN,
VideoDecoderConfig::AlphaMode::kIsOpaque, VideoColorSpace(),
kNoTransformation, kCodedSize, kVisibleRect, kNaturalSize,
EmptyExtraData(), EncryptionScheme::kUnencrypted);
@@ -75,7 +75,7 @@
Demuxer::DemuxerTypes::kMediaUrlDemuxer, &video_config),
internal::kDemuxerStreamVideoMemoryLimitLow);
- video_config.Initialize(kCodecVP9, VIDEO_CODEC_PROFILE_UNKNOWN,
+ video_config.Initialize(VideoCodec::kVP9, VIDEO_CODEC_PROFILE_UNKNOWN,
VideoDecoderConfig::AlphaMode::kIsOpaque,
VideoColorSpace(), kNoTransformation, kCodedSize,
kVisibleRect, kNaturalSize, EmptyExtraData(),
diff --git a/media/base/fake_demuxer_stream.cc b/media/base/fake_demuxer_stream.cc
index 76865ea..7a9fadd 100644
--- a/media/base/fake_demuxer_stream.cc
+++ b/media/base/fake_demuxer_stream.cc
@@ -172,7 +172,7 @@
void FakeDemuxerStream::UpdateVideoDecoderConfig() {
const gfx::Rect kVisibleRect(next_size_.width(), next_size_.height());
video_decoder_config_.Initialize(
- kCodecVP8, VIDEO_CODEC_PROFILE_UNKNOWN,
+ VideoCodec::kVP8, VIDEO_CODEC_PROFILE_UNKNOWN,
VideoDecoderConfig::AlphaMode::kIsOpaque, VideoColorSpace(),
kNoTransformation, next_size_, kVisibleRect, next_size_, EmptyExtraData(),
is_encrypted_ ? EncryptionScheme::kCenc : EncryptionScheme::kUnencrypted);
diff --git a/media/base/ipc/media_param_traits_macros.h b/media/base/ipc/media_param_traits_macros.h
index 0d06c00..0343a90 100644
--- a/media/base/ipc/media_param_traits_macros.h
+++ b/media/base/ipc/media_param_traits_macros.h
@@ -53,7 +53,7 @@
IPC_ENUM_TRAITS_MAX_VALUE(blink::WebFullscreenVideoStatus,
blink::WebFullscreenVideoStatus::kMaxValue)
-IPC_ENUM_TRAITS_MAX_VALUE(media::AudioCodec, media::AudioCodec::kAudioCodecMax)
+IPC_ENUM_TRAITS_MAX_VALUE(media::AudioCodec, media::AudioCodec::kMaxValue)
IPC_ENUM_TRAITS_MAX_VALUE(media::AudioCodecProfile,
media::AudioCodecProfile::kMaxValue)
@@ -117,7 +117,7 @@
IPC_ENUM_TRAITS_MAX_VALUE(media::SampleFormat, media::kSampleFormatMax)
-IPC_ENUM_TRAITS_MAX_VALUE(media::VideoCodec, media::kVideoCodecMax)
+IPC_ENUM_TRAITS_MAX_VALUE(media::VideoCodec, media::VideoCodec::kMaxValue)
IPC_ENUM_TRAITS_MAX_VALUE(media::WaitingReason, media::WaitingReason::kMaxValue)
diff --git a/media/base/key_systems.cc b/media/base/key_systems.cc
index 9bfc335f..9205d7a 100644
--- a/media/base/key_systems.cc
+++ b/media/base/key_systems.cc
@@ -63,19 +63,19 @@
EmeCodec ToAudioEmeCodec(AudioCodec codec) {
switch (codec) {
- case kCodecAAC:
+ case AudioCodec::kAAC:
return EME_CODEC_AAC;
- case kCodecVorbis:
+ case AudioCodec::kVorbis:
return EME_CODEC_VORBIS;
- case kCodecFLAC:
+ case AudioCodec::kFLAC:
return EME_CODEC_FLAC;
- case kCodecOpus:
+ case AudioCodec::kOpus:
return EME_CODEC_OPUS;
- case kCodecEAC3:
+ case AudioCodec::kEAC3:
return EME_CODEC_EAC3;
- case kCodecAC3:
+ case AudioCodec::kAC3:
return EME_CODEC_AC3;
- case kCodecMpegHAudio:
+ case AudioCodec::kMpegHAudio:
return EME_CODEC_MPEG_H_AUDIO;
default:
DVLOG(1) << "Unsupported AudioCodec " << codec;
@@ -85,11 +85,11 @@
EmeCodec ToVideoEmeCodec(VideoCodec codec, VideoCodecProfile profile) {
switch (codec) {
- case kCodecH264:
+ case VideoCodec::kH264:
return EME_CODEC_AVC1;
- case kCodecVP8:
+ case VideoCodec::kVP8:
return EME_CODEC_VP8;
- case kCodecVP9:
+ case VideoCodec::kVP9:
// ParseVideoCodecString() returns VIDEO_CODEC_PROFILE_UNKNOWN for "vp9"
// and "vp9.0". Since these codecs are essentially the same as profile 0,
// return EME_CODEC_VP9_PROFILE0.
@@ -102,14 +102,14 @@
// Profile 1 and 3 not supported by EME. See https://crbug.com/898298.
return EME_CODEC_NONE;
}
- case kCodecHEVC:
+ case VideoCodec::kHEVC:
// Only handle Main and Main10 profiles for HEVC.
if (profile == HEVCPROFILE_MAIN)
return EME_CODEC_HEVC_PROFILE_MAIN;
if (profile == HEVCPROFILE_MAIN10)
return EME_CODEC_HEVC_PROFILE_MAIN10;
return EME_CODEC_NONE;
- case kCodecDolbyVision:
+ case VideoCodec::kDolbyVision:
// Only profiles 0, 4, 5, 7, 8, 9 are valid. Profile 0 and 9 are encoded
// based on AVC while profile 4, 5, 7 and 8 are based on HEVC.
if (profile == DOLBYVISION_PROFILE0 || profile == DOLBYVISION_PROFILE9) {
@@ -122,7 +122,7 @@
} else {
return EME_CODEC_NONE;
}
- case kCodecAV1:
+ case VideoCodec::kAV1:
return EME_CODEC_AV1;
default:
DVLOG(1) << "Unsupported VideoCodec " << codec;
@@ -381,7 +381,7 @@
return iter->second;
if (media_type == EmeMediaType::AUDIO) {
- AudioCodec audio_codec = kUnknownAudioCodec;
+ AudioCodec audio_codec = AudioCodec::kUnknown;
ParseAudioCodecString(container_mime_type, codec_string, &is_ambiguous,
&audio_codec);
DVLOG(3) << "Audio codec = " << audio_codec;
@@ -396,7 +396,7 @@
// exceptions where we need to know the profile. For example, for VP9, there
// are older CDMs only supporting profile 0, hence EmeCodec differentiate
// between VP9 profile 0 and higher profiles.
- VideoCodec video_codec = kUnknownVideoCodec;
+ VideoCodec video_codec = VideoCodec::kUnknown;
VideoCodecProfile profile = VIDEO_CODEC_PROFILE_UNKNOWN;
uint8_t level = 0;
VideoColorSpace color_space;
diff --git a/media/base/media_types.cc b/media/base/media_types.cc
index ceaddc7f..a33fb235 100644
--- a/media/base/media_types.cc
+++ b/media/base/media_types.cc
@@ -28,26 +28,26 @@
switch (config.codec()) {
// These have no notion of level.
- case kUnknownVideoCodec:
- case kCodecTheora:
- case kCodecVP8:
+ case VideoCodec::kUnknown:
+ case VideoCodec::kTheora:
+ case VideoCodec::kVP8:
// These use non-numeric levels, aren't part of our mime code, and
// are ancient with very limited support.
- case kCodecVC1:
- case kCodecMPEG2:
- case kCodecMPEG4:
+ case VideoCodec::kVC1:
+ case VideoCodec::kMPEG2:
+ case VideoCodec::kMPEG4:
break;
- case kCodecH264:
- case kCodecVP9:
- case kCodecHEVC:
+ case VideoCodec::kH264:
+ case VideoCodec::kVP9:
+ case VideoCodec::kHEVC:
// 10 is the level_idc for level 1.0.
level = 10;
break;
- case kCodecDolbyVision:
+ case VideoCodec::kDolbyVision:
// Dolby doesn't do decimals, so 1 is just 1.
level = 1;
break;
- case kCodecAV1:
+ case VideoCodec::kAV1:
// Strangely, AV1 starts at 2.0.
level = 20;
break;
diff --git a/media/base/mime_util_internal.cc b/media/base/mime_util_internal.cc
index ccfe8e9..2a0ee05 100644
--- a/media/base/mime_util_internal.cc
+++ b/media/base/mime_util_internal.cc
@@ -168,51 +168,51 @@
AudioCodec MimeUtilToAudioCodec(MimeUtil::Codec codec) {
switch (codec) {
case MimeUtil::PCM:
- return kCodecPCM;
+ return AudioCodec::kPCM;
case MimeUtil::MP3:
- return kCodecMP3;
+ return AudioCodec::kMP3;
case MimeUtil::AC3:
- return kCodecAC3;
+ return AudioCodec::kAC3;
case MimeUtil::EAC3:
- return kCodecEAC3;
+ return AudioCodec::kEAC3;
case MimeUtil::MPEG2_AAC:
case MimeUtil::MPEG4_AAC:
case MimeUtil::MPEG4_XHE_AAC:
- return kCodecAAC;
+ return AudioCodec::kAAC;
case MimeUtil::MPEG_H_AUDIO:
- return kCodecMpegHAudio;
+ return AudioCodec::kMpegHAudio;
case MimeUtil::VORBIS:
- return kCodecVorbis;
+ return AudioCodec::kVorbis;
case MimeUtil::OPUS:
- return kCodecOpus;
+ return AudioCodec::kOpus;
case MimeUtil::FLAC:
- return kCodecFLAC;
+ return AudioCodec::kFLAC;
default:
break;
}
- return kUnknownAudioCodec;
+ return AudioCodec::kUnknown;
}
VideoCodec MimeUtilToVideoCodec(MimeUtil::Codec codec) {
switch (codec) {
case MimeUtil::AV1:
- return kCodecAV1;
+ return VideoCodec::kAV1;
case MimeUtil::H264:
- return kCodecH264;
+ return VideoCodec::kH264;
case MimeUtil::HEVC:
- return kCodecHEVC;
+ return VideoCodec::kHEVC;
case MimeUtil::VP8:
- return kCodecVP8;
+ return VideoCodec::kVP8;
case MimeUtil::VP9:
- return kCodecVP9;
+ return VideoCodec::kVP9;
case MimeUtil::THEORA:
- return kCodecTheora;
+ return VideoCodec::kTheora;
case MimeUtil::DOLBY_VISION:
- return kCodecDolbyVision;
+ return VideoCodec::kDolbyVision;
default:
break;
}
- return kUnknownVideoCodec;
+ return VideoCodec::kUnknown;
}
SupportsType MimeUtil::AreSupportedCodecs(
@@ -470,7 +470,7 @@
*out_level = parsed_results[0].video_level;
*out_color_space = parsed_results[0].video_color_space;
- if (*out_codec == kUnknownVideoCodec) {
+ if (*out_codec == VideoCodec::kUnknown) {
DVLOG(3) << __func__ << " Codec string " << codec_id
<< " is not a VIDEO codec.";
return false;
@@ -504,7 +504,7 @@
*out_is_ambiguous = parsed_results[0].is_ambiguous;
*out_codec = MimeUtilToAudioCodec(parsed_results[0].codec);
- if (*out_codec == kUnknownAudioCodec) {
+ if (*out_codec == AudioCodec::kUnknown) {
DVLOG(3) << __func__ << " Codec string " << codec_id
<< " is not an AUDIO codec.";
return false;
@@ -774,7 +774,7 @@
out_result->codec = itr->second;
// Even "simple" video codecs should have an associated profile.
- if (MimeUtilToVideoCodec(out_result->codec) != kUnknownVideoCodec) {
+ if (MimeUtilToVideoCodec(out_result->codec) != VideoCodec::kUnknown) {
switch (out_result->codec) {
case Codec::VP8:
out_result->video_profile = VP8PROFILE_ANY;
@@ -875,12 +875,12 @@
DCHECK_NE(codec, INVALID_CODEC);
VideoCodec video_codec = MimeUtilToVideoCodec(codec);
- if (video_codec != kUnknownVideoCodec &&
+ if (video_codec != VideoCodec::kUnknown &&
// Theora and VP8 do not have profiles/levels.
- video_codec != kCodecTheora && video_codec != kCodecVP8 &&
+ video_codec != VideoCodec::kTheora && video_codec != VideoCodec::kVP8 &&
// TODO(dalecurtis): AV1 has levels, but they aren't supported yet;
// http://crbug.com/784993
- video_codec != kCodecAV1) {
+ video_codec != VideoCodec::kAV1) {
DCHECK_NE(video_profile, VIDEO_CODEC_PROFILE_UNKNOWN);
DCHECK_GT(video_level, 0);
}
@@ -911,7 +911,7 @@
}
AudioCodec audio_codec = MimeUtilToAudioCodec(codec);
- if (audio_codec != kUnknownAudioCodec) {
+ if (audio_codec != AudioCodec::kUnknown) {
AudioCodecProfile audio_profile = AudioCodecProfile::kUnknown;
if (codec == MPEG4_XHE_AAC)
audio_profile = AudioCodecProfile::kXHE_AAC;
@@ -920,7 +920,7 @@
return IsNotSupported;
}
- if (video_codec != kUnknownVideoCodec) {
+ if (video_codec != VideoCodec::kUnknown) {
if (!IsSupportedVideoType(
{video_codec, video_profile, video_level, color_space})) {
return IsNotSupported;
diff --git a/media/base/mime_util_unittest.cc b/media/base/mime_util_unittest.cc
index 5826cdf..4dcafd0 100644
--- a/media/base/mime_util_unittest.cc
+++ b/media/base/mime_util_unittest.cc
@@ -247,7 +247,7 @@
&out_colorspace));
if (kUsePropCodecs) {
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecH264, out_codec);
+ EXPECT_EQ(VideoCodec::kH264, out_codec);
EXPECT_EQ(H264PROFILE_BASELINE, out_profile);
EXPECT_EQ(30, out_level);
EXPECT_EQ(VideoColorSpace::REC709(), out_colorspace);
@@ -258,7 +258,7 @@
&out_is_ambiguous, &out_codec, &out_profile,
&out_level, &out_colorspace));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecVP9, out_codec);
+ EXPECT_EQ(VideoCodec::kVP9, out_codec);
EXPECT_EQ(VP9PROFILE_PROFILE0, out_profile);
EXPECT_EQ(10, out_level);
EXPECT_EQ(VideoColorSpace::REC709(), out_colorspace);
@@ -268,7 +268,7 @@
&out_is_ambiguous, &out_codec, &out_profile,
&out_level, &out_colorspace));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecVP9, out_codec);
+ EXPECT_EQ(VideoCodec::kVP9, out_codec);
EXPECT_EQ(VP9PROFILE_PROFILE2, out_profile);
EXPECT_EQ(10, out_level);
EXPECT_EQ(VideoColorSpace::REC601(), out_colorspace);
@@ -280,7 +280,7 @@
&out_profile, &out_level, &out_colorspace));
if (kUsePropCodecs) {
EXPECT_TRUE(out_is_ambiguous);
- EXPECT_EQ(kCodecH264, out_codec);
+ EXPECT_EQ(VideoCodec::kH264, out_codec);
EXPECT_EQ(VIDEO_CODEC_PROFILE_UNKNOWN, out_profile);
EXPECT_EQ(0, out_level);
EXPECT_EQ(VideoColorSpace::REC709(), out_colorspace);
@@ -316,7 +316,7 @@
&out_codec, &out_profile, &out_level,
&out_colorspace));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecH264, out_codec);
+ EXPECT_EQ(VideoCodec::kH264, out_codec);
EXPECT_EQ(H264PROFILE_BASELINE, out_profile);
EXPECT_EQ(30, out_level);
EXPECT_EQ(VideoColorSpace::REC709(), out_colorspace);
@@ -326,7 +326,7 @@
&out_codec, &out_profile, &out_level,
&out_colorspace));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecVP9, out_codec);
+ EXPECT_EQ(VideoCodec::kVP9, out_codec);
EXPECT_EQ(VP9PROFILE_PROFILE0, out_profile);
EXPECT_EQ(10, out_level);
EXPECT_EQ(VideoColorSpace::REC709(), out_colorspace);
@@ -335,7 +335,7 @@
&out_is_ambiguous, &out_codec, &out_profile,
&out_level, &out_colorspace));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecVP9, out_codec);
+ EXPECT_EQ(VideoCodec::kVP9, out_codec);
EXPECT_EQ(VP9PROFILE_PROFILE2, out_profile);
EXPECT_EQ(10, out_level);
EXPECT_EQ(VideoColorSpace::REC601(), out_colorspace);
@@ -344,7 +344,7 @@
EXPECT_TRUE(ParseVideoCodecString("", "avc3", &out_is_ambiguous, &out_codec,
&out_profile, &out_level, &out_colorspace));
EXPECT_TRUE(out_is_ambiguous);
- EXPECT_EQ(kCodecH264, out_codec);
+ EXPECT_EQ(VideoCodec::kH264, out_codec);
EXPECT_EQ(VIDEO_CODEC_PROFILE_UNKNOWN, out_profile);
EXPECT_EQ(0, out_level);
EXPECT_EQ(VideoColorSpace::REC709(), out_colorspace);
@@ -368,7 +368,7 @@
EXPECT_TRUE(ParseAudioCodecString("audio/webm", "opus", &out_is_ambiguous,
&out_codec));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecOpus, out_codec);
+ EXPECT_EQ(AudioCodec::kOpus, out_codec);
// Valid AAC string when proprietary codecs are supported.
EXPECT_EQ(kUsePropCodecs,
@@ -376,14 +376,14 @@
&out_codec));
if (kUsePropCodecs) {
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecAAC, out_codec);
+ EXPECT_EQ(AudioCodec::kAAC, out_codec);
}
// Valid FLAC string with MP4. Neither decoding nor demuxing is proprietary.
EXPECT_TRUE(ParseAudioCodecString("audio/mp4", "flac", &out_is_ambiguous,
&out_codec));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecFLAC, out_codec);
+ EXPECT_EQ(AudioCodec::kFLAC, out_codec);
// Ambiguous AAC string.
// TODO(chcunningha): This can probably be allowed. I think we treat all
@@ -393,20 +393,20 @@
&out_codec));
if (kUsePropCodecs) {
EXPECT_TRUE(out_is_ambiguous);
- EXPECT_EQ(kCodecAAC, out_codec);
+ EXPECT_EQ(AudioCodec::kAAC, out_codec);
}
// Valid empty codec string. Codec unambiguously implied by mime type.
EXPECT_TRUE(
ParseAudioCodecString("audio/flac", "", &out_is_ambiguous, &out_codec));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecFLAC, out_codec);
+ EXPECT_EQ(AudioCodec::kFLAC, out_codec);
// Valid audio codec should still be allowed with video mime type.
EXPECT_TRUE(ParseAudioCodecString("video/webm", "opus", &out_is_ambiguous,
&out_codec));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecOpus, out_codec);
+ EXPECT_EQ(AudioCodec::kOpus, out_codec);
// Video codec is not valid for audio API.
EXPECT_FALSE(ParseAudioCodecString("audio/webm", "vp09.00.10.08",
@@ -427,18 +427,18 @@
// Valid Opus string.
EXPECT_TRUE(ParseAudioCodecString("", "opus", &out_is_ambiguous, &out_codec));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecOpus, out_codec);
+ EXPECT_EQ(AudioCodec::kOpus, out_codec);
// Valid AAC string when proprietary codecs are supported.
EXPECT_TRUE(
ParseAudioCodecString("", "mp4a.40.2", &out_is_ambiguous, &out_codec));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecAAC, out_codec);
+ EXPECT_EQ(AudioCodec::kAAC, out_codec);
// Valid FLAC string. Neither decoding nor demuxing is proprietary.
EXPECT_TRUE(ParseAudioCodecString("", "flac", &out_is_ambiguous, &out_codec));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecFLAC, out_codec);
+ EXPECT_EQ(AudioCodec::kFLAC, out_codec);
// Ambiguous AAC string.
// TODO(chcunningha): This can probably be allowed. I think we treat all
@@ -447,7 +447,7 @@
ParseAudioCodecString("", "mp4a.40", &out_is_ambiguous, &out_codec));
if (kUsePropCodecs) {
EXPECT_TRUE(out_is_ambiguous);
- EXPECT_EQ(kCodecAAC, out_codec);
+ EXPECT_EQ(AudioCodec::kAAC, out_codec);
}
// Video codec is not valid for audio API.
@@ -468,26 +468,26 @@
EXPECT_TRUE(ParseAudioCodecString("audio/mpeg", "mp3", &out_is_ambiguous,
&out_codec));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecMP3, out_codec);
+ EXPECT_EQ(AudioCodec::kMP3, out_codec);
EXPECT_TRUE(
ParseAudioCodecString("audio/mpeg", "", &out_is_ambiguous, &out_codec));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecMP3, out_codec);
+ EXPECT_EQ(AudioCodec::kMP3, out_codec);
EXPECT_TRUE(ParseAudioCodecString("", "mp3", &out_is_ambiguous, &out_codec));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecMP3, out_codec);
+ EXPECT_EQ(AudioCodec::kMP3, out_codec);
EXPECT_TRUE(
ParseAudioCodecString("", "mp4a.69", &out_is_ambiguous, &out_codec));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecMP3, out_codec);
+ EXPECT_EQ(AudioCodec::kMP3, out_codec);
EXPECT_TRUE(
ParseAudioCodecString("", "mp4a.6B", &out_is_ambiguous, &out_codec));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecMP3, out_codec);
+ EXPECT_EQ(AudioCodec::kMP3, out_codec);
}
// These codecs really only have one profile. Ensure that |out_profile| is
@@ -504,7 +504,7 @@
&out_codec, &out_profile, &out_level,
&out_colorspace));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecVP8, out_codec);
+ EXPECT_EQ(VideoCodec::kVP8, out_codec);
EXPECT_EQ(VP8PROFILE_ANY, out_profile);
EXPECT_EQ(0, out_level);
EXPECT_EQ(VideoColorSpace::REC709(), out_colorspace);
@@ -520,7 +520,7 @@
&out_codec, &out_profile, &out_level,
&out_colorspace));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecTheora, out_codec);
+ EXPECT_EQ(VideoCodec::kTheora, out_codec);
EXPECT_EQ(THEORAPROFILE_ANY, out_profile);
EXPECT_EQ(0, out_level);
EXPECT_EQ(VideoColorSpace::REC709(), out_colorspace);
diff --git a/media/base/supported_types.cc b/media/base/supported_types.cc
index 0756462..9c4e2ea 100644
--- a/media/base/supported_types.cc
+++ b/media/base/supported_types.cc
@@ -206,26 +206,26 @@
bool IsAudioCodecProprietary(AudioCodec codec) {
switch (codec) {
- case kCodecAAC:
- case kCodecAC3:
- case kCodecEAC3:
- case kCodecAMR_NB:
- case kCodecAMR_WB:
- case kCodecGSM_MS:
- case kCodecALAC:
- case kCodecMpegHAudio:
+ case AudioCodec::kAAC:
+ case AudioCodec::kAC3:
+ case AudioCodec::kEAC3:
+ case AudioCodec::kAMR_NB:
+ case AudioCodec::kAMR_WB:
+ case AudioCodec::kGSM_MS:
+ case AudioCodec::kALAC:
+ case AudioCodec::kMpegHAudio:
return true;
- case kCodecFLAC:
- case kCodecMP3:
- case kCodecOpus:
- case kCodecVorbis:
- case kCodecPCM:
- case kCodecPCM_MULAW:
- case kCodecPCM_S16BE:
- case kCodecPCM_S24BE:
- case kCodecPCM_ALAW:
- case kUnknownAudioCodec:
+ case AudioCodec::kFLAC:
+ case AudioCodec::kMP3:
+ case AudioCodec::kOpus:
+ case AudioCodec::kVorbis:
+ case AudioCodec::kPCM:
+ case AudioCodec::kPCM_MULAW:
+ case AudioCodec::kPCM_S16BE:
+ case AudioCodec::kPCM_S24BE:
+ case AudioCodec::kPCM_ALAW:
+ case AudioCodec::kUnknown:
return false;
}
@@ -243,7 +243,7 @@
#endif
switch (type.codec) {
- case kCodecAAC:
+ case AudioCodec::kAAC:
if (type.profile != AudioCodecProfile::kXHE_AAC)
return true;
#if defined(OS_ANDROID)
@@ -253,31 +253,31 @@
return false;
#endif
- case kCodecFLAC:
- case kCodecMP3:
- case kCodecOpus:
- case kCodecPCM:
- case kCodecPCM_MULAW:
- case kCodecPCM_S16BE:
- case kCodecPCM_S24BE:
- case kCodecPCM_ALAW:
- case kCodecVorbis:
+ case AudioCodec::kFLAC:
+ case AudioCodec::kMP3:
+ case AudioCodec::kOpus:
+ case AudioCodec::kPCM:
+ case AudioCodec::kPCM_MULAW:
+ case AudioCodec::kPCM_S16BE:
+ case AudioCodec::kPCM_S24BE:
+ case AudioCodec::kPCM_ALAW:
+ case AudioCodec::kVorbis:
return true;
- case kCodecAMR_NB:
- case kCodecAMR_WB:
- case kCodecGSM_MS:
+ case AudioCodec::kAMR_NB:
+ case AudioCodec::kAMR_WB:
+ case AudioCodec::kGSM_MS:
#if BUILDFLAG(IS_CHROMEOS_ASH)
return true;
#else
return false;
#endif
- case kCodecEAC3:
- case kCodecALAC:
- case kCodecAC3:
- case kCodecMpegHAudio:
- case kUnknownAudioCodec:
+ case AudioCodec::kEAC3:
+ case AudioCodec::kALAC:
+ case AudioCodec::kAC3:
+ case AudioCodec::kMpegHAudio:
+ case AudioCodec::kUnknown:
return false;
}
@@ -287,18 +287,18 @@
bool IsVideoCodecProprietary(VideoCodec codec) {
switch (codec) {
- case kCodecVC1:
- case kCodecH264:
- case kCodecMPEG2:
- case kCodecMPEG4:
- case kCodecHEVC:
- case kCodecDolbyVision:
+ case VideoCodec::kVC1:
+ case VideoCodec::kH264:
+ case VideoCodec::kMPEG2:
+ case VideoCodec::kMPEG4:
+ case VideoCodec::kHEVC:
+ case VideoCodec::kDolbyVision:
return true;
- case kUnknownVideoCodec:
- case kCodecTheora:
- case kCodecVP8:
- case kCodecVP9:
- case kCodecAV1:
+ case VideoCodec::kUnknown:
+ case VideoCodec::kTheora:
+ case VideoCodec::kVP8:
+ case VideoCodec::kVP9:
+ case VideoCodec::kAV1:
return false;
}
@@ -318,7 +318,7 @@
#endif
switch (type.codec) {
- case kCodecAV1:
+ case VideoCodec::kAV1:
// If the AV1 decoder is enabled, or if we're on Q or later, yes.
#if BUILDFLAG(ENABLE_AV1_DECODER)
return IsColorSpaceSupported(type.color_space);
@@ -333,29 +333,29 @@
return false;
#endif
- case kCodecVP9:
+ case VideoCodec::kVP9:
// Color management required for HDR to not look terrible.
return IsColorSpaceSupported(type.color_space) &&
IsVp9ProfileSupported(type.profile);
- case kCodecH264:
- case kCodecVP8:
- case kCodecTheora:
+ case VideoCodec::kH264:
+ case VideoCodec::kVP8:
+ case VideoCodec::kTheora:
return true;
- case kCodecHEVC:
+ case VideoCodec::kHEVC:
#if BUILDFLAG(ENABLE_PLATFORM_ENCRYPTED_HEVC)
return IsColorSpaceSupported(type.color_space) &&
IsHevcProfileSupported(type.profile);
#else
return false;
#endif // BUILDFLAG(ENABLE_PLATFORM_ENCRYPTED_HEVC)
- case kUnknownVideoCodec:
- case kCodecVC1:
- case kCodecMPEG2:
- case kCodecDolbyVision:
+ case VideoCodec::kUnknown:
+ case VideoCodec::kVC1:
+ case VideoCodec::kMPEG2:
+ case VideoCodec::kDolbyVision:
return false;
- case kCodecMPEG4:
+ case VideoCodec::kMPEG4:
#if BUILDFLAG(IS_CHROMEOS_ASH)
return true;
#else
diff --git a/media/base/supported_types_unittest.cc b/media/base/supported_types_unittest.cc
index eddc5b3..93f3c2a 100644
--- a/media/base/supported_types_unittest.cc
+++ b/media/base/supported_types_unittest.cc
@@ -35,30 +35,35 @@
// Expect support for baseline configuration of known codecs.
EXPECT_TRUE(IsSupportedVideoType(
- {kCodecVP8, VP8PROFILE_ANY, kUnspecifiedLevel, kColorSpace}));
+ {VideoCodec::kVP8, VP8PROFILE_ANY, kUnspecifiedLevel, kColorSpace}));
EXPECT_TRUE(IsSupportedVideoType(
- {kCodecVP9, VP9PROFILE_PROFILE0, kUnspecifiedLevel, kColorSpace}));
- EXPECT_TRUE(IsSupportedVideoType({kCodecTheora, VIDEO_CODEC_PROFILE_UNKNOWN,
- kUnspecifiedLevel, kColorSpace}));
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE0, kUnspecifiedLevel, kColorSpace}));
+ EXPECT_TRUE(
+ IsSupportedVideoType({VideoCodec::kTheora, VIDEO_CODEC_PROFILE_UNKNOWN,
+ kUnspecifiedLevel, kColorSpace}));
// Expect non-support for the following.
EXPECT_FALSE(
- IsSupportedVideoType({kUnknownVideoCodec, VIDEO_CODEC_PROFILE_UNKNOWN,
+ IsSupportedVideoType({VideoCodec::kUnknown, VIDEO_CODEC_PROFILE_UNKNOWN,
kUnspecifiedLevel, kColorSpace}));
- EXPECT_FALSE(IsSupportedVideoType({kCodecVC1, VIDEO_CODEC_PROFILE_UNKNOWN,
- kUnspecifiedLevel, kColorSpace}));
- EXPECT_FALSE(IsSupportedVideoType({kCodecMPEG2, VIDEO_CODEC_PROFILE_UNKNOWN,
- kUnspecifiedLevel, kColorSpace}));
- EXPECT_FALSE(IsSupportedVideoType({kCodecHEVC, VIDEO_CODEC_PROFILE_UNKNOWN,
- kUnspecifiedLevel, kColorSpace}));
+ EXPECT_FALSE(
+ IsSupportedVideoType({VideoCodec::kVC1, VIDEO_CODEC_PROFILE_UNKNOWN,
+ kUnspecifiedLevel, kColorSpace}));
+ EXPECT_FALSE(
+ IsSupportedVideoType({VideoCodec::kMPEG2, VIDEO_CODEC_PROFILE_UNKNOWN,
+ kUnspecifiedLevel, kColorSpace}));
+ EXPECT_FALSE(
+ IsSupportedVideoType({VideoCodec::kHEVC, VIDEO_CODEC_PROFILE_UNKNOWN,
+ kUnspecifiedLevel, kColorSpace}));
// Expect conditional support for the following.
+ EXPECT_EQ(kPropCodecsEnabled,
+ IsSupportedVideoType(
+ {VideoCodec::kH264, H264PROFILE_BASELINE, 1, kColorSpace}));
EXPECT_EQ(
- kPropCodecsEnabled,
- IsSupportedVideoType({kCodecH264, H264PROFILE_BASELINE, 1, kColorSpace}));
- EXPECT_EQ(kMpeg4Supported,
- IsSupportedVideoType({kCodecMPEG4, VIDEO_CODEC_PROFILE_UNKNOWN,
- kUnspecifiedLevel, kColorSpace}));
+ kMpeg4Supported,
+ IsSupportedVideoType({VideoCodec::kMPEG4, VIDEO_CODEC_PROFILE_UNKNOWN,
+ kUnspecifiedLevel, kColorSpace}));
}
TEST(SupportedTypesTest, IsSupportedVideoType_VP9TransferFunctions) {
@@ -91,8 +96,9 @@
kSupportedTransfers.end();
if (found)
num_found++;
- EXPECT_EQ(found, IsSupportedVideoType(
- {kCodecVP9, VP9PROFILE_PROFILE0, 1, color_space}));
+ EXPECT_EQ(found,
+ IsSupportedVideoType(
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE0, 1, color_space}));
}
EXPECT_EQ(kSupportedTransfers.size(), num_found);
}
@@ -121,8 +127,9 @@
kSupportedPrimaries.end();
if (found)
num_found++;
- EXPECT_EQ(found, IsSupportedVideoType(
- {kCodecVP9, VP9PROFILE_PROFILE0, 1, color_space}));
+ EXPECT_EQ(found,
+ IsSupportedVideoType(
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE0, 1, color_space}));
}
EXPECT_EQ(kSupportedPrimaries.size(), num_found);
}
@@ -151,8 +158,9 @@
kSupportedMatrix.find(color_space.matrix) != kSupportedMatrix.end();
if (found)
num_found++;
- EXPECT_EQ(found, IsSupportedVideoType(
- {kCodecVP9, VP9PROFILE_PROFILE0, 1, color_space}));
+ EXPECT_EQ(found,
+ IsSupportedVideoType(
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE0, 1, color_space}));
}
EXPECT_EQ(kSupportedMatrix.size(), num_found);
}
@@ -165,9 +173,9 @@
const int kUnspecifiedLevel = 0;
EXPECT_TRUE(IsSupportedVideoType(
- {kCodecVP9, VP9PROFILE_PROFILE0, kUnspecifiedLevel, kColorSpace}));
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE0, kUnspecifiedLevel, kColorSpace}));
EXPECT_TRUE(IsSupportedVideoType(
- {kCodecVP9, VP9PROFILE_PROFILE1, kUnspecifiedLevel, kColorSpace}));
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE1, kUnspecifiedLevel, kColorSpace}));
// VP9 Profile2 are supported on x86, ChromeOS on ARM and Mac/Win on ARM64.
// See third_party/libvpx/BUILD.gn.
@@ -175,7 +183,7 @@
(defined(ARCH_CPU_ARM_FAMILY) && BUILDFLAG(IS_CHROMEOS_ASH)) || \
(defined(ARCH_CPU_ARM64) && (defined(OS_MAC) || defined(OS_WIN)))
EXPECT_TRUE(IsSupportedVideoType(
- {kCodecVP9, VP9PROFILE_PROFILE2, kUnspecifiedLevel, kColorSpace}));
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE2, kUnspecifiedLevel, kColorSpace}));
#endif
}
@@ -184,43 +192,53 @@
// Dolby Atmos = E-AC3 (Dolby Digital Plus) + spatialRendering. Currently not
// supported.
EXPECT_FALSE(IsSupportedAudioType(
- {kCodecEAC3, AudioCodecProfile::kUnknown, is_spatial_rendering}));
+ {AudioCodec::kEAC3, AudioCodecProfile::kUnknown, is_spatial_rendering}));
// Expect non-support for codecs with which there is no spatial audio format.
EXPECT_FALSE(IsSupportedAudioType(
- {kCodecAAC, AudioCodecProfile::kUnknown, is_spatial_rendering}));
+ {AudioCodec::kAAC, AudioCodecProfile::kUnknown, is_spatial_rendering}));
EXPECT_FALSE(IsSupportedAudioType(
- {kCodecMP3, AudioCodecProfile::kUnknown, is_spatial_rendering}));
+ {AudioCodec::kMP3, AudioCodecProfile::kUnknown, is_spatial_rendering}));
EXPECT_FALSE(IsSupportedAudioType(
- {kCodecPCM, AudioCodecProfile::kUnknown, is_spatial_rendering}));
+ {AudioCodec::kPCM, AudioCodecProfile::kUnknown, is_spatial_rendering}));
+ EXPECT_FALSE(
+ IsSupportedAudioType({AudioCodec::kVorbis, AudioCodecProfile::kUnknown,
+ is_spatial_rendering}));
EXPECT_FALSE(IsSupportedAudioType(
- {kCodecVorbis, AudioCodecProfile::kUnknown, is_spatial_rendering}));
+ {AudioCodec::kFLAC, AudioCodecProfile::kUnknown, is_spatial_rendering}));
+ EXPECT_FALSE(
+ IsSupportedAudioType({AudioCodec::kAMR_NB, AudioCodecProfile::kUnknown,
+ is_spatial_rendering}));
+ EXPECT_FALSE(
+ IsSupportedAudioType({AudioCodec::kAMR_WB, AudioCodecProfile::kUnknown,
+ is_spatial_rendering}));
+ EXPECT_FALSE(
+ IsSupportedAudioType({AudioCodec::kPCM_MULAW, AudioCodecProfile::kUnknown,
+ is_spatial_rendering}));
+ EXPECT_FALSE(
+ IsSupportedAudioType({AudioCodec::kGSM_MS, AudioCodecProfile::kUnknown,
+ is_spatial_rendering}));
+ EXPECT_FALSE(
+ IsSupportedAudioType({AudioCodec::kPCM_S16BE, AudioCodecProfile::kUnknown,
+ is_spatial_rendering}));
+ EXPECT_FALSE(
+ IsSupportedAudioType({AudioCodec::kPCM_S24BE, AudioCodecProfile::kUnknown,
+ is_spatial_rendering}));
EXPECT_FALSE(IsSupportedAudioType(
- {kCodecFLAC, AudioCodecProfile::kUnknown, is_spatial_rendering}));
+ {AudioCodec::kOpus, AudioCodecProfile::kUnknown, is_spatial_rendering}));
+ EXPECT_FALSE(
+ IsSupportedAudioType({AudioCodec::kPCM_ALAW, AudioCodecProfile::kUnknown,
+ is_spatial_rendering}));
EXPECT_FALSE(IsSupportedAudioType(
- {kCodecAMR_NB, AudioCodecProfile::kUnknown, is_spatial_rendering}));
+ {AudioCodec::kALAC, AudioCodecProfile::kUnknown, is_spatial_rendering}));
EXPECT_FALSE(IsSupportedAudioType(
- {kCodecAMR_WB, AudioCodecProfile::kUnknown, is_spatial_rendering}));
- EXPECT_FALSE(IsSupportedAudioType(
- {kCodecPCM_MULAW, AudioCodecProfile::kUnknown, is_spatial_rendering}));
- EXPECT_FALSE(IsSupportedAudioType(
- {kCodecGSM_MS, AudioCodecProfile::kUnknown, is_spatial_rendering}));
- EXPECT_FALSE(IsSupportedAudioType(
- {kCodecPCM_S16BE, AudioCodecProfile::kUnknown, is_spatial_rendering}));
- EXPECT_FALSE(IsSupportedAudioType(
- {kCodecPCM_S24BE, AudioCodecProfile::kUnknown, is_spatial_rendering}));
- EXPECT_FALSE(IsSupportedAudioType(
- {kCodecOpus, AudioCodecProfile::kUnknown, is_spatial_rendering}));
- EXPECT_FALSE(IsSupportedAudioType(
- {kCodecPCM_ALAW, AudioCodecProfile::kUnknown, is_spatial_rendering}));
- EXPECT_FALSE(IsSupportedAudioType(
- {kCodecALAC, AudioCodecProfile::kUnknown, is_spatial_rendering}));
- EXPECT_FALSE(IsSupportedAudioType(
- {kCodecAC3, AudioCodecProfile::kUnknown, is_spatial_rendering}));
- EXPECT_FALSE(IsSupportedAudioType(
- {kCodecMpegHAudio, AudioCodecProfile::kUnknown, is_spatial_rendering}));
- EXPECT_FALSE(IsSupportedAudioType(
- {kUnknownAudioCodec, AudioCodecProfile::kUnknown, is_spatial_rendering}));
+ {AudioCodec::kAC3, AudioCodecProfile::kUnknown, is_spatial_rendering}));
+ EXPECT_FALSE(IsSupportedAudioType({AudioCodec::kMpegHAudio,
+ AudioCodecProfile::kUnknown,
+ is_spatial_rendering}));
+ EXPECT_FALSE(
+ IsSupportedAudioType({AudioCodec::kUnknown, AudioCodecProfile::kUnknown,
+ is_spatial_rendering}));
}
TEST(SupportedTypesTest, XHE_AACSupportedOnAndroidOnly) {
@@ -231,11 +249,12 @@
base::android::BuildInfo::GetInstance()->sdk_int() >=
base::android::SDK_VERSION_P;
- EXPECT_EQ(is_supported, IsSupportedAudioType(
- {kCodecAAC, AudioCodecProfile::kXHE_AAC, false}));
+ EXPECT_EQ(is_supported,
+ IsSupportedAudioType(
+ {AudioCodec::kAAC, AudioCodecProfile::kXHE_AAC, false}));
#else
- EXPECT_FALSE(
- IsSupportedAudioType({kCodecAAC, AudioCodecProfile::kXHE_AAC, false}));
+ EXPECT_FALSE(IsSupportedAudioType(
+ {AudioCodec::kAAC, AudioCodecProfile::kXHE_AAC, false}));
#endif
}
@@ -248,42 +267,45 @@
// Expect support for baseline configuration of known codecs.
EXPECT_TRUE(IsSupportedVideoType(
- {kCodecVP8, VP8PROFILE_ANY, kUnspecifiedLevel, color_space}));
+ {VideoCodec::kVP8, VP8PROFILE_ANY, kUnspecifiedLevel, color_space}));
EXPECT_TRUE(IsSupportedVideoType(
- {kCodecVP9, VP9PROFILE_PROFILE0, kUnspecifiedLevel, color_space}));
- EXPECT_TRUE(IsSupportedVideoType({kCodecTheora, VIDEO_CODEC_PROFILE_UNKNOWN,
- kUnspecifiedLevel, color_space}));
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE0, kUnspecifiedLevel, color_space}));
+ EXPECT_TRUE(
+ IsSupportedVideoType({VideoCodec::kTheora, VIDEO_CODEC_PROFILE_UNKNOWN,
+ kUnspecifiedLevel, color_space}));
// All combinations of combinations of color gamuts and transfer functions
// should be supported.
color_space.primaries = VideoColorSpace::PrimaryID::SMPTEST431_2;
color_space.transfer = VideoColorSpace::TransferID::SMPTEST2084;
EXPECT_TRUE(IsSupportedVideoType(
- {kCodecVP8, VP8PROFILE_ANY, kUnspecifiedLevel, color_space}));
+ {VideoCodec::kVP8, VP8PROFILE_ANY, kUnspecifiedLevel, color_space}));
EXPECT_TRUE(IsSupportedVideoType(
- {kCodecVP9, VP9PROFILE_PROFILE0, kUnspecifiedLevel, color_space}));
- EXPECT_TRUE(IsSupportedVideoType({kCodecTheora, VIDEO_CODEC_PROFILE_UNKNOWN,
- kUnspecifiedLevel, color_space}));
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE0, kUnspecifiedLevel, color_space}));
+ EXPECT_TRUE(
+ IsSupportedVideoType({VideoCodec::kTheora, VIDEO_CODEC_PROFILE_UNKNOWN,
+ kUnspecifiedLevel, color_space}));
color_space.primaries = VideoColorSpace::PrimaryID::BT2020;
color_space.transfer = VideoColorSpace::TransferID::ARIB_STD_B67;
EXPECT_TRUE(IsSupportedVideoType(
- {kCodecVP8, VP8PROFILE_ANY, kUnspecifiedLevel, color_space}));
+ {VideoCodec::kVP8, VP8PROFILE_ANY, kUnspecifiedLevel, color_space}));
EXPECT_TRUE(IsSupportedVideoType(
- {kCodecVP9, VP9PROFILE_PROFILE0, kUnspecifiedLevel, color_space}));
- EXPECT_TRUE(IsSupportedVideoType({kCodecTheora, VIDEO_CODEC_PROFILE_UNKNOWN,
- kUnspecifiedLevel, color_space}));
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE0, kUnspecifiedLevel, color_space}));
+ EXPECT_TRUE(
+ IsSupportedVideoType({VideoCodec::kTheora, VIDEO_CODEC_PROFILE_UNKNOWN,
+ kUnspecifiedLevel, color_space}));
// No HDR metadata types are supported.
EXPECT_FALSE(
- IsSupportedVideoType({kCodecVP8, VP8PROFILE_ANY, kUnspecifiedLevel,
+ IsSupportedVideoType({VideoCodec::kVP8, VP8PROFILE_ANY, kUnspecifiedLevel,
color_space, gfx::HdrMetadataType::kSmpteSt2086}));
- EXPECT_FALSE(IsSupportedVideoType({kCodecVP8, VP8PROFILE_ANY,
+ EXPECT_FALSE(IsSupportedVideoType({VideoCodec::kVP8, VP8PROFILE_ANY,
kUnspecifiedLevel, color_space,
gfx::HdrMetadataType::kSmpteSt2094_10}));
- EXPECT_FALSE(IsSupportedVideoType({kCodecVP8, VP8PROFILE_ANY,
+ EXPECT_FALSE(IsSupportedVideoType({VideoCodec::kVP8, VP8PROFILE_ANY,
kUnspecifiedLevel, color_space,
gfx::HdrMetadataType::kSmpteSt2094_40}));
}
diff --git a/media/base/supported_video_decoder_config_unittest.cc b/media/base/supported_video_decoder_config_unittest.cc
index dd3dabd..14f0ad9 100644
--- a/media/base/supported_video_decoder_config_unittest.cc
+++ b/media/base/supported_video_decoder_config_unittest.cc
@@ -13,7 +13,7 @@
public:
SupportedVideoDecoderConfigTest()
: decoder_config_(
- TestVideoConfig::NormalCodecProfile(kCodecH264,
+ TestVideoConfig::NormalCodecProfile(VideoCodec::kH264,
H264PROFILE_EXTENDED)) {
supported_config_.profile_min = H264PROFILE_MIN;
supported_config_.profile_max = H264PROFILE_MAX;
diff --git a/media/base/test_helpers.cc b/media/base/test_helpers.cc
index 3f2d500..a046a91 100644
--- a/media/base/test_helpers.cc
+++ b/media/base/test_helpers.cc
@@ -146,24 +146,24 @@
static VideoCodecProfile MinProfile(VideoCodec codec) {
switch (codec) {
- case kUnknownVideoCodec:
- case kCodecVC1:
- case kCodecMPEG2:
- case kCodecMPEG4:
+ case VideoCodec::kUnknown:
+ case VideoCodec::kVC1:
+ case VideoCodec::kMPEG2:
+ case VideoCodec::kMPEG4:
return VIDEO_CODEC_PROFILE_UNKNOWN;
- case kCodecH264:
+ case VideoCodec::kH264:
return H264PROFILE_MIN;
- case kCodecTheora:
+ case VideoCodec::kTheora:
return THEORAPROFILE_MIN;
- case kCodecVP8:
+ case VideoCodec::kVP8:
return VP8PROFILE_MIN;
- case kCodecVP9:
+ case VideoCodec::kVP9:
return VP9PROFILE_MIN;
- case kCodecHEVC:
+ case VideoCodec::kHEVC:
return HEVCPROFILE_MIN;
- case kCodecDolbyVision:
+ case VideoCodec::kDolbyVision:
return DOLBYVISION_PROFILE0;
- case kCodecAV1:
+ case VideoCodec::kAV1:
return AV1PROFILE_MIN;
}
}
@@ -174,7 +174,7 @@
// static
VideoDecoderConfig TestVideoConfig::Invalid() {
- return GetTestConfig(kUnknownVideoCodec, VIDEO_CODEC_PROFILE_UNKNOWN,
+ return GetTestConfig(VideoCodec::kUnknown, VIDEO_CODEC_PROFILE_UNKNOWN,
VideoColorSpace::JPEG(), VIDEO_ROTATION_0, kNormalSize,
false);
}
@@ -195,7 +195,7 @@
// static
VideoDecoderConfig TestVideoConfig::NormalH264(VideoCodecProfile config) {
- return GetTestConfig(kCodecH264, MinProfile(kCodecH264),
+ return GetTestConfig(VideoCodec::kH264, MinProfile(VideoCodec::kH264),
VideoColorSpace::JPEG(), VIDEO_ROTATION_0, kNormalSize,
false);
}
@@ -217,7 +217,7 @@
// static
VideoDecoderConfig TestVideoConfig::NormalRotated(VideoRotation rotation) {
- return GetTestConfig(kCodecVP8, MinProfile(kCodecVP8),
+ return GetTestConfig(VideoCodec::kVP8, MinProfile(VideoCodec::kVP8),
VideoColorSpace::JPEG(), rotation, kNormalSize, false);
}
@@ -274,25 +274,25 @@
}
AudioDecoderConfig TestAudioConfig::Normal() {
- return AudioDecoderConfig(kCodecVorbis, kSampleFormatPlanarF32,
+ return AudioDecoderConfig(AudioCodec::kVorbis, kSampleFormatPlanarF32,
CHANNEL_LAYOUT_STEREO, NormalSampleRateValue(),
EmptyExtraData(), EncryptionScheme::kUnencrypted);
}
AudioDecoderConfig TestAudioConfig::NormalEncrypted() {
- return AudioDecoderConfig(kCodecVorbis, kSampleFormatPlanarF32,
+ return AudioDecoderConfig(AudioCodec::kVorbis, kSampleFormatPlanarF32,
CHANNEL_LAYOUT_STEREO, NormalSampleRateValue(),
EmptyExtraData(), EncryptionScheme::kCenc);
}
AudioDecoderConfig TestAudioConfig::HighSampleRate() {
- return AudioDecoderConfig(kCodecVorbis, kSampleFormatPlanarF32,
+ return AudioDecoderConfig(AudioCodec::kVorbis, kSampleFormatPlanarF32,
CHANNEL_LAYOUT_STEREO, HighSampleRateValue(),
EmptyExtraData(), EncryptionScheme::kUnencrypted);
}
AudioDecoderConfig TestAudioConfig::HighSampleRateEncrypted() {
- return AudioDecoderConfig(kCodecVorbis, kSampleFormatPlanarF32,
+ return AudioDecoderConfig(AudioCodec::kVorbis, kSampleFormatPlanarF32,
CHANNEL_LAYOUT_STEREO, HighSampleRateValue(),
EmptyExtraData(), EncryptionScheme::kCenc);
}
diff --git a/media/base/test_helpers.h b/media/base/test_helpers.h
index 81e73c76..17809f15 100644
--- a/media/base/test_helpers.h
+++ b/media/base/test_helpers.h
@@ -90,30 +90,32 @@
// Returns a configuration that is invalid.
static VideoDecoderConfig Invalid();
- static VideoDecoderConfig Normal(VideoCodec codec = kCodecVP8);
+ static VideoDecoderConfig Normal(VideoCodec codec = VideoCodec::kVP8);
static VideoDecoderConfig NormalWithColorSpace(
VideoCodec codec,
const VideoColorSpace& color_space);
static VideoDecoderConfig NormalH264(VideoCodecProfile = H264PROFILE_MIN);
static VideoDecoderConfig NormalCodecProfile(
- VideoCodec codec = kCodecVP8,
+ VideoCodec codec = VideoCodec::kVP8,
VideoCodecProfile profile = VP8PROFILE_MIN);
- static VideoDecoderConfig NormalEncrypted(VideoCodec codec = kCodecVP8,
+ static VideoDecoderConfig NormalEncrypted(VideoCodec codec = VideoCodec::kVP8,
VideoCodecProfile = VP8PROFILE_MIN);
static VideoDecoderConfig NormalRotated(VideoRotation rotation);
// Returns a configuration that is larger in dimensions than Normal().
- static VideoDecoderConfig Large(VideoCodec codec = kCodecVP8);
- static VideoDecoderConfig LargeEncrypted(VideoCodec codec = kCodecVP8);
+ static VideoDecoderConfig Large(VideoCodec codec = VideoCodec::kVP8);
+ static VideoDecoderConfig LargeEncrypted(VideoCodec codec = VideoCodec::kVP8);
// Returns a configuration that is larger in dimensions that Large().
- static VideoDecoderConfig ExtraLarge(VideoCodec codec = kCodecVP8);
- static VideoDecoderConfig ExtraLargeEncrypted(VideoCodec codec = kCodecVP8);
+ static VideoDecoderConfig ExtraLarge(VideoCodec codec = VideoCodec::kVP8);
+ static VideoDecoderConfig ExtraLargeEncrypted(
+ VideoCodec codec = VideoCodec::kVP8);
static VideoDecoderConfig Custom(gfx::Size size,
- VideoCodec codec = kCodecVP8);
- static VideoDecoderConfig CustomEncrypted(gfx::Size size,
- VideoCodec codec = kCodecVP8);
+ VideoCodec codec = VideoCodec::kVP8);
+ static VideoDecoderConfig CustomEncrypted(
+ gfx::Size size,
+ VideoCodec codec = VideoCodec::kVP8);
// Returns coded size for Normal and Large config.
static gfx::Size NormalCodedSize();
diff --git a/media/base/video_codecs.cc b/media/base/video_codecs.cc
index 6c58bba..8d2b7e3 100644
--- a/media/base/video_codecs.cc
+++ b/media/base/video_codecs.cc
@@ -17,27 +17,27 @@
// The names come from src/third_party/ffmpeg/libavcodec/codec_desc.c
std::string GetCodecName(VideoCodec codec) {
switch (codec) {
- case kUnknownVideoCodec:
+ case VideoCodec::kUnknown:
return "unknown";
- case kCodecH264:
+ case VideoCodec::kH264:
return "h264";
- case kCodecHEVC:
+ case VideoCodec::kHEVC:
return "hevc";
- case kCodecDolbyVision:
+ case VideoCodec::kDolbyVision:
return "dolbyvision";
- case kCodecVC1:
+ case VideoCodec::kVC1:
return "vc1";
- case kCodecMPEG2:
+ case VideoCodec::kMPEG2:
return "mpeg2video";
- case kCodecMPEG4:
+ case VideoCodec::kMPEG4:
return "mpeg4";
- case kCodecTheora:
+ case VideoCodec::kTheora:
return "theora";
- case kCodecVP8:
+ case VideoCodec::kVP8:
return "vp8";
- case kCodecVP9:
+ case VideoCodec::kVP9:
return "vp9";
- case kCodecAV1:
+ case VideoCodec::kAV1:
return "av1";
}
NOTREACHED();
@@ -867,7 +867,7 @@
#endif
VideoCodec StringToVideoCodec(const std::string& codec_id) {
- VideoCodec codec = kUnknownVideoCodec;
+ VideoCodec codec = VideoCodec::kUnknown;
VideoCodecProfile profile = VIDEO_CODEC_PROFILE_UNKNOWN;
uint8_t level = 0;
VideoColorSpace color_space;
@@ -883,61 +883,61 @@
std::vector<std::string> elem = base::SplitString(
codec_id, ".", base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL);
if (elem.empty()) {
- codec = kUnknownVideoCodec;
+ codec = VideoCodec::kUnknown;
return;
}
if (codec_id == "vp8" || codec_id == "vp8.0") {
- codec = kCodecVP8;
+ codec = VideoCodec::kVP8;
return;
}
if (ParseNewStyleVp9CodecID(codec_id, &profile, &level, &color_space) ||
ParseLegacyVp9CodecID(codec_id, &profile, &level)) {
- codec = kCodecVP9;
+ codec = VideoCodec::kVP9;
return;
}
#if BUILDFLAG(ENABLE_AV1_DECODER)
if (ParseAv1CodecId(codec_id, &profile, &level, &color_space)) {
- codec = kCodecAV1;
+ codec = VideoCodec::kAV1;
return;
}
#endif
if (codec_id == "theora") {
- codec = kCodecTheora;
+ codec = VideoCodec::kTheora;
return;
}
if (ParseAVCCodecId(codec_id, &profile, &level)) {
- codec = kCodecH264;
+ codec = VideoCodec::kH264;
return;
}
#if BUILDFLAG(ENABLE_MSE_MPEG2TS_STREAM_PARSER)
if (ParseAVCCodecId(TranslateLegacyAvc1CodecIds(codec_id), &profile,
&level)) {
- codec = kCodecH264;
+ codec = VideoCodec::kH264;
return;
}
#endif
#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
if (ParseHEVCCodecId(codec_id, &profile, &level)) {
- codec = kCodecHEVC;
+ codec = VideoCodec::kHEVC;
return;
}
#endif
#if BUILDFLAG(ENABLE_PLATFORM_DOLBY_VISION)
if (ParseDolbyVisionCodecId(codec_id, &profile, &level)) {
- codec = kCodecDolbyVision;
+ codec = VideoCodec::kDolbyVision;
return;
}
#endif
- codec = kUnknownVideoCodec;
+ codec = VideoCodec::kUnknown;
}
VideoCodec VideoCodecProfileToVideoCodec(VideoCodecProfile profile) {
switch (profile) {
case VIDEO_CODEC_PROFILE_UNKNOWN:
- return kUnknownVideoCodec;
+ return VideoCodec::kUnknown;
case H264PROFILE_BASELINE:
case H264PROFILE_MAIN:
case H264PROFILE_EXTENDED:
@@ -949,31 +949,36 @@
case H264PROFILE_SCALABLEHIGH:
case H264PROFILE_STEREOHIGH:
case H264PROFILE_MULTIVIEWHIGH:
- return kCodecH264;
+ return VideoCodec::kH264;
case HEVCPROFILE_MAIN:
case HEVCPROFILE_MAIN10:
case HEVCPROFILE_MAIN_STILL_PICTURE:
- return kCodecHEVC;
+ return VideoCodec::kHEVC;
case VP8PROFILE_ANY:
- return kCodecVP8;
+ return VideoCodec::kVP8;
case VP9PROFILE_PROFILE0:
case VP9PROFILE_PROFILE1:
case VP9PROFILE_PROFILE2:
case VP9PROFILE_PROFILE3:
- return kCodecVP9;
+ return VideoCodec::kVP9;
case DOLBYVISION_PROFILE0:
case DOLBYVISION_PROFILE4:
case DOLBYVISION_PROFILE5:
case DOLBYVISION_PROFILE7:
case DOLBYVISION_PROFILE8:
case DOLBYVISION_PROFILE9:
- return kCodecDolbyVision;
+ return VideoCodec::kDolbyVision;
case THEORAPROFILE_ANY:
- return kCodecTheora;
+ return VideoCodec::kTheora;
case AV1PROFILE_PROFILE_MAIN:
case AV1PROFILE_PROFILE_HIGH:
case AV1PROFILE_PROFILE_PRO:
- return kCodecAV1;
+ return VideoCodec::kAV1;
}
}
+
+std::ostream& operator<<(std::ostream& os, const VideoCodec& codec) {
+ return os << GetCodecName(codec);
+}
+
} // namespace media
diff --git a/media/base/video_codecs.h b/media/base/video_codecs.h
index 0e7dce19..12bf6e45 100644
--- a/media/base/video_codecs.h
+++ b/media/base/video_codecs.h
@@ -16,27 +16,27 @@
class VideoColorSpace;
// GENERATED_JAVA_ENUM_PACKAGE: org.chromium.media
-enum VideoCodec {
+enum class VideoCodec {
// These values are histogrammed over time; do not change their ordinal
// values. When deleting a codec replace it with a dummy value; when adding a
- // codec, do so at the bottom (and update kVideoCodecMax).
- kUnknownVideoCodec = 0,
- kCodecH264,
- kCodecVC1,
- kCodecMPEG2,
- kCodecMPEG4,
- kCodecTheora,
- kCodecVP8,
- kCodecVP9,
- kCodecHEVC,
- kCodecDolbyVision,
- kCodecAV1,
+ // codec, do so at the bottom (and update kMaxValue).
+ kUnknown = 0,
+ kH264,
+ kVC1,
+ kMPEG2,
+ kMPEG4,
+ kTheora,
+ kVP8,
+ kVP9,
+ kHEVC,
+ kDolbyVision,
+ kAV1,
// DO NOT ADD RANDOM VIDEO CODECS!
//
// The only acceptable time to add a new codec is if there is production code
// that uses said codec in the same CL.
- kVideoCodecMax = kCodecAV1, // Must equal the last "real" codec above.
+ kMaxValue = kAV1, // Must equal the last "real" codec above.
};
// Video codec profiles. Keep in sync with mojo::VideoCodecProfile (see
@@ -166,6 +166,9 @@
std::string TranslateLegacyAvc1CodecIds(const std::string& codec_id);
#endif
+MEDIA_EXPORT std::ostream& operator<<(std::ostream& os,
+ const VideoCodec& codec);
+
} // namespace media
#endif // MEDIA_BASE_VIDEO_CODECS_H_
diff --git a/media/base/video_decoder_config.cc b/media/base/video_decoder_config.cc
index 40c9707..b28a223 100644
--- a/media/base/video_decoder_config.cc
+++ b/media/base/video_decoder_config.cc
@@ -69,7 +69,7 @@
}
bool VideoDecoderConfig::IsValidConfig() const {
- return codec_ != kUnknownVideoCodec && IsValidSize(coded_size_) &&
+ return codec_ != VideoCodec::kUnknown && IsValidSize(coded_size_) &&
IsValidSize(natural_size_) &&
gfx::Rect(coded_size_).Contains(visible_rect_);
}
diff --git a/media/base/video_decoder_config.h b/media/base/video_decoder_config.h
index 1eb9bf07..875de0d 100644
--- a/media/base/video_decoder_config.h
+++ b/media/base/video_decoder_config.h
@@ -160,7 +160,7 @@
bool is_rtc() const { return is_rtc_; }
private:
- VideoCodec codec_ = kUnknownVideoCodec;
+ VideoCodec codec_ = VideoCodec::kUnknown;
VideoCodecProfile profile_ = VIDEO_CODEC_PROFILE_UNKNOWN;
// Optional video codec level. kNoVideoCodecLevel means the field is not
diff --git a/media/base/video_decoder_config_unittest.cc b/media/base/video_decoder_config_unittest.cc
index 22954f2d..6bfa5b9 100644
--- a/media/base/video_decoder_config_unittest.cc
+++ b/media/base/video_decoder_config_unittest.cc
@@ -14,7 +14,7 @@
static const gfx::Size kNaturalSize(320, 240);
TEST(VideoDecoderConfigTest, AlphaModeSetCorrectly) {
- VideoDecoderConfig config(kCodecVP8, VIDEO_CODEC_PROFILE_UNKNOWN,
+ VideoDecoderConfig config(VideoCodec::kVP8, VIDEO_CODEC_PROFILE_UNKNOWN,
VideoDecoderConfig::AlphaMode::kIsOpaque,
VideoColorSpace(), kNoTransformation, kCodedSize,
kVisibleRect, kNaturalSize, EmptyExtraData(),
@@ -22,7 +22,7 @@
EXPECT_TRUE(config.IsValidConfig());
EXPECT_EQ(config.alpha_mode(), VideoDecoderConfig::AlphaMode::kIsOpaque);
- config.Initialize(kCodecVP8, VIDEO_CODEC_PROFILE_UNKNOWN,
+ config.Initialize(VideoCodec::kVP8, VIDEO_CODEC_PROFILE_UNKNOWN,
VideoDecoderConfig::AlphaMode::kHasAlpha, VideoColorSpace(),
kNoTransformation, kCodedSize, kVisibleRect, kNaturalSize,
EmptyExtraData(), EncryptionScheme::kUnencrypted);
@@ -30,10 +30,11 @@
}
TEST(VideoDecoderConfigTest, SetProfile) {
- VideoDecoderConfig config(
- kCodecVP9, VP9PROFILE_PROFILE0, VideoDecoderConfig::AlphaMode::kIsOpaque,
- VideoColorSpace(), kNoTransformation, kCodedSize, kVisibleRect,
- kNaturalSize, EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ VideoDecoderConfig config(VideoCodec::kVP9, VP9PROFILE_PROFILE0,
+ VideoDecoderConfig::AlphaMode::kIsOpaque,
+ VideoColorSpace(), kNoTransformation, kCodedSize,
+ kVisibleRect, kNaturalSize, EmptyExtraData(),
+ EncryptionScheme::kUnencrypted);
config.set_profile(VP9PROFILE_PROFILE2);
EXPECT_EQ(config.profile(), VP9PROFILE_PROFILE2);
}
diff --git a/media/base/video_thumbnail_decoder_unittest.cc b/media/base/video_thumbnail_decoder_unittest.cc
index f070c07c..5e510bb 100644
--- a/media/base/video_thumbnail_decoder_unittest.cc
+++ b/media/base/video_thumbnail_decoder_unittest.cc
@@ -39,9 +39,10 @@
auto mock_video_decoder = std::make_unique<MockVideoDecoder>();
mock_video_decoder_ = mock_video_decoder.get();
VideoDecoderConfig valid_config(
- kCodecVP8, VP8PROFILE_ANY, VideoDecoderConfig::AlphaMode::kIsOpaque,
- VideoColorSpace(), kNoTransformation, gfx::Size(1, 1), gfx::Rect(1, 1),
- gfx::Size(1, 1), EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ VideoCodec::kVP8, VP8PROFILE_ANY,
+ VideoDecoderConfig::AlphaMode::kIsOpaque, VideoColorSpace(),
+ kNoTransformation, gfx::Size(1, 1), gfx::Rect(1, 1), gfx::Size(1, 1),
+ EmptyExtraData(), EncryptionScheme::kUnencrypted);
thumbnail_decoder_ = std::make_unique<VideoThumbnailDecoder>(
std::move(mock_video_decoder), valid_config, std::vector<uint8_t>{0u});
diff --git a/media/cast/sender/h264_vt_encoder_unittest.cc b/media/cast/sender/h264_vt_encoder_unittest.cc
index be544db..e78b72d6 100644
--- a/media/cast/sender/h264_vt_encoder_unittest.cc
+++ b/media/cast/sender/h264_vt_encoder_unittest.cc
@@ -306,7 +306,7 @@
? VideoDecoderConfig::AlphaMode::kIsOpaque
: VideoDecoderConfig::AlphaMode::kHasAlpha;
VideoDecoderConfig config(
- kCodecH264, H264PROFILE_MAIN, alpha_mode, VideoColorSpace(),
+ VideoCodec::kH264, H264PROFILE_MAIN, alpha_mode, VideoColorSpace(),
kNoTransformation, frame_->coded_size(), frame_->visible_rect(),
frame_->natural_size(), EmptyExtraData(), EncryptionScheme::kUnencrypted);
scoped_refptr<EndToEndFrameChecker> checker(new EndToEndFrameChecker(config));
diff --git a/media/cdm/cdm_type_conversion.cc b/media/cdm/cdm_type_conversion.cc
index 726aa52..dd40ab1e 100644
--- a/media/cdm/cdm_type_conversion.cc
+++ b/media/cdm/cdm_type_conversion.cc
@@ -318,9 +318,9 @@
cdm::AudioCodec ToCdmAudioCodec(AudioCodec codec) {
switch (codec) {
- case kCodecVorbis:
+ case AudioCodec::kVorbis:
return cdm::kCodecVorbis;
- case kCodecAAC:
+ case AudioCodec::kAAC:
return cdm::kCodecAac;
default:
DVLOG(1) << "Unsupported AudioCodec " << codec;
@@ -354,13 +354,13 @@
cdm::VideoCodec ToCdmVideoCodec(VideoCodec codec) {
switch (codec) {
- case kCodecVP8:
+ case VideoCodec::kVP8:
return cdm::kCodecVp8;
- case kCodecH264:
+ case VideoCodec::kH264:
return cdm::kCodecH264;
- case kCodecVP9:
+ case VideoCodec::kVP9:
return cdm::kCodecVp9;
- case kCodecAV1:
+ case VideoCodec::kAV1:
return cdm::kCodecAv1;
default:
DVLOG(1) << "Unsupported VideoCodec " << codec;
@@ -371,19 +371,19 @@
VideoCodec ToMediaVideoCodec(cdm::VideoCodec codec) {
switch (codec) {
case cdm::kUnknownVideoCodec:
- return kUnknownVideoCodec;
+ return VideoCodec::kUnknown;
case cdm::kCodecVp8:
- return kCodecVP8;
+ return VideoCodec::kVP8;
case cdm::kCodecH264:
- return kCodecH264;
+ return VideoCodec::kH264;
case cdm::kCodecVp9:
- return kCodecVP9;
+ return VideoCodec::kVP9;
case cdm::kCodecAv1:
- return kCodecAV1;
+ return VideoCodec::kAV1;
}
NOTREACHED() << "Unexpected cdm::VideoCodec " << codec;
- return kUnknownVideoCodec;
+ return VideoCodec::kUnknown;
}
cdm::VideoCodecProfile ToCdmVideoCodecProfile(VideoCodecProfile profile) {
diff --git a/media/cdm/supported_audio_codecs.cc b/media/cdm/supported_audio_codecs.cc
index 1f416ac1..3161889 100644
--- a/media/cdm/supported_audio_codecs.cc
+++ b/media/cdm/supported_audio_codecs.cc
@@ -10,9 +10,9 @@
const std::vector<AudioCodec> GetCdmSupportedAudioCodecs() {
return {
- AudioCodec::kCodecOpus, AudioCodec::kCodecVorbis, AudioCodec::kCodecFLAC,
+ AudioCodec::kOpus, AudioCodec::kVorbis, AudioCodec::kFLAC,
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- AudioCodec::kCodecAAC,
+ AudioCodec::kAAC,
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
};
}
diff --git a/media/ffmpeg/ffmpeg_common.cc b/media/ffmpeg/ffmpeg_common.cc
index faa8de7..d103767 100644
--- a/media/ffmpeg/ffmpeg_common.cc
+++ b/media/ffmpeg/ffmpeg_common.cc
@@ -84,63 +84,63 @@
AudioCodec CodecIDToAudioCodec(AVCodecID codec_id) {
switch (codec_id) {
case AV_CODEC_ID_AAC:
- return kCodecAAC;
+ return AudioCodec::kAAC;
#if BUILDFLAG(ENABLE_PLATFORM_AC3_EAC3_AUDIO)
case AV_CODEC_ID_AC3:
- return kCodecAC3;
+ return AudioCodec::kAC3;
case AV_CODEC_ID_EAC3:
- return kCodecEAC3;
+ return AudioCodec::kEAC3;
#endif
case AV_CODEC_ID_MP3:
- return kCodecMP3;
+ return AudioCodec::kMP3;
case AV_CODEC_ID_VORBIS:
- return kCodecVorbis;
+ return AudioCodec::kVorbis;
case AV_CODEC_ID_PCM_U8:
case AV_CODEC_ID_PCM_S16LE:
case AV_CODEC_ID_PCM_S24LE:
case AV_CODEC_ID_PCM_S32LE:
case AV_CODEC_ID_PCM_F32LE:
- return kCodecPCM;
+ return AudioCodec::kPCM;
case AV_CODEC_ID_PCM_S16BE:
- return kCodecPCM_S16BE;
+ return AudioCodec::kPCM_S16BE;
case AV_CODEC_ID_PCM_S24BE:
- return kCodecPCM_S24BE;
+ return AudioCodec::kPCM_S24BE;
case AV_CODEC_ID_FLAC:
- return kCodecFLAC;
+ return AudioCodec::kFLAC;
case AV_CODEC_ID_AMR_NB:
- return kCodecAMR_NB;
+ return AudioCodec::kAMR_NB;
case AV_CODEC_ID_AMR_WB:
- return kCodecAMR_WB;
+ return AudioCodec::kAMR_WB;
case AV_CODEC_ID_GSM_MS:
- return kCodecGSM_MS;
+ return AudioCodec::kGSM_MS;
case AV_CODEC_ID_PCM_ALAW:
- return kCodecPCM_ALAW;
+ return AudioCodec::kPCM_ALAW;
case AV_CODEC_ID_PCM_MULAW:
- return kCodecPCM_MULAW;
+ return AudioCodec::kPCM_MULAW;
case AV_CODEC_ID_OPUS:
- return kCodecOpus;
+ return AudioCodec::kOpus;
case AV_CODEC_ID_ALAC:
- return kCodecALAC;
+ return AudioCodec::kALAC;
#if BUILDFLAG(ENABLE_PLATFORM_MPEG_H_AUDIO)
case AV_CODEC_ID_MPEGH_3D_AUDIO:
- return kCodecMpegHAudio;
+ return AudioCodec::kMpegHAudio;
#endif
default:
DVLOG(1) << "Unknown audio CodecID: " << codec_id;
}
- return kUnknownAudioCodec;
+ return AudioCodec::kUnknown;
}
AVCodecID AudioCodecToCodecID(AudioCodec audio_codec,
SampleFormat sample_format) {
switch (audio_codec) {
- case kCodecAAC:
+ case AudioCodec::kAAC:
return AV_CODEC_ID_AAC;
- case kCodecALAC:
+ case AudioCodec::kALAC:
return AV_CODEC_ID_ALAC;
- case kCodecMP3:
+ case AudioCodec::kMP3:
return AV_CODEC_ID_MP3;
- case kCodecPCM:
+ case AudioCodec::kPCM:
switch (sample_format) {
case kSampleFormatU8:
return AV_CODEC_ID_PCM_U8;
@@ -156,28 +156,28 @@
DVLOG(1) << "Unsupported sample format: " << sample_format;
}
break;
- case kCodecPCM_S16BE:
+ case AudioCodec::kPCM_S16BE:
return AV_CODEC_ID_PCM_S16BE;
- case kCodecPCM_S24BE:
+ case AudioCodec::kPCM_S24BE:
return AV_CODEC_ID_PCM_S24BE;
- case kCodecVorbis:
+ case AudioCodec::kVorbis:
return AV_CODEC_ID_VORBIS;
- case kCodecFLAC:
+ case AudioCodec::kFLAC:
return AV_CODEC_ID_FLAC;
- case kCodecAMR_NB:
+ case AudioCodec::kAMR_NB:
return AV_CODEC_ID_AMR_NB;
- case kCodecAMR_WB:
+ case AudioCodec::kAMR_WB:
return AV_CODEC_ID_AMR_WB;
- case kCodecGSM_MS:
+ case AudioCodec::kGSM_MS:
return AV_CODEC_ID_GSM_MS;
- case kCodecPCM_ALAW:
+ case AudioCodec::kPCM_ALAW:
return AV_CODEC_ID_PCM_ALAW;
- case kCodecPCM_MULAW:
+ case AudioCodec::kPCM_MULAW:
return AV_CODEC_ID_PCM_MULAW;
- case kCodecOpus:
+ case AudioCodec::kOpus:
return AV_CODEC_ID_OPUS;
#if BUILDFLAG(ENABLE_PLATFORM_MPEG_H_AUDIO)
- case kCodecMpegHAudio:
+ case AudioCodec::kMpegHAudio:
return AV_CODEC_ID_MPEGH_3D_AUDIO;
#endif
default:
@@ -190,44 +190,44 @@
static VideoCodec CodecIDToVideoCodec(AVCodecID codec_id) {
switch (codec_id) {
case AV_CODEC_ID_H264:
- return kCodecH264;
+ return VideoCodec::kH264;
#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
case AV_CODEC_ID_HEVC:
- return kCodecHEVC;
+ return VideoCodec::kHEVC;
#endif
case AV_CODEC_ID_THEORA:
- return kCodecTheora;
+ return VideoCodec::kTheora;
case AV_CODEC_ID_MPEG4:
- return kCodecMPEG4;
+ return VideoCodec::kMPEG4;
case AV_CODEC_ID_VP8:
- return kCodecVP8;
+ return VideoCodec::kVP8;
case AV_CODEC_ID_VP9:
- return kCodecVP9;
+ return VideoCodec::kVP9;
case AV_CODEC_ID_AV1:
- return kCodecAV1;
+ return VideoCodec::kAV1;
default:
DVLOG(1) << "Unknown video CodecID: " << codec_id;
}
- return kUnknownVideoCodec;
+ return VideoCodec::kUnknown;
}
AVCodecID VideoCodecToCodecID(VideoCodec video_codec) {
switch (video_codec) {
- case kCodecH264:
+ case VideoCodec::kH264:
return AV_CODEC_ID_H264;
#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
- case kCodecHEVC:
+ case VideoCodec::kHEVC:
return AV_CODEC_ID_HEVC;
#endif
- case kCodecTheora:
+ case VideoCodec::kTheora:
return AV_CODEC_ID_THEORA;
- case kCodecMPEG4:
+ case VideoCodec::kMPEG4:
return AV_CODEC_ID_MPEG4;
- case kCodecVP8:
+ case VideoCodec::kVP8:
return AV_CODEC_ID_VP8;
- case kCodecVP9:
+ case VideoCodec::kVP9:
return AV_CODEC_ID_VP9;
- case kCodecAV1:
+ case VideoCodec::kAV1:
return AV_CODEC_ID_AV1;
default:
DVLOG(1) << "Unknown VideoCodec: " << video_codec;
@@ -351,8 +351,8 @@
switch (codec) {
// For AC3/EAC3 we enable only demuxing, but not decoding, so FFmpeg does
// not fill |sample_fmt|.
- case kCodecAC3:
- case kCodecEAC3:
+ case AudioCodec::kAC3:
+ case AudioCodec::kEAC3:
#if BUILDFLAG(ENABLE_PLATFORM_AC3_EAC3_AUDIO)
// The spec for AC3/EAC3 audio is ETSI TS 102 366. According to sections
// F.3.1 and F.5.1 in that spec the sample_format for AC3/EAC3 must be 16.
@@ -362,7 +362,7 @@
#endif
break;
#if BUILDFLAG(ENABLE_PLATFORM_MPEG_H_AUDIO)
- case kCodecMpegHAudio:
+ case AudioCodec::kMpegHAudio:
channel_layout = CHANNEL_LAYOUT_BITSTREAM;
sample_format = kSampleFormatMpegHAudio;
break;
@@ -403,18 +403,19 @@
#if BUILDFLAG(ENABLE_PLATFORM_AC3_EAC3_AUDIO)
// These are bitstream formats unknown to ffmpeg, so they don't have
// a known sample format size.
- if (codec == kCodecAC3 || codec == kCodecEAC3)
+ if (codec == AudioCodec::kAC3 || codec == AudioCodec::kEAC3)
return true;
#endif
#if BUILDFLAG(ENABLE_PLATFORM_MPEG_H_AUDIO)
- if (codec == kCodecMpegHAudio)
+ if (codec == AudioCodec::kMpegHAudio)
return true;
#endif
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
// TODO(dalecurtis): Just use the profile from the codec context if ffmpeg
// ever starts supporting xHE-AAC.
- if (codec == kCodecAAC && codec_context->profile == FF_PROFILE_UNKNOWN) {
+ if (codec == AudioCodec::kAAC &&
+ codec_context->profile == FF_PROFILE_UNKNOWN) {
// Errors aren't fatal here, so just drop any MediaLog messages.
NullMediaLog media_log;
mp4::AAC aac_parser;
@@ -521,7 +522,7 @@
VideoCodecProfile profile = VIDEO_CODEC_PROFILE_UNKNOWN;
switch (codec) {
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- case kCodecH264: {
+ case VideoCodec::kH264: {
profile = ProfileIDToVideoCodecProfile(codec_context->profile);
// if the profile is still unknown, try to extract it from
// the extradata using the internal parser
@@ -539,10 +540,10 @@
break;
}
#endif
- case kCodecVP8:
+ case VideoCodec::kVP8:
profile = VP8PROFILE_ANY;
break;
- case kCodecVP9:
+ case VideoCodec::kVP9:
switch (codec_context->profile) {
case FF_PROFILE_VP9_0:
profile = VP9PROFILE_PROFILE0;
@@ -561,15 +562,15 @@
break;
}
break;
- case kCodecAV1:
+ case VideoCodec::kAV1:
profile = AV1PROFILE_PROFILE_MAIN;
break;
#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
- case kCodecHEVC:
+ case VideoCodec::kHEVC:
profile = HEVCPROFILE_MAIN;
break;
#endif
- case kCodecTheora:
+ case VideoCodec::kTheora:
profile = THEORAPROFILE_ANY;
break;
default:
diff --git a/media/ffmpeg/ffmpeg_common_unittest.cc b/media/ffmpeg/ffmpeg_common_unittest.cc
index 9146072..c8bc7038 100644
--- a/media/ffmpeg/ffmpeg_common_unittest.cc
+++ b/media/ffmpeg/ffmpeg_common_unittest.cc
@@ -143,7 +143,7 @@
AudioDecoderConfig audio_config;
ASSERT_TRUE(AVStreamToAudioDecoderConfig(stream, &audio_config));
- EXPECT_EQ(kCodecOpus, audio_config.codec());
+ EXPECT_EQ(AudioCodec::kOpus, audio_config.codec());
EXPECT_EQ(CHANNEL_LAYOUT_QUAD, audio_config.channel_layout());
EXPECT_EQ(4, audio_config.channels());
}
@@ -166,7 +166,7 @@
AudioDecoderConfig audio_config;
ASSERT_TRUE(AVStreamToAudioDecoderConfig(stream, &audio_config));
- EXPECT_EQ(kCodecOpus, audio_config.codec());
+ EXPECT_EQ(AudioCodec::kOpus, audio_config.codec());
EXPECT_EQ(CHANNEL_LAYOUT_DISCRETE, audio_config.channel_layout());
EXPECT_EQ(11, audio_config.channels());
}
@@ -188,7 +188,7 @@
AudioDecoderConfig audio_config;
ASSERT_TRUE(AVStreamToAudioDecoderConfig(stream, &audio_config));
- EXPECT_EQ(kCodecPCM, audio_config.codec());
+ EXPECT_EQ(AudioCodec::kPCM, audio_config.codec());
EXPECT_EQ(CHANNEL_LAYOUT_DISCRETE, audio_config.channel_layout());
EXPECT_EQ(9, audio_config.channels());
}
diff --git a/media/filters/android/media_codec_audio_decoder.cc b/media/filters/android/media_codec_audio_decoder.cc
index a89182a..1fd4784 100644
--- a/media/filters/android/media_codec_audio_decoder.cc
+++ b/media/filters/android/media_codec_audio_decoder.cc
@@ -71,11 +71,11 @@
is_passthrough_ = MediaCodecUtil::IsPassthroughAudioFormat(config.codec());
sample_format_ = kSampleFormatS16;
- if (config.codec() == kCodecAC3)
+ if (config.codec() == AudioCodec::kAC3)
sample_format_ = kSampleFormatAc3;
- else if (config.codec() == kCodecEAC3)
+ else if (config.codec() == AudioCodec::kEAC3)
sample_format_ = kSampleFormatEac3;
- else if (config.codec() == kCodecMpegHAudio)
+ else if (config.codec() == AudioCodec::kMpegHAudio)
sample_format_ = kSampleFormatMpegHAudio;
if (state_ == STATE_ERROR) {
@@ -88,10 +88,11 @@
// We can support only the codecs that MediaCodecBridge can decode.
// TODO(xhwang): Get this list from MediaCodecBridge or just rely on
// attempting to create one to determine whether the codec is supported.
- const bool is_codec_supported =
- config.codec() == kCodecVorbis || config.codec() == kCodecFLAC ||
- config.codec() == kCodecAAC || config.codec() == kCodecOpus ||
- is_passthrough_;
+ const bool is_codec_supported = config.codec() == AudioCodec::kVorbis ||
+ config.codec() == AudioCodec::kFLAC ||
+ config.codec() == AudioCodec::kAAC ||
+ config.codec() == AudioCodec::kOpus ||
+ is_passthrough_;
if (!is_codec_supported) {
DVLOG(1) << "Unsuported codec " << GetCodecName(config.codec());
BindToCurrentLoop(std::move(init_cb))
@@ -221,8 +222,8 @@
bool MediaCodecAudioDecoder::NeedsBitstreamConversion() const {
// An AAC stream needs to be converted as ADTS stream.
- DCHECK_NE(config_.codec(), kUnknownAudioCodec);
- return config_.codec() == kCodecAAC;
+ DCHECK_NE(config_.codec(), AudioCodec::kUnknown);
+ return config_.codec() == AudioCodec::kAAC;
}
void MediaCodecAudioDecoder::SetCdm(CdmContext* cdm_context, InitCB init_cb) {
@@ -411,10 +412,10 @@
return false;
}
- if (config_.codec() == kCodecAC3) {
+ if (config_.codec() == AudioCodec::kAC3) {
frame_count = Ac3Util::ParseTotalAc3SampleCount(
audio_buffer->channel_data()[0], out.size);
- } else if (config_.codec() == kCodecEAC3) {
+ } else if (config_.codec() == AudioCodec::kEAC3) {
frame_count = Ac3Util::ParseTotalEac3SampleCount(
audio_buffer->channel_data()[0], out.size);
} else {
diff --git a/media/filters/android/video_frame_extractor_unittest.cc b/media/filters/android/video_frame_extractor_unittest.cc
index f659425..890a376 100644
--- a/media/filters/android/video_frame_extractor_unittest.cc
+++ b/media/filters/android/video_frame_extractor_unittest.cc
@@ -78,7 +78,7 @@
auto result = ExtractFrame(GetTestDataFilePath("bear.mp4"));
EXPECT_TRUE(result.success);
EXPECT_GT(result.encoded_frame.size(), 0u);
- EXPECT_EQ(result.decoder_config.codec(), VideoCodec::kCodecH264);
+ EXPECT_EQ(result.decoder_config.codec(), VideoCodec::kH264);
}
// Verifies graceful failure when trying to extract frame from an invalid video
diff --git a/media/filters/audio_decoder_stream_unittest.cc b/media/filters/audio_decoder_stream_unittest.cc
index 9e6475c..9a5ffce 100644
--- a/media/filters/audio_decoder_stream_unittest.cc
+++ b/media/filters/audio_decoder_stream_unittest.cc
@@ -54,8 +54,12 @@
base::Unretained(this)),
&media_log_) {
// Any valid config will do.
- demuxer_stream_.set_audio_decoder_config(
- {kCodecAAC, kSampleFormatS16, CHANNEL_LAYOUT_STEREO, 44100, {}, {}});
+ demuxer_stream_.set_audio_decoder_config({AudioCodec::kAAC,
+ kSampleFormatS16,
+ CHANNEL_LAYOUT_STEREO,
+ 44100,
+ {},
+ {}});
EXPECT_CALL(demuxer_stream_, SupportsConfigChanges())
.WillRepeatedly(Return(true));
diff --git a/media/filters/audio_decoder_unittest.cc b/media/filters/audio_decoder_unittest.cc
index 127855d4..3488419 100644
--- a/media/filters/audio_decoder_unittest.cc
+++ b/media/filters/audio_decoder_unittest.cc
@@ -159,7 +159,7 @@
VLOG(0) << "Could not run test - no MediaCodec on device.";
return false;
}
- if (params_.codec == kCodecOpus &&
+ if (params_.codec == AudioCodec::kOpus &&
base::android::BuildInfo::GetInstance()->sdk_int() <
base::android::SDK_VERSION_LOLLIPOP) {
VLOG(0) << "Could not run test - Opus is not supported";
@@ -216,7 +216,7 @@
#if defined(OS_ANDROID) && BUILDFLAG(USE_PROPRIETARY_CODECS)
// MEDIA_CODEC type requires config->extra_data() for AAC codec. For ADTS
// streams we need to extract it with a separate procedure.
- if (decoder_type_ == MEDIA_CODEC && params_.codec == kCodecAAC &&
+ if (decoder_type_ == MEDIA_CODEC && params_.codec == AudioCodec::kAAC &&
config.extra_data().empty()) {
int sample_rate;
ChannelLayout channel_layout;
@@ -225,7 +225,7 @@
packet.data, packet.size, nullptr, &sample_rate,
&channel_layout, nullptr, nullptr, &extra_data),
0);
- config.Initialize(kCodecAAC, kSampleFormatS16, channel_layout,
+ config.Initialize(AudioCodec::kAAC, kSampleFormatS16, channel_layout,
sample_rate, extra_data, EncryptionScheme::kUnencrypted,
base::TimeDelta(), 0);
ASSERT_FALSE(config.extra_data().empty());
@@ -274,7 +274,7 @@
// Don't set discard padding for Opus, it already has discard behavior set
// based on the codec delay in the AudioDecoderConfig.
- if (decoder_type_ == FFMPEG && params_.codec != kCodecOpus)
+ if (decoder_type_ == FFMPEG && params_.codec != AudioCodec::kOpus)
SetDiscardPadding(&packet, buffer.get(), params_.samples_per_second);
// DecodeBuffer() shouldn't need the original packet since it uses the copy.
@@ -345,7 +345,7 @@
#if defined(OS_ANDROID)
return (base::android::BuildInfo::GetInstance()->sdk_int() <
base::android::SDK_VERSION_LOLLIPOP) &&
- decoder_type_ == MEDIA_CODEC && params_.codec == kCodecAAC;
+ decoder_type_ == MEDIA_CODEC && params_.codec == AudioCodec::kAAC;
#else
return false;
#endif
@@ -427,8 +427,8 @@
// Test params to test decoder reinitialization. Choose opus because it is
// supported on all platforms we test on.
const TestParams kReinitializeTestParams = {
- kCodecOpus, "bear-opus.ogg", kBearOpusExpectations,
- 24, 48000, CHANNEL_LAYOUT_STEREO};
+ AudioCodec::kOpus, "bear-opus.ogg", kBearOpusExpectations, 24, 48000,
+ CHANNEL_LAYOUT_STEREO};
#if defined(OS_ANDROID)
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
@@ -446,13 +446,13 @@
#endif // defined(USE_PROPRIETARY_CODECS)
const TestParams kMediaCodecTestParams[] = {
- {kCodecOpus, "bear-opus.ogg", kBearOpusExpectations, 24, 48000,
+ {AudioCodec::kOpus, "bear-opus.ogg", kBearOpusExpectations, 24, 48000,
CHANNEL_LAYOUT_STEREO},
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- {kCodecAAC, "sfx.adts", kSfxAdtsMcExpectations, 0, 44100,
+ {AudioCodec::kAAC, "sfx.adts", kSfxAdtsMcExpectations, 0, 44100,
CHANNEL_LAYOUT_MONO},
- {kCodecAAC, "bear-audio-implicit-he-aac-v2.aac", kHeAacMcExpectations, 0,
- 24000, CHANNEL_LAYOUT_MONO},
+ {AudioCodec::kAAC, "bear-audio-implicit-he-aac-v2.aac",
+ kHeAacMcExpectations, 0, 24000, CHANNEL_LAYOUT_MONO},
#endif // defined(USE_PROPRIETARY_CODECS)
};
@@ -517,28 +517,29 @@
#endif
const TestParams kFFmpegTestParams[] = {
- {kCodecMP3, "sfx.mp3", kSfxMp3Expectations, 0, 44100, CHANNEL_LAYOUT_MONO},
+ {AudioCodec::kMP3, "sfx.mp3", kSfxMp3Expectations, 0, 44100,
+ CHANNEL_LAYOUT_MONO},
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- {kCodecAAC, "sfx.adts", kSfxAdtsExpectations, 0, 44100,
+ {AudioCodec::kAAC, "sfx.adts", kSfxAdtsExpectations, 0, 44100,
CHANNEL_LAYOUT_MONO},
#endif
- {kCodecFLAC, "sfx-flac.mp4", kSfxFlacExpectations, 0, 44100,
+ {AudioCodec::kFLAC, "sfx-flac.mp4", kSfxFlacExpectations, 0, 44100,
CHANNEL_LAYOUT_MONO},
- {kCodecFLAC, "sfx.flac", kSfxFlacExpectations, 0, 44100,
+ {AudioCodec::kFLAC, "sfx.flac", kSfxFlacExpectations, 0, 44100,
CHANNEL_LAYOUT_MONO},
- {kCodecPCM, "sfx_f32le.wav", kSfxWaveExpectations, 0, 44100,
+ {AudioCodec::kPCM, "sfx_f32le.wav", kSfxWaveExpectations, 0, 44100,
CHANNEL_LAYOUT_MONO},
- {kCodecPCM, "4ch.wav", kFourChannelWaveExpectations, 0, 44100,
+ {AudioCodec::kPCM, "4ch.wav", kFourChannelWaveExpectations, 0, 44100,
CHANNEL_LAYOUT_QUAD},
- {kCodecVorbis, "sfx.ogg", kSfxOggExpectations, 0, 44100,
+ {AudioCodec::kVorbis, "sfx.ogg", kSfxOggExpectations, 0, 44100,
CHANNEL_LAYOUT_MONO},
// Note: bear.ogv is incorrectly muxed such that valid samples are given
// negative timestamps, this marks them for discard per the ogg vorbis spec.
- {kCodecVorbis, "bear.ogv", kBearOgvExpectations, -704, 44100,
+ {AudioCodec::kVorbis, "bear.ogv", kBearOgvExpectations, -704, 44100,
CHANNEL_LAYOUT_STEREO},
- {kCodecOpus, "sfx-opus.ogg", kSfxOpusExpectations, -312, 48000,
+ {AudioCodec::kOpus, "sfx-opus.ogg", kSfxOpusExpectations, -312, 48000,
CHANNEL_LAYOUT_MONO},
- {kCodecOpus, "bear-opus.ogg", kBearOpusExpectations, 24, 48000,
+ {AudioCodec::kOpus, "bear-opus.ogg", kBearOpusExpectations, 24, 48000,
CHANNEL_LAYOUT_STEREO},
};
diff --git a/media/filters/audio_file_reader.cc b/media/filters/audio_file_reader.cc
index 94a4a722..b7cdd31c 100644
--- a/media/filters/audio_file_reader.cc
+++ b/media/filters/audio_file_reader.cc
@@ -29,7 +29,7 @@
AudioFileReader::AudioFileReader(FFmpegURLProtocol* protocol)
: stream_index_(0),
protocol_(protocol),
- audio_codec_(kUnknownAudioCodec),
+ audio_codec_(AudioCodec::kUnknown),
channels_(0),
sample_rate_(0),
av_sample_format_(0) {}
@@ -169,7 +169,7 @@
base::CheckedNumeric<int64_t> estimated_duration_us =
glue_->format_context()->duration;
- if (audio_codec_ == kCodecAAC) {
+ if (audio_codec_ == AudioCodec::kAAC) {
// For certain AAC-encoded files, FFMPEG's estimated frame count might not
// be sufficient to capture the entire audio content that we want. This is
// especially noticeable for short files (< 10ms) resulting in silence
@@ -242,7 +242,7 @@
// silence from being output. In the case where we are also discarding some
// portion of the packet (as indicated by a negative pts), we further want to
// adjust the duration downward by however much exists before zero.
- if (audio_codec_ == kCodecAAC && frame->pkt_duration) {
+ if (audio_codec_ == AudioCodec::kAAC && frame->pkt_duration) {
const base::TimeDelta pkt_duration = ConvertFromTimeBase(
glue_->format_context()->streams[stream_index_]->time_base,
frame->pkt_duration + std::min(static_cast<int64_t>(0), frame->pts));
diff --git a/media/filters/audio_timestamp_validator_unittest.cc b/media/filters/audio_timestamp_validator_unittest.cc
index b662ed6e..a391a2e 100644
--- a/media/filters/audio_timestamp_validator_unittest.cc
+++ b/media/filters/audio_timestamp_validator_unittest.cc
@@ -19,7 +19,7 @@
namespace media {
// Constants to specify the type of audio data used.
-static const AudioCodec kCodec = kCodecVorbis;
+static const AudioCodec kCodec = AudioCodec::kVorbis;
static const SampleFormat kSampleFormat = kSampleFormatPlanarF32;
static const base::TimeDelta kSeekPreroll;
static const int kSamplesPerSecond = 10000;
diff --git a/media/filters/chunk_demuxer_unittest.cc b/media/filters/chunk_demuxer_unittest.cc
index 4575cf6..80766b1 100644
--- a/media/filters/chunk_demuxer_unittest.cc
+++ b/media/filters/chunk_demuxer_unittest.cc
@@ -1330,7 +1330,7 @@
ASSERT_TRUE(audio_stream);
const AudioDecoderConfig& config = audio_stream->audio_decoder_config();
- EXPECT_EQ(kCodecVorbis, config.codec());
+ EXPECT_EQ(AudioCodec::kVorbis, config.codec());
EXPECT_EQ(32, config.bits_per_channel());
EXPECT_EQ(CHANNEL_LAYOUT_STEREO, config.channel_layout());
EXPECT_EQ(44100, config.samples_per_second());
diff --git a/media/filters/dav1d_video_decoder.cc b/media/filters/dav1d_video_decoder.cc
index ff50619..7cdd048 100644
--- a/media/filters/dav1d_video_decoder.cc
+++ b/media/filters/dav1d_video_decoder.cc
@@ -172,7 +172,7 @@
return;
}
- if (config.codec() != kCodecAV1) {
+ if (config.codec() != VideoCodec::kAV1) {
std::move(bound_init_cb)
.Run(Status(StatusCode::kDecoderUnsupportedCodec)
.WithData("codec", config.codec()));
diff --git a/media/filters/dav1d_video_decoder.h b/media/filters/dav1d_video_decoder.h
index 3cdc5184..938a9097 100644
--- a/media/filters/dav1d_video_decoder.h
+++ b/media/filters/dav1d_video_decoder.h
@@ -98,7 +98,7 @@
explicit OffloadingDav1dVideoDecoder(MediaLog* media_log)
: OffloadingVideoDecoder(
0,
- std::vector<VideoCodec>(1, kCodecAV1),
+ std::vector<VideoCodec>(1, VideoCodec::kAV1),
std::make_unique<Dav1dVideoDecoder>(
media_log,
OffloadableVideoDecoder::OffloadState::kOffloaded)) {}
diff --git a/media/filters/dav1d_video_decoder_unittest.cc b/media/filters/dav1d_video_decoder_unittest.cc
index 0711b2f..f102894 100644
--- a/media/filters/dav1d_video_decoder_unittest.cc
+++ b/media/filters/dav1d_video_decoder_unittest.cc
@@ -45,7 +45,7 @@
~Dav1dVideoDecoderTest() override { Destroy(); }
void Initialize() {
- InitializeWithConfig(TestVideoConfig::Normal(kCodecAV1));
+ InitializeWithConfig(TestVideoConfig::Normal(VideoCodec::kAV1));
}
void InitializeWithConfigWithResult(const VideoDecoderConfig& config,
@@ -69,7 +69,7 @@
}
void Reinitialize() {
- InitializeWithConfig(TestVideoConfig::Large(kCodecAV1));
+ InitializeWithConfig(TestVideoConfig::Large(VideoCodec::kAV1));
}
void Reset() {
diff --git a/media/filters/decrypting_audio_decoder_unittest.cc b/media/filters/decrypting_audio_decoder_unittest.cc
index b2836c34..877fd43 100644
--- a/media/filters/decrypting_audio_decoder_unittest.cc
+++ b/media/filters/decrypting_audio_decoder_unittest.cc
@@ -114,7 +114,7 @@
return std::make_unique<CallbackRegistration>();
});
- config_.Initialize(kCodecVorbis, kSampleFormatPlanarF32,
+ config_.Initialize(AudioCodec::kVorbis, kSampleFormatPlanarF32,
CHANNEL_LAYOUT_STEREO, kSampleRate, EmptyExtraData(),
EncryptionScheme::kCenc, base::TimeDelta(), 0);
InitializeAndExpectResult(config_, true);
@@ -282,7 +282,7 @@
// Ensure decoder handles invalid audio configs without crashing.
TEST_F(DecryptingAudioDecoderTest, Initialize_InvalidAudioConfig) {
- AudioDecoderConfig config(kUnknownAudioCodec, kUnknownSampleFormat,
+ AudioDecoderConfig config(AudioCodec::kUnknown, kUnknownSampleFormat,
CHANNEL_LAYOUT_STEREO, 0, EmptyExtraData(),
EncryptionScheme::kCenc);
@@ -299,7 +299,7 @@
EXPECT_CALL(*decryptor_, InitializeAudioDecoder(_, _))
.WillOnce(RunOnceCallback<1>(false));
- AudioDecoderConfig config(kCodecVorbis, kSampleFormatPlanarF32,
+ AudioDecoderConfig config(AudioCodec::kVorbis, kSampleFormatPlanarF32,
CHANNEL_LAYOUT_STEREO, kSampleRate,
EmptyExtraData(), EncryptionScheme::kCenc);
InitializeAndExpectResult(config, false);
@@ -307,7 +307,7 @@
TEST_F(DecryptingAudioDecoderTest, Initialize_CdmWithoutDecryptor) {
SetCdmType(CDM_WITHOUT_DECRYPTOR);
- AudioDecoderConfig config(kCodecVorbis, kSampleFormatPlanarF32,
+ AudioDecoderConfig config(AudioCodec::kVorbis, kSampleFormatPlanarF32,
CHANNEL_LAYOUT_STEREO, kSampleRate,
EmptyExtraData(), EncryptionScheme::kCenc);
InitializeAndExpectResult(config, false);
@@ -372,7 +372,7 @@
// The new config is different from the initial config in bits-per-channel,
// channel layout and samples_per_second.
- AudioDecoderConfig new_config(kCodecVorbis, kSampleFormatPlanarS16,
+ AudioDecoderConfig new_config(AudioCodec::kVorbis, kSampleFormatPlanarS16,
CHANNEL_LAYOUT_5_1, 88200, EmptyExtraData(),
EncryptionScheme::kCenc);
EXPECT_NE(new_config.bits_per_channel(), config_.bits_per_channel());
@@ -394,7 +394,7 @@
// The new config is different from the initial config in bits-per-channel,
// channel layout and samples_per_second.
- AudioDecoderConfig new_config(kCodecVorbis, kSampleFormatPlanarS16,
+ AudioDecoderConfig new_config(AudioCodec::kVorbis, kSampleFormatPlanarS16,
CHANNEL_LAYOUT_5_1, 88200, EmptyExtraData(),
EncryptionScheme::kUnencrypted);
EXPECT_NE(new_config.bits_per_channel(), config_.bits_per_channel());
diff --git a/media/filters/decrypting_demuxer_stream_unittest.cc b/media/filters/decrypting_demuxer_stream_unittest.cc
index dfe1e171..bb04809 100644
--- a/media/filters/decrypting_demuxer_stream_unittest.cc
+++ b/media/filters/decrypting_demuxer_stream_unittest.cc
@@ -143,7 +143,7 @@
return std::make_unique<CallbackRegistration>();
});
- AudioDecoderConfig input_config(kCodecVorbis, kSampleFormatPlanarF32,
+ AudioDecoderConfig input_config(AudioCodec::kVorbis, kSampleFormatPlanarF32,
CHANNEL_LAYOUT_STEREO, 44100,
EmptyExtraData(), EncryptionScheme::kCenc);
@@ -336,7 +336,7 @@
TEST_F(DecryptingDemuxerStreamTest, Initialize_CdmWithoutDecryptor) {
SetCdmType(CDM_WITHOUT_DECRYPTOR);
- AudioDecoderConfig input_config(kCodecVorbis, kSampleFormatPlanarF32,
+ AudioDecoderConfig input_config(AudioCodec::kVorbis, kSampleFormatPlanarF32,
CHANNEL_LAYOUT_STEREO, 44100,
EmptyExtraData(), EncryptionScheme::kCenc);
EXPECT_MEDIA_LOG(HasSubstr("kAudioTracks"));
@@ -516,7 +516,7 @@
TEST_F(DecryptingDemuxerStreamTest, DemuxerRead_ConfigChanged) {
Initialize(2, 2);
- AudioDecoderConfig new_config(kCodecVorbis, kSampleFormatPlanarF32,
+ AudioDecoderConfig new_config(AudioCodec::kVorbis, kSampleFormatPlanarF32,
CHANNEL_LAYOUT_STEREO, 88200, EmptyExtraData(),
EncryptionScheme::kCenc);
input_audio_stream_->set_audio_decoder_config(new_config);
diff --git a/media/filters/ffmpeg_audio_decoder.cc b/media/filters/ffmpeg_audio_decoder.cc
index f860ead..98ccc3e 100644
--- a/media/filters/ffmpeg_audio_decoder.cc
+++ b/media/filters/ffmpeg_audio_decoder.cc
@@ -320,7 +320,7 @@
codec_context_->flags2 |= AV_CODEC_FLAG2_SKIP_MANUAL;
AVDictionary* codec_options = NULL;
- if (config.codec() == kCodecOpus) {
+ if (config.codec() == AudioCodec::kOpus) {
codec_context_->request_sample_fmt = AV_SAMPLE_FMT_FLT;
// Disable phase inversion to avoid artifacts in mono downmix. See
@@ -365,9 +365,10 @@
void FFmpegAudioDecoder::ResetTimestampState(const AudioDecoderConfig& config) {
// Opus codec delay is handled by ffmpeg.
const int codec_delay =
- config.codec() == kCodecOpus ? 0 : config.codec_delay();
+ config.codec() == AudioCodec::kOpus ? 0 : config.codec_delay();
discard_helper_ = std::make_unique<AudioDiscardHelper>(
- config.samples_per_second(), codec_delay, config.codec() == kCodecVorbis);
+ config.samples_per_second(), codec_delay,
+ config.codec() == AudioCodec::kVorbis);
discard_helper_->Reset(codec_delay);
}
diff --git a/media/filters/ffmpeg_demuxer.cc b/media/filters/ffmpeg_demuxer.cc
index a76be9a0..00777584 100644
--- a/media/filters/ffmpeg_demuxer.cc
+++ b/media/filters/ffmpeg_demuxer.cc
@@ -121,8 +121,7 @@
// Record audio decoder config UMA stats corresponding to a src= playback.
static void RecordAudioCodecStats(const AudioDecoderConfig& audio_config) {
- UMA_HISTOGRAM_ENUMERATION("Media.AudioCodec", audio_config.codec(),
- kAudioCodecMax + 1);
+ base::UmaHistogramEnumeration("Media.AudioCodec", audio_config.codec());
}
// Record video decoder config UMA stats corresponding to a src= playback.
@@ -132,14 +131,13 @@
MediaLog* media_log) {
// TODO(xhwang): Fix these misleading metric names. They should be something
// like "Media.SRC.Xxxx". See http://crbug.com/716183.
- UMA_HISTOGRAM_ENUMERATION("Media.VideoCodec", video_config.codec(),
- kVideoCodecMax + 1);
+ base::UmaHistogramEnumeration("Media.VideoCodec", video_config.codec());
if (container == container_names::CONTAINER_MOV) {
- UMA_HISTOGRAM_ENUMERATION("Media.SRC.VideoCodec.MP4", video_config.codec(),
- kVideoCodecMax + 1);
+ base::UmaHistogramEnumeration("Media.SRC.VideoCodec.MP4",
+ video_config.codec());
} else if (container == container_names::CONTAINER_WEBM) {
- UMA_HISTOGRAM_ENUMERATION("Media.SRC.VideoCodec.WebM", video_config.codec(),
- kVideoCodecMax + 1);
+ base::UmaHistogramEnumeration("Media.SRC.VideoCodec.WebM",
+ video_config.codec());
}
}
@@ -1098,7 +1096,7 @@
GetFirstEnabledFFmpegStream(DemuxerStream::AUDIO);
if (audio_stream) {
const AudioDecoderConfig& config = audio_stream->audio_decoder_config();
- if (config.codec() == kCodecOpus)
+ if (config.codec() == AudioCodec::kOpus)
seek_time = std::max(start_time_, seek_time - config.seek_preroll());
}
diff --git a/media/filters/ffmpeg_demuxer_unittest.cc b/media/filters/ffmpeg_demuxer_unittest.cc
index 4ddc9521..6837152 100644
--- a/media/filters/ffmpeg_demuxer_unittest.cc
+++ b/media/filters/ffmpeg_demuxer_unittest.cc
@@ -398,7 +398,7 @@
EXPECT_EQ(DemuxerStream::VIDEO, stream->type());
const VideoDecoderConfig& video_config = stream->video_decoder_config();
- EXPECT_EQ(kCodecVP8, video_config.codec());
+ EXPECT_EQ(VideoCodec::kVP8, video_config.codec());
EXPECT_EQ(VideoDecoderConfig::AlphaMode::kIsOpaque,
video_config.alpha_mode());
EXPECT_EQ(320, video_config.coded_size().width());
@@ -417,7 +417,7 @@
EXPECT_EQ(DemuxerStream::AUDIO, stream->type());
const AudioDecoderConfig& audio_config = stream->audio_decoder_config();
- EXPECT_EQ(kCodecVorbis, audio_config.codec());
+ EXPECT_EQ(AudioCodec::kVorbis, audio_config.codec());
EXPECT_EQ(32, audio_config.bits_per_channel());
EXPECT_EQ(CHANNEL_LAYOUT_STEREO, audio_config.channel_layout());
EXPECT_EQ(44100, audio_config.samples_per_second());
@@ -447,26 +447,26 @@
DemuxerStream* stream = streams[0];
ASSERT_TRUE(stream);
EXPECT_EQ(DemuxerStream::VIDEO, stream->type());
- EXPECT_EQ(kCodecVP8, stream->video_decoder_config().codec());
+ EXPECT_EQ(VideoCodec::kVP8, stream->video_decoder_config().codec());
// Stream #1 should be Vorbis audio.
stream = streams[1];
ASSERT_TRUE(stream);
EXPECT_EQ(DemuxerStream::AUDIO, stream->type());
- EXPECT_EQ(kCodecVorbis, stream->audio_decoder_config().codec());
+ EXPECT_EQ(AudioCodec::kVorbis, stream->audio_decoder_config().codec());
// The subtitles stream is skipped.
// Stream #2 should be Theora video.
stream = streams[2];
ASSERT_TRUE(stream);
EXPECT_EQ(DemuxerStream::VIDEO, stream->type());
- EXPECT_EQ(kCodecTheora, stream->video_decoder_config().codec());
+ EXPECT_EQ(VideoCodec::kTheora, stream->video_decoder_config().codec());
// Stream #3 should be PCM audio.
stream = streams[3];
ASSERT_TRUE(stream);
EXPECT_EQ(DemuxerStream::AUDIO, stream->type());
- EXPECT_EQ(kCodecPCM, stream->audio_decoder_config().codec());
+ EXPECT_EQ(AudioCodec::kPCM, stream->audio_decoder_config().codec());
}
#endif
@@ -1313,8 +1313,8 @@
VideoColorSpace::TransferID::SMPTE170M,
VideoColorSpace::MatrixID::SMPTE170M,
gfx::ColorSpace::RangeID::LIMITED);
- VideoType hevc_type = {VideoCodec::kCodecHEVC,
- VideoCodecProfile::HEVCPROFILE_MAIN, 10, color_space};
+ VideoType hevc_type = {VideoCodec::kHEVC, VideoCodecProfile::HEVCPROFILE_MAIN,
+ 10, color_space};
EXPECT_CALL(media_client, IsSupportedVideoType(Eq(hevc_type)))
.WillRepeatedly(Return(true));
@@ -1340,7 +1340,7 @@
MockMediaClient media_client;
SetMediaClient(&media_client);
- AudioType ac3_type = {AudioCodec::kCodecAC3};
+ AudioType ac3_type = {AudioCodec::kAC3};
EXPECT_CALL(media_client, IsSupportedAudioType(Eq(ac3_type)))
.WillRepeatedly(Return(true));
@@ -1367,7 +1367,7 @@
MockMediaClient media_client;
SetMediaClient(&media_client);
- AudioType eac3_type = {AudioCodec::kCodecEAC3};
+ AudioType eac3_type = {AudioCodec::kEAC3};
EXPECT_CALL(media_client, IsSupportedAudioType(Eq(eac3_type)))
.WillRepeatedly(Return(true));
@@ -1576,7 +1576,7 @@
EXPECT_EQ(DemuxerStream::AUDIO, stream->type());
const AudioDecoderConfig& audio_config = stream->audio_decoder_config();
- EXPECT_EQ(kCodecFLAC, audio_config.codec());
+ EXPECT_EQ(AudioCodec::kFLAC, audio_config.codec());
EXPECT_EQ(expected_bits_per_channel, audio_config.bits_per_channel());
EXPECT_EQ(expected_channel_layout, audio_config.channel_layout());
EXPECT_EQ(expected_samples_per_second, audio_config.samples_per_second());
diff --git a/media/filters/ffmpeg_video_decoder.cc b/media/filters/ffmpeg_video_decoder.cc
index 5aba5f0..dfb65a7 100644
--- a/media/filters/ffmpeg_video_decoder.cc
+++ b/media/filters/ffmpeg_video_decoder.cc
@@ -37,23 +37,23 @@
// Some ffmpeg codecs don't actually benefit from using more threads.
// Only add more threads for those codecs that we know will benefit.
switch (config.codec()) {
- case kUnknownVideoCodec:
- case kCodecVC1:
- case kCodecMPEG2:
- case kCodecHEVC:
- case kCodecVP9:
- case kCodecAV1:
- case kCodecDolbyVision:
+ case VideoCodec::kUnknown:
+ case VideoCodec::kVC1:
+ case VideoCodec::kMPEG2:
+ case VideoCodec::kHEVC:
+ case VideoCodec::kVP9:
+ case VideoCodec::kAV1:
+ case VideoCodec::kDolbyVision:
// We do not compile ffmpeg with support for any of these codecs.
break;
- case kCodecTheora:
- case kCodecMPEG4:
+ case VideoCodec::kTheora:
+ case VideoCodec::kMPEG4:
// No extra threads for these codecs.
break;
- case kCodecH264:
- case kCodecVP8:
+ case VideoCodec::kH264:
+ case VideoCodec::kVP8:
// Normalize to three threads for 1080p content, then scale linearly
// with number of pixels.
// Examples:
@@ -89,7 +89,7 @@
SupportedVideoDecoderConfigs FFmpegVideoDecoder::SupportedConfigsForWebRTC() {
SupportedVideoDecoderConfigs supported_configs;
- if (IsCodecSupported(kCodecH264)) {
+ if (IsCodecSupported(VideoCodec::kH264)) {
supported_configs.emplace_back(/*profile_min=*/H264PROFILE_BASELINE,
/*profile_max=*/H264PROFILE_HIGH,
/*coded_size_min=*/kDefaultSwDecodeSizeMin,
@@ -97,7 +97,7 @@
/*allow_encrypted=*/false,
/*require_encrypted=*/false);
}
- if (IsCodecSupported(kCodecVP8)) {
+ if (IsCodecSupported(VideoCodec::kVP8)) {
supported_configs.emplace_back(/*profile_min=*/VP8PROFILE_ANY,
/*profile_max=*/VP8PROFILE_ANY,
/*coded_size_min=*/kDefaultSwDecodeSizeMin,
diff --git a/media/filters/ffmpeg_video_decoder_unittest.cc b/media/filters/ffmpeg_video_decoder_unittest.cc
index dead419..7530c0f 100644
--- a/media/filters/ffmpeg_video_decoder_unittest.cc
+++ b/media/filters/ffmpeg_video_decoder_unittest.cc
@@ -226,7 +226,7 @@
TEST_F(FFmpegVideoDecoderTest, Initialize_OpenDecoderFails) {
// Specify Theora w/o extra data so that avcodec_open2() fails.
- VideoDecoderConfig config(kCodecTheora, VIDEO_CODEC_PROFILE_UNKNOWN,
+ VideoDecoderConfig config(VideoCodec::kTheora, VIDEO_CODEC_PROFILE_UNKNOWN,
VideoDecoderConfig::AlphaMode::kIsOpaque,
VideoColorSpace(), kNoTransformation, kCodedSize,
kVisibleRect, kNaturalSize, EmptyExtraData(),
diff --git a/media/filters/frame_processor_unittest.cc b/media/filters/frame_processor_unittest.cc
index 9547f72..20ef171 100644
--- a/media/filters/frame_processor_unittest.cc
+++ b/media/filters/frame_processor_unittest.cc
@@ -381,13 +381,14 @@
AudioDecoderConfig decoder_config;
if (support_audio_nonkeyframes) {
decoder_config = AudioDecoderConfig(
- kCodecAAC, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO, 1000,
- EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ AudioCodec::kAAC, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO,
+ 1000, EmptyExtraData(), EncryptionScheme::kUnencrypted);
decoder_config.set_profile(AudioCodecProfile::kXHE_AAC);
} else {
- decoder_config = AudioDecoderConfig(
- kCodecVorbis, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO, 1000,
- EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ decoder_config =
+ AudioDecoderConfig(AudioCodec::kVorbis, kSampleFormatPlanarF32,
+ CHANNEL_LAYOUT_STEREO, 1000, EmptyExtraData(),
+ EncryptionScheme::kUnencrypted);
}
frame_processor_->OnPossibleAudioConfigUpdate(decoder_config);
ASSERT_TRUE(
diff --git a/media/filters/fuchsia/fuchsia_video_decoder.cc b/media/filters/fuchsia/fuchsia_video_decoder.cc
index 908c5b2..fd7068a3 100644
--- a/media/filters/fuchsia/fuchsia_video_decoder.cc
+++ b/media/filters/fuchsia/fuchsia_video_decoder.cc
@@ -250,7 +250,7 @@
container_aspect_ratio_ = config.aspect_ratio();
// Keep decoder and decryptor if the configuration hasn't changed.
- if (decoder_ && current_config_.is_encrypted() == config.codec() &&
+ if (decoder_ && current_config_.codec() == config.codec() &&
current_config_.is_encrypted() == config.is_encrypted()) {
std::move(done_callback).Run(OkStatus());
return;
@@ -275,19 +275,19 @@
decoder_params.mutable_input_details()->set_format_details_version_ordinal(0);
switch (config.codec()) {
- case kCodecH264:
+ case VideoCodec::kH264:
decoder_params.mutable_input_details()->set_mime_type("video/h264");
break;
- case kCodecVP8:
+ case VideoCodec::kVP8:
decoder_params.mutable_input_details()->set_mime_type("video/vp8");
break;
- case kCodecVP9:
+ case VideoCodec::kVP9:
decoder_params.mutable_input_details()->set_mime_type("video/vp9");
break;
- case kCodecHEVC:
+ case VideoCodec::kHEVC:
decoder_params.mutable_input_details()->set_mime_type("video/hevc");
break;
- case kCodecAV1:
+ case VideoCodec::kAV1:
decoder_params.mutable_input_details()->set_mime_type("video/av1");
break;
diff --git a/media/filters/fuchsia/fuchsia_video_decoder_unittest.cc b/media/filters/fuchsia/fuchsia_video_decoder_unittest.cc
index aced32b7..a86439e 100644
--- a/media/filters/fuchsia/fuchsia_video_decoder_unittest.cc
+++ b/media/filters/fuchsia/fuchsia_video_decoder_unittest.cc
@@ -411,7 +411,7 @@
}
TEST_F(FuchsiaVideoDecoderTest, DISABLED_VP9) {
- ASSERT_TRUE(InitializeDecoder(TestVideoConfig::Normal(kCodecVP9)));
+ ASSERT_TRUE(InitializeDecoder(TestVideoConfig::Normal(VideoCodec::kVP9)));
DecodeBuffer(ReadTestDataFile("vp9-I-frame-320x240"));
DecodeBuffer(DecoderBuffer::CreateEOSBuffer());
diff --git a/media/filters/gav1_video_decoder.cc b/media/filters/gav1_video_decoder.cc
index 453476d..b79bb7171 100644
--- a/media/filters/gav1_video_decoder.cc
+++ b/media/filters/gav1_video_decoder.cc
@@ -269,7 +269,7 @@
InitCB bound_init_cb = bind_callbacks_ ? BindToCurrentLoop(std::move(init_cb))
: std::move(init_cb);
- if (config.is_encrypted() || config.codec() != kCodecAV1) {
+ if (config.is_encrypted() || config.codec() != VideoCodec::kAV1) {
std::move(bound_init_cb).Run(StatusCode::kEncryptedContentUnsupported);
return;
}
diff --git a/media/filters/gav1_video_decoder.h b/media/filters/gav1_video_decoder.h
index 68daf5c..5ac59365 100644
--- a/media/filters/gav1_video_decoder.h
+++ b/media/filters/gav1_video_decoder.h
@@ -96,7 +96,7 @@
explicit OffloadingGav1VideoDecoder(MediaLog* media_log)
: OffloadingVideoDecoder(
0,
- std::vector<VideoCodec>(1, kCodecAV1),
+ std::vector<VideoCodec>(1, VideoCodec::kAV1),
std::make_unique<Gav1VideoDecoder>(
media_log,
OffloadableVideoDecoder::OffloadState::kOffloaded)) {}
diff --git a/media/filters/gav1_video_decoder_unittest.cc b/media/filters/gav1_video_decoder_unittest.cc
index 33ccf13e..e77309c 100644
--- a/media/filters/gav1_video_decoder_unittest.cc
+++ b/media/filters/gav1_video_decoder_unittest.cc
@@ -72,7 +72,7 @@
~Gav1VideoDecoderTest() override { Destroy(); }
void Initialize() {
- InitializeWithConfig(TestVideoConfig::Normal(kCodecAV1));
+ InitializeWithConfig(TestVideoConfig::Normal(VideoCodec::kAV1));
}
void InitializeWithConfigWithResult(const VideoDecoderConfig& config,
@@ -94,7 +94,7 @@
}
void Reinitialize() {
- InitializeWithConfig(TestVideoConfig::Large(kCodecAV1));
+ InitializeWithConfig(TestVideoConfig::Large(VideoCodec::kAV1));
}
void Reset() {
diff --git a/media/filters/offloading_video_decoder_unittest.cc b/media/filters/offloading_video_decoder_unittest.cc
index 7571c9e..4f9a116f 100644
--- a/media/filters/offloading_video_decoder_unittest.cc
+++ b/media/filters/offloading_video_decoder_unittest.cc
@@ -204,35 +204,35 @@
};
TEST_F(OffloadingVideoDecoderTest, NoOffloadingTooSmall) {
- auto offload_config = TestVideoConfig::Large(kCodecVP9);
- CreateWrapper(offload_config.coded_size().width(), kCodecVP9);
- TestNoOffloading(TestVideoConfig::Normal(kCodecVP9));
+ auto offload_config = TestVideoConfig::Large(VideoCodec::kVP9);
+ CreateWrapper(offload_config.coded_size().width(), VideoCodec::kVP9);
+ TestNoOffloading(TestVideoConfig::Normal(VideoCodec::kVP9));
}
TEST_F(OffloadingVideoDecoderTest, NoOffloadingDifferentCodec) {
- auto offload_config = TestVideoConfig::Large(kCodecVP9);
- CreateWrapper(offload_config.coded_size().width(), kCodecVP9);
- TestNoOffloading(TestVideoConfig::Large(kCodecVP8));
+ auto offload_config = TestVideoConfig::Large(VideoCodec::kVP9);
+ CreateWrapper(offload_config.coded_size().width(), VideoCodec::kVP9);
+ TestNoOffloading(TestVideoConfig::Large(VideoCodec::kVP8));
}
TEST_F(OffloadingVideoDecoderTest, NoOffloadingHasEncryption) {
- auto offload_config = TestVideoConfig::Large(kCodecVP9);
- CreateWrapper(offload_config.coded_size().width(), kCodecVP9);
- TestNoOffloading(TestVideoConfig::LargeEncrypted(kCodecVP9));
+ auto offload_config = TestVideoConfig::Large(VideoCodec::kVP9);
+ CreateWrapper(offload_config.coded_size().width(), VideoCodec::kVP9);
+ TestNoOffloading(TestVideoConfig::LargeEncrypted(VideoCodec::kVP9));
}
TEST_F(OffloadingVideoDecoderTest, Offloading) {
- auto offload_config = TestVideoConfig::Large(kCodecVP9);
- CreateWrapper(offload_config.coded_size().width(), kCodecVP9);
+ auto offload_config = TestVideoConfig::Large(VideoCodec::kVP9);
+ CreateWrapper(offload_config.coded_size().width(), VideoCodec::kVP9);
TestOffloading(offload_config);
}
TEST_F(OffloadingVideoDecoderTest, OffloadingAfterNoOffloading) {
- auto offload_config = TestVideoConfig::Large(kCodecVP9);
- CreateWrapper(offload_config.coded_size().width(), kCodecVP9);
+ auto offload_config = TestVideoConfig::Large(VideoCodec::kVP9);
+ CreateWrapper(offload_config.coded_size().width(), VideoCodec::kVP9);
// Setup and test the no offloading path first.
- TestNoOffloading(TestVideoConfig::Normal(kCodecVP9));
+ TestNoOffloading(TestVideoConfig::Normal(VideoCodec::kVP9));
// Test offloading now.
TestOffloading(offload_config, true);
@@ -242,7 +242,8 @@
// should happen asynchronously, set expectation after the call.
VideoDecoder::OutputCB output_cb;
offloading_decoder_->Initialize(
- TestVideoConfig::Normal(kCodecVP9), false, nullptr, ExpectInitCB(true),
+ TestVideoConfig::Normal(VideoCodec::kVP9), false, nullptr,
+ ExpectInitCB(true),
base::BindRepeating(&OffloadingVideoDecoderTest::OutputDone,
base::Unretained(this)),
base::NullCallback());
@@ -255,17 +256,17 @@
}
TEST_F(OffloadingVideoDecoderTest, InitializeWithoutDetach) {
- auto offload_config = TestVideoConfig::Large(kCodecVP9);
- CreateWrapper(offload_config.coded_size().width(), kCodecVP9);
+ auto offload_config = TestVideoConfig::Large(VideoCodec::kVP9);
+ CreateWrapper(offload_config.coded_size().width(), VideoCodec::kVP9);
EXPECT_CALL(*decoder_, Detach()).Times(0);
- TestNoOffloading(TestVideoConfig::Normal(kCodecVP9));
- TestNoOffloading(TestVideoConfig::Normal(kCodecVP9));
+ TestNoOffloading(TestVideoConfig::Normal(VideoCodec::kVP9));
+ TestNoOffloading(TestVideoConfig::Normal(VideoCodec::kVP9));
}
TEST_F(OffloadingVideoDecoderTest, ParallelizedOffloading) {
- auto offload_config = TestVideoConfig::Large(kCodecVP9);
- CreateWrapper(offload_config.coded_size().width(), kCodecVP9);
+ auto offload_config = TestVideoConfig::Large(VideoCodec::kVP9);
+ CreateWrapper(offload_config.coded_size().width(), VideoCodec::kVP9);
// Since this Initialize() should be happening on another thread, set the
// expectation after we make the call.
@@ -315,8 +316,8 @@
}
TEST_F(OffloadingVideoDecoderTest, ParallelizedOffloadingResetAbortsDecodes) {
- auto offload_config = TestVideoConfig::Large(kCodecVP9);
- CreateWrapper(offload_config.coded_size().width(), kCodecVP9);
+ auto offload_config = TestVideoConfig::Large(VideoCodec::kVP9);
+ CreateWrapper(offload_config.coded_size().width(), VideoCodec::kVP9);
// Since this Initialize() should be happening on another thread, set the
// expectation after we make the call.
diff --git a/media/filters/source_buffer_state.cc b/media/filters/source_buffer_state.cc
index c1a4cae4..6751493 100644
--- a/media/filters/source_buffer_state.cc
+++ b/media/filters/source_buffer_state.cc
@@ -585,12 +585,12 @@
std::vector<VideoCodec> expected_vcodecs;
for (const auto& codec_id : expected_codecs_parsed) {
AudioCodec acodec = StringToAudioCodec(codec_id);
- if (acodec != kUnknownAudioCodec) {
+ if (acodec != AudioCodec::kUnknown) {
expected_audio_codecs_.push_back(acodec);
continue;
}
VideoCodec vcodec = StringToVideoCodec(codec_id);
- if (vcodec != kUnknownVideoCodec) {
+ if (vcodec != VideoCodec::kUnknown) {
expected_video_codecs_.push_back(vcodec);
continue;
}
@@ -716,7 +716,7 @@
<< " config: " << video_config.AsHumanReadableString();
DCHECK(video_config.IsValidConfig());
- if (video_config.codec() == kCodecHEVC) {
+ if (video_config.codec() == VideoCodec::kHEVC) {
#if BUILDFLAG(ENABLE_PLATFORM_ENCRYPTED_HEVC)
#if BUILDFLAG(IS_CHROMEOS_LACROS)
if (!base::CommandLine::ForCurrentProcess()->HasSwitch(
diff --git a/media/filters/source_buffer_state_unittest.cc b/media/filters/source_buffer_state_unittest.cc
index f0fd900..2e11a8978 100644
--- a/media/filters/source_buffer_state_unittest.cc
+++ b/media/filters/source_buffer_state_unittest.cc
@@ -158,7 +158,7 @@
CreateAndInitSourceBufferState("vorbis");
std::unique_ptr<MediaTracks> tracks(new MediaTracks());
- AddAudioTrack(tracks, kCodecVorbis, 1);
+ AddAudioTrack(tracks, AudioCodec::kVorbis, 1);
EXPECT_FOUND_CODEC_NAME(Audio, "vorbis");
EXPECT_CALL(*this, MediaTracksUpdatedMock(_));
@@ -170,7 +170,7 @@
CreateAndInitSourceBufferState("vp8");
std::unique_ptr<MediaTracks> tracks(new MediaTracks());
- AddVideoTrack(tracks, kCodecVP8, 1);
+ AddVideoTrack(tracks, VideoCodec::kVP8, 1);
EXPECT_FOUND_CODEC_NAME(Video, "vp8");
EXPECT_CALL(*this, MediaTracksUpdatedMock(_));
@@ -182,10 +182,10 @@
CreateAndInitSourceBufferState("vorbis,vp8,opus,vp9");
std::unique_ptr<MediaTracks> tracks(new MediaTracks());
- AddAudioTrack(tracks, kCodecVorbis, 1);
- AddAudioTrack(tracks, kCodecOpus, 2);
- AddVideoTrack(tracks, kCodecVP8, 3);
- AddVideoTrack(tracks, kCodecVP9, 4);
+ AddAudioTrack(tracks, AudioCodec::kVorbis, 1);
+ AddAudioTrack(tracks, AudioCodec::kOpus, 2);
+ AddVideoTrack(tracks, VideoCodec::kVP8, 3);
+ AddVideoTrack(tracks, VideoCodec::kVP9, 4);
EXPECT_FOUND_CODEC_NAME(Audio, "vorbis");
EXPECT_FOUND_CODEC_NAME(Audio, "opus");
@@ -199,7 +199,7 @@
std::unique_ptr<SourceBufferState> sbs =
CreateAndInitSourceBufferState("opus");
std::unique_ptr<MediaTracks> tracks(new MediaTracks());
- AddAudioTrack(tracks, kCodecVorbis, 1);
+ AddAudioTrack(tracks, AudioCodec::kVorbis, 1);
EXPECT_MEDIA_LOG(InitSegmentMismatchesMimeType("Audio", "vorbis"));
EXPECT_FALSE(AppendDataAndReportTracks(sbs, std::move(tracks)));
}
@@ -208,7 +208,7 @@
std::unique_ptr<SourceBufferState> sbs =
CreateAndInitSourceBufferState("vp9");
std::unique_ptr<MediaTracks> tracks(new MediaTracks());
- AddVideoTrack(tracks, kCodecVP8, 1);
+ AddVideoTrack(tracks, VideoCodec::kVP8, 1);
EXPECT_MEDIA_LOG(InitSegmentMismatchesMimeType("Video", "vp8"));
EXPECT_FALSE(AppendDataAndReportTracks(sbs, std::move(tracks)));
}
@@ -217,7 +217,7 @@
std::unique_ptr<SourceBufferState> sbs =
CreateAndInitSourceBufferState("opus,vp9");
std::unique_ptr<MediaTracks> tracks(new MediaTracks());
- AddVideoTrack(tracks, kCodecVP9, 1);
+ AddVideoTrack(tracks, VideoCodec::kVP9, 1);
EXPECT_FOUND_CODEC_NAME(Video, "vp9");
EXPECT_MEDIA_LOG(InitSegmentMissesExpectedTrack("opus"));
EXPECT_FALSE(AppendDataAndReportTracks(sbs, std::move(tracks)));
@@ -227,8 +227,9 @@
std::unique_ptr<SourceBufferState> sbs =
CreateAndInitSourceBufferState("opus,vp9");
std::unique_ptr<MediaTracks> tracks(new MediaTracks());
- tracks->AddAudioTrack(CreateAudioConfig(kCodecOpus), 1, MediaTrack::Kind(),
- MediaTrack::Label(), MediaTrack::Language());
+ tracks->AddAudioTrack(CreateAudioConfig(AudioCodec::kOpus), 1,
+ MediaTrack::Kind(), MediaTrack::Label(),
+ MediaTrack::Language());
EXPECT_FOUND_CODEC_NAME(Audio, "opus");
EXPECT_MEDIA_LOG(InitSegmentMissesExpectedTrack("vp9"));
EXPECT_FALSE(AppendDataAndReportTracks(sbs, std::move(tracks)));
@@ -239,8 +240,8 @@
CreateAndInitSourceBufferState("opus,vp9");
std::unique_ptr<MediaTracks> tracks(new MediaTracks());
- AddAudioTrack(tracks, kCodecOpus, 1);
- AddVideoTrack(tracks, kCodecVP9, 2);
+ AddAudioTrack(tracks, AudioCodec::kOpus, 1);
+ AddVideoTrack(tracks, VideoCodec::kVP9, 2);
EXPECT_FOUND_CODEC_NAME(Audio, "opus");
EXPECT_FOUND_CODEC_NAME(Video, "vp9");
EXPECT_CALL(*this, MediaTracksUpdatedMock(_));
@@ -251,8 +252,8 @@
// Bytestream track ids are allowed to change when there is only a single
// track of each type.
std::unique_ptr<MediaTracks> tracks2(new MediaTracks());
- AddAudioTrack(tracks2, kCodecOpus, 3);
- AddVideoTrack(tracks2, kCodecVP9, 4);
+ AddAudioTrack(tracks2, AudioCodec::kOpus, 3);
+ AddVideoTrack(tracks2, VideoCodec::kVP9, 4);
EXPECT_CALL(*this, MediaTracksUpdatedMock(_));
AppendDataAndReportTracks(sbs, std::move(tracks2));
}
@@ -262,8 +263,8 @@
CreateAndInitSourceBufferState("vorbis,opus");
std::unique_ptr<MediaTracks> tracks(new MediaTracks());
- AddAudioTrack(tracks, kCodecVorbis, 1);
- AddAudioTrack(tracks, kCodecOpus, 2);
+ AddAudioTrack(tracks, AudioCodec::kVorbis, 1);
+ AddAudioTrack(tracks, AudioCodec::kOpus, 2);
EXPECT_FOUND_CODEC_NAME(Audio, "vorbis");
EXPECT_FOUND_CODEC_NAME(Audio, "opus");
EXPECT_CALL(*this, MediaTracksUpdatedMock(_));
@@ -272,16 +273,16 @@
// Since we have two audio tracks, bytestream track ids must match the first
// init segment.
std::unique_ptr<MediaTracks> tracks2(new MediaTracks());
- AddAudioTrack(tracks2, kCodecVorbis, 1);
- AddAudioTrack(tracks2, kCodecOpus, 2);
+ AddAudioTrack(tracks2, AudioCodec::kVorbis, 1);
+ AddAudioTrack(tracks2, AudioCodec::kOpus, 2);
EXPECT_CALL(*this, MediaTracksUpdatedMock(_));
EXPECT_TRUE(AppendDataAndReportTracks(sbs, std::move(tracks2)));
// Emulate the situation where bytestream track ids have changed in the third
// init segment. This must cause failure in the OnNewConfigs.
std::unique_ptr<MediaTracks> tracks3(new MediaTracks());
- AddAudioTrack(tracks3, kCodecVorbis, 1);
- AddAudioTrack(tracks3, kCodecOpus, 3);
+ AddAudioTrack(tracks3, AudioCodec::kVorbis, 1);
+ AddAudioTrack(tracks3, AudioCodec::kOpus, 3);
EXPECT_MEDIA_LOG(UnexpectedTrack("audio", "3"));
EXPECT_FALSE(AppendDataAndReportTracks(sbs, std::move(tracks3)));
}
@@ -291,8 +292,8 @@
CreateAndInitSourceBufferState("vp8,vp9");
std::unique_ptr<MediaTracks> tracks(new MediaTracks());
- AddVideoTrack(tracks, kCodecVP8, 1);
- AddVideoTrack(tracks, kCodecVP9, 2);
+ AddVideoTrack(tracks, VideoCodec::kVP8, 1);
+ AddVideoTrack(tracks, VideoCodec::kVP9, 2);
EXPECT_FOUND_CODEC_NAME(Video, "vp8");
EXPECT_FOUND_CODEC_NAME(Video, "vp9");
EXPECT_CALL(*this, MediaTracksUpdatedMock(_));
@@ -301,16 +302,16 @@
// Since we have two video tracks, bytestream track ids must match the first
// init segment.
std::unique_ptr<MediaTracks> tracks2(new MediaTracks());
- AddVideoTrack(tracks2, kCodecVP8, 1);
- AddVideoTrack(tracks2, kCodecVP9, 2);
+ AddVideoTrack(tracks2, VideoCodec::kVP8, 1);
+ AddVideoTrack(tracks2, VideoCodec::kVP9, 2);
EXPECT_CALL(*this, MediaTracksUpdatedMock(_));
EXPECT_TRUE(AppendDataAndReportTracks(sbs, std::move(tracks2)));
// Emulate the situation where bytestream track ids have changed in the third
// init segment. This must cause failure in the OnNewConfigs.
std::unique_ptr<MediaTracks> tracks3(new MediaTracks());
- AddVideoTrack(tracks3, kCodecVP8, 1);
- AddVideoTrack(tracks3, kCodecVP9, 3);
+ AddVideoTrack(tracks3, VideoCodec::kVP8, 1);
+ AddVideoTrack(tracks3, VideoCodec::kVP9, 3);
EXPECT_MEDIA_LOG(UnexpectedTrack("video", "3"));
EXPECT_FALSE(AppendDataAndReportTracks(sbs, std::move(tracks3)));
}
@@ -320,8 +321,8 @@
CreateAndInitSourceBufferState("opus,vp9");
std::unique_ptr<MediaTracks> tracks(new MediaTracks());
- AddAudioTrack(tracks, kCodecOpus, 1);
- AddVideoTrack(tracks, kCodecVP9, 2);
+ AddAudioTrack(tracks, AudioCodec::kOpus, 1);
+ AddVideoTrack(tracks, VideoCodec::kVP9, 2);
EXPECT_FOUND_CODEC_NAME(Audio, "opus");
EXPECT_FOUND_CODEC_NAME(Video, "vp9");
EXPECT_CALL(*this, MediaTracksUpdatedMock(_));
@@ -329,8 +330,8 @@
// Track ids are swapped in the second init segment.
std::unique_ptr<MediaTracks> tracks2(new MediaTracks());
- AddAudioTrack(tracks2, kCodecOpus, 2);
- AddVideoTrack(tracks2, kCodecVP9, 1);
+ AddAudioTrack(tracks2, AudioCodec::kOpus, 2);
+ AddVideoTrack(tracks2, VideoCodec::kVP9, 1);
EXPECT_CALL(*this, MediaTracksUpdatedMock(_));
AppendDataAndReportTracks(sbs, std::move(tracks2));
}
diff --git a/media/filters/source_buffer_stream.cc b/media/filters/source_buffer_stream.cc
index 3c877e3..d54c6cb 100644
--- a/media/filters/source_buffer_stream.cc
+++ b/media/filters/source_buffer_stream.cc
@@ -1495,7 +1495,7 @@
// |timestamp| is already before the range start time, as can happen due to
// fudge room, do not adjust it.
const auto& config = audio_configs_[(*itr)->GetConfigIdAtTime(timestamp)];
- if (config.codec() == kCodecOpus &&
+ if (config.codec() == AudioCodec::kOpus &&
timestamp > (*itr)->GetStartTimestamp()) {
base::TimeDelta preroll_timestamp = std::max(
timestamp - config.seek_preroll(), (*itr)->GetStartTimestamp());
diff --git a/media/filters/source_buffer_stream_unittest.cc b/media/filters/source_buffer_stream_unittest.cc
index 27d1313359..9842e4d 100644
--- a/media/filters/source_buffer_stream_unittest.cc
+++ b/media/filters/source_buffer_stream_unittest.cc
@@ -99,9 +99,10 @@
void SetAudioStream() {
video_config_ = TestVideoConfig::Invalid();
- audio_config_.Initialize(
- kCodecVorbis, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO, 1000,
- EmptyExtraData(), EncryptionScheme::kUnencrypted, base::TimeDelta(), 0);
+ audio_config_.Initialize(AudioCodec::kVorbis, kSampleFormatPlanarF32,
+ CHANNEL_LAYOUT_STEREO, 1000, EmptyExtraData(),
+ EncryptionScheme::kUnencrypted, base::TimeDelta(),
+ 0);
ResetStream<>(audio_config_);
// Equivalent to 2ms per frame.
@@ -3893,8 +3894,8 @@
// Test all the valid same timestamp cases for audio.
TEST_F(SourceBufferStreamTest, SameTimestamp_Audio) {
- AudioDecoderConfig config(kCodecMP3, kSampleFormatF32, CHANNEL_LAYOUT_STEREO,
- 44100, EmptyExtraData(),
+ AudioDecoderConfig config(AudioCodec::kMP3, kSampleFormatF32,
+ CHANNEL_LAYOUT_STEREO, 44100, EmptyExtraData(),
EncryptionScheme::kUnencrypted);
ResetStream<>(config);
Seek(0);
@@ -4522,7 +4523,7 @@
video_config_ = TestVideoConfig::Invalid();
audio_config_.Initialize(
- kCodecVorbis, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO, 4000,
+ AudioCodec::kVorbis, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO, 4000,
EmptyExtraData(), EncryptionScheme::kUnencrypted, base::TimeDelta(), 0);
ResetStream<>(audio_config_);
// Equivalent to 0.5ms per frame.
@@ -4554,7 +4555,7 @@
}
TEST_F(SourceBufferStreamTest, Audio_ConfigChangeWithPreroll) {
- AudioDecoderConfig new_config(kCodecVorbis, kSampleFormatPlanarF32,
+ AudioDecoderConfig new_config(AudioCodec::kVorbis, kSampleFormatPlanarF32,
CHANNEL_LAYOUT_MONO, 2000, EmptyExtraData(),
EncryptionScheme::kUnencrypted);
SetAudioStream();
@@ -4599,7 +4600,7 @@
// in case the associated logic to check same config in the preroll time
// interval requires a nonzero seek_preroll value.
video_config_ = TestVideoConfig::Invalid();
- audio_config_.Initialize(kCodecOpus, kSampleFormatPlanarF32,
+ audio_config_.Initialize(AudioCodec::kOpus, kSampleFormatPlanarF32,
CHANNEL_LAYOUT_STEREO, 1000, EmptyExtraData(),
EncryptionScheme::kUnencrypted,
base::TimeDelta::FromMilliseconds(10), 0);
diff --git a/media/filters/vpx_video_decoder.cc b/media/filters/vpx_video_decoder.cc
index 45b1248..e15e8652 100644
--- a/media/filters/vpx_video_decoder.cc
+++ b/media/filters/vpx_video_decoder.cc
@@ -42,7 +42,7 @@
// For VP9 decoding increase the number of decode threads to equal the
// maximum number of tiles possible for higher resolution streams.
- if (config.codec() == kCodecVP9) {
+ if (config.codec() == VideoCodec::kVP9) {
const int width = config.coded_size().width();
if (width >= 3840)
desired_threads = 16;
@@ -63,10 +63,11 @@
vpx_config.h = config.coded_size().height();
vpx_config.threads = GetVpxVideoDecoderThreadCount(config);
- vpx_codec_err_t status = vpx_codec_dec_init(
- context.get(),
- config.codec() == kCodecVP9 ? vpx_codec_vp9_dx() : vpx_codec_vp8_dx(),
- &vpx_config, 0 /* flags */);
+ vpx_codec_err_t status = vpx_codec_dec_init(context.get(),
+ config.codec() == VideoCodec::kVP9
+ ? vpx_codec_vp9_dx()
+ : vpx_codec_vp8_dx(),
+ &vpx_config, 0 /* flags */);
if (status == VPX_CODEC_OK)
return context;
@@ -224,7 +225,7 @@
bool VpxVideoDecoder::ConfigureDecoder(const VideoDecoderConfig& config) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- if (config.codec() != kCodecVP8 && config.codec() != kCodecVP9)
+ if (config.codec() != VideoCodec::kVP8 && config.codec() != VideoCodec::kVP9)
return false;
#if BUILDFLAG(ENABLE_FFMPEG_VIDEO_DECODERS)
@@ -232,7 +233,7 @@
// VpxVideoDecoder will handle VP8 with alpha. FFvp8 is being deprecated.
// See http://crbug.com/992235.
if (base::FeatureList::IsEnabled(kFFmpegDecodeOpaqueVP8) &&
- config.codec() == kCodecVP8 &&
+ config.codec() == VideoCodec::kVP8 &&
config.alpha_mode() == VideoDecoderConfig::AlphaMode::kIsOpaque) {
return false;
}
@@ -246,7 +247,7 @@
// Configure VP9 to decode on our buffers to skip a data copy on
// decoding. For YV12A-VP9, we use our buffers for the Y, U and V planes and
// copy the A plane.
- if (config.codec() == kCodecVP9) {
+ if (config.codec() == VideoCodec::kVP9) {
DCHECK(vpx_codec_get_caps(vpx_codec_->iface) &
VPX_CODEC_CAP_EXTERNAL_FRAME_BUFFER);
@@ -347,7 +348,7 @@
if (!CopyVpxImageToVideoFrame(vpx_image, vpx_image_alpha, video_frame))
return false;
- if (vpx_image_alpha && config_.codec() == kCodecVP8) {
+ if (vpx_image_alpha && config_.codec() == VideoCodec::kVP8) {
libyuv::CopyPlane(vpx_image_alpha->planes[VPX_PLANE_Y],
vpx_image_alpha->stride[VPX_PLANE_Y],
(*video_frame)->visible_data(VideoFrame::kAPlane),
@@ -550,7 +551,7 @@
config_.aspect_ratio().GetNaturalSize(gfx::Rect(visible_size));
if (memory_pool_) {
- DCHECK_EQ(kCodecVP9, config_.codec());
+ DCHECK_EQ(VideoCodec::kVP9, config_.codec());
if (vpx_image_alpha) {
size_t alpha_plane_size =
vpx_image_alpha->stride[VPX_PLANE_Y] * vpx_image_alpha->d_h;
diff --git a/media/filters/vpx_video_decoder.h b/media/filters/vpx_video_decoder.h
index 34a8433..b89a77a 100644
--- a/media/filters/vpx_video_decoder.h
+++ b/media/filters/vpx_video_decoder.h
@@ -123,7 +123,7 @@
OffloadingVpxVideoDecoder()
: OffloadingVideoDecoder(
1024,
- std::vector<VideoCodec>(1, kCodecVP9),
+ std::vector<VideoCodec>(1, VideoCodec::kVP9),
std::make_unique<VpxVideoDecoder>(
OffloadableVideoDecoder::OffloadState::kOffloaded)) {}
};
diff --git a/media/filters/vpx_video_decoder_fuzzertest.cc b/media/filters/vpx_video_decoder_fuzzertest.cc
index 087d465..afeba4f 100644
--- a/media/filters/vpx_video_decoder_fuzzertest.cc
+++ b/media/filters/vpx_video_decoder_fuzzertest.cc
@@ -64,11 +64,11 @@
bool has_alpha = false;
if (rng() & 1) {
- codec = media::kCodecVP8;
+ codec = media::VideoCodec::kVP8;
// non-Alpha VP8 decoding isn't supported by VpxVideoDecoder on Linux.
has_alpha = true;
} else {
- codec = media::kCodecVP9;
+ codec = media::VideoCodec::kVP9;
has_alpha = rng() & 1;
}
diff --git a/media/filters/vpx_video_decoder_unittest.cc b/media/filters/vpx_video_decoder_unittest.cc
index 6f37edb..a131892 100644
--- a/media/filters/vpx_video_decoder_unittest.cc
+++ b/media/filters/vpx_video_decoder_unittest.cc
@@ -34,7 +34,7 @@
~VpxVideoDecoderTest() override { Destroy(); }
void Initialize() {
- InitializeWithConfig(TestVideoConfig::Normal(kCodecVP9));
+ InitializeWithConfig(TestVideoConfig::Normal(VideoCodec::kVP9));
}
void InitializeWithConfigWithResult(const VideoDecoderConfig& config,
@@ -56,7 +56,7 @@
}
void Reinitialize() {
- InitializeWithConfig(TestVideoConfig::Large(kCodecVP9));
+ InitializeWithConfig(TestVideoConfig::Large(VideoCodec::kVP9));
}
void Reset() {
diff --git a/media/formats/mp2t/es_adapter_video_unittest.cc b/media/formats/mp2t/es_adapter_video_unittest.cc
index 8043605d..f5e896b 100644
--- a/media/formats/mp2t/es_adapter_video_unittest.cc
+++ b/media/formats/mp2t/es_adapter_video_unittest.cc
@@ -31,10 +31,11 @@
gfx::Size coded_size(320, 240);
gfx::Rect visible_rect(0, 0, 320, 240);
gfx::Size natural_size(320, 240);
- return VideoDecoderConfig(
- kCodecH264, H264PROFILE_MAIN, VideoDecoderConfig::AlphaMode::kIsOpaque,
- VideoColorSpace(), kNoTransformation, coded_size, visible_rect,
- natural_size, EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ return VideoDecoderConfig(VideoCodec::kH264, H264PROFILE_MAIN,
+ VideoDecoderConfig::AlphaMode::kIsOpaque,
+ VideoColorSpace(), kNoTransformation, coded_size,
+ visible_rect, natural_size, EmptyExtraData(),
+ EncryptionScheme::kUnencrypted);
}
BufferQueue GenerateFakeBuffers(const int* frame_pts_ms,
diff --git a/media/formats/mp2t/es_parser_adts.cc b/media/formats/mp2t/es_parser_adts.cc
index 91470b75..a484ee3 100644
--- a/media/formats/mp2t/es_parser_adts.cc
+++ b/media/formats/mp2t/es_parser_adts.cc
@@ -263,8 +263,8 @@
scheme = init_encryption_scheme_;
#endif
AudioDecoderConfig audio_decoder_config(
- kCodecAAC, kSampleFormatS16, channel_layout, extended_samples_per_second,
- extra_data, scheme);
+ AudioCodec::kAAC, kSampleFormatS16, channel_layout,
+ extended_samples_per_second, extra_data, scheme);
if (!audio_decoder_config.IsValidConfig()) {
DVLOG(1) << "Invalid config: "
diff --git a/media/formats/mp2t/es_parser_h264.cc b/media/formats/mp2t/es_parser_h264.cc
index a00a90c..eeae824 100644
--- a/media/formats/mp2t/es_parser_h264.cc
+++ b/media/formats/mp2t/es_parser_h264.cc
@@ -517,7 +517,7 @@
}
VideoDecoderConfig video_decoder_config(
- kCodecH264, profile, VideoDecoderConfig::AlphaMode::kIsOpaque,
+ VideoCodec::kH264, profile, VideoDecoderConfig::AlphaMode::kIsOpaque,
VideoColorSpace::REC709(), kNoTransformation, coded_size.value(),
visible_rect.value(), natural_size, EmptyExtraData(), scheme);
diff --git a/media/formats/mp2t/es_parser_mpeg1audio.cc b/media/formats/mp2t/es_parser_mpeg1audio.cc
index 4cc67f9..a48a4ba 100644
--- a/media/formats/mp2t/es_parser_mpeg1audio.cc
+++ b/media/formats/mp2t/es_parser_mpeg1audio.cc
@@ -170,8 +170,8 @@
// TODO(damienv): Verify whether Android playback requires the extra data
// field for Mpeg1 audio. If yes, we should generate this field.
AudioDecoderConfig audio_decoder_config(
- kCodecMP3, kSampleFormatS16, header.channel_layout, header.sample_rate,
- EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ AudioCodec::kMP3, kSampleFormatS16, header.channel_layout,
+ header.sample_rate, EmptyExtraData(), EncryptionScheme::kUnencrypted);
if (!audio_decoder_config.IsValidConfig()) {
DVLOG(1) << "Invalid config: "
diff --git a/media/formats/mp2t/mp2t_stream_parser.cc b/media/formats/mp2t/mp2t_stream_parser.cc
index 3cbc426..a15bef6bb 100644
--- a/media/formats/mp2t/mp2t_stream_parser.cc
+++ b/media/formats/mp2t/mp2t_stream_parser.cc
@@ -201,13 +201,13 @@
segment_started_(false) {
for (const std::string& codec_name : allowed_codecs) {
switch (StringToVideoCodec(codec_name)) {
- case VideoCodec::kCodecH264:
+ case VideoCodec::kH264:
allowed_stream_types_.insert(kStreamTypeAVC);
#if BUILDFLAG(ENABLE_HLS_SAMPLE_AES)
allowed_stream_types_.insert(kStreamTypeAVCWithSampleAES);
#endif
continue;
- case VideoCodec::kUnknownVideoCodec:
+ case VideoCodec::kUnknown:
// Probably audio.
break;
default:
@@ -216,17 +216,17 @@
}
switch (StringToAudioCodec(codec_name)) {
- case AudioCodec::kCodecAAC:
+ case AudioCodec::kAAC:
allowed_stream_types_.insert(kStreamTypeAAC);
#if BUILDFLAG(ENABLE_HLS_SAMPLE_AES)
allowed_stream_types_.insert(kStreamTypeAACWithSampleAES);
#endif
continue;
- case AudioCodec::kCodecMP3:
+ case AudioCodec::kMP3:
allowed_stream_types_.insert(kStreamTypeMpeg1Audio);
allowed_stream_types_.insert(kStreamTypeMpeg2Audio);
continue;
- case AudioCodec::kUnknownAudioCodec:
+ case AudioCodec::kUnknown:
// Neither audio, nor video.
break;
default:
diff --git a/media/formats/mp4/box_definitions.cc b/media/formats/mp4/box_definitions.cc
index 263e913..08896d0 100644
--- a/media/formats/mp4/box_definitions.cc
+++ b/media/formats/mp4/box_definitions.cc
@@ -1049,7 +1049,7 @@
data_reference_index(0),
width(0),
height(0),
- video_codec(kUnknownVideoCodec),
+ video_codec(VideoCodec::kUnknown),
video_codec_profile(VIDEO_CODEC_PROFILE_UNKNOWN),
video_codec_level(kNoVideoCodecLevel) {}
@@ -1095,7 +1095,7 @@
std::unique_ptr<AVCDecoderConfigurationRecord> avcConfig(
new AVCDecoderConfigurationRecord());
RCHECK(reader->ReadChild(avcConfig.get()));
- video_codec = kCodecH264;
+ video_codec = VideoCodec::kH264;
video_codec_profile = H264Parser::ProfileIDCToVideoCodecProfile(
avcConfig->profile_indication);
@@ -1106,7 +1106,7 @@
auto dv_config = ParseDOVIConfig(reader);
if (dv_config.has_value()) {
DVLOG(2) << __func__ << " reading DolbyVisionConfiguration (dvcC/dvvC)";
- video_codec = kCodecDolbyVision;
+ video_codec = VideoCodec::kDolbyVision;
video_codec_profile = dv_config->codec_profile;
video_codec_level = dv_config->dv_level;
}
@@ -1120,7 +1120,7 @@
std::unique_ptr<HEVCDecoderConfigurationRecord> hevcConfig(
new HEVCDecoderConfigurationRecord());
RCHECK(reader->ReadChild(hevcConfig.get()));
- video_codec = kCodecHEVC;
+ video_codec = VideoCodec::kHEVC;
video_codec_profile = hevcConfig->GetVideoProfile();
frame_bitstream_converter =
base::MakeRefCounted<HEVCBitstreamConverter>(std::move(hevcConfig));
@@ -1129,7 +1129,7 @@
auto dv_config = ParseDOVIConfig(reader);
if (dv_config.has_value()) {
DVLOG(2) << __func__ << " reading DolbyVisionConfiguration (dvcC/dvvC)";
- video_codec = kCodecDolbyVision;
+ video_codec = VideoCodec::kDolbyVision;
video_codec_profile = dv_config->codec_profile;
video_codec_level = dv_config->dv_level;
}
@@ -1150,7 +1150,7 @@
DVLOG(2) << __func__ << " reading DolbyVisionConfiguration (dvcC/dvvC)";
auto dv_config = ParseDOVIConfig(reader);
RCHECK(dv_config.has_value());
- video_codec = kCodecDolbyVision;
+ video_codec = VideoCodec::kDolbyVision;
video_codec_profile = dv_config->codec_profile;
video_codec_level = dv_config->dv_level;
break;
@@ -1167,7 +1167,7 @@
DVLOG(2) << __func__ << " reading DolbyVisionConfiguration (dvcC/dvvC)";
auto dv_config = ParseDOVIConfig(reader);
RCHECK(dv_config.has_value());
- video_codec = kCodecDolbyVision;
+ video_codec = VideoCodec::kDolbyVision;
video_codec_profile = dv_config->codec_profile;
video_codec_level = dv_config->dv_level;
break;
@@ -1181,7 +1181,7 @@
new VPCodecConfigurationRecord());
RCHECK(reader->ReadChild(vp_config.get()));
frame_bitstream_converter = nullptr;
- video_codec = kCodecVP9;
+ video_codec = VideoCodec::kVP9;
video_codec_profile = vp_config->profile;
video_color_space = vp_config->color_space;
video_codec_level = vp_config->level;
@@ -1205,7 +1205,7 @@
AV1CodecConfigurationRecord av1_config;
RCHECK(reader->ReadChild(&av1_config));
frame_bitstream_converter = nullptr;
- video_codec = kCodecAV1;
+ video_codec = VideoCodec::kAV1;
video_codec_profile = av1_config.profile;
break;
}
diff --git a/media/formats/mp4/mp4_stream_parser.cc b/media/formats/mp4/mp4_stream_parser.cc
index aabdce0..f3675dc 100644
--- a/media/formats/mp4/mp4_stream_parser.cc
+++ b/media/formats/mp4/mp4_stream_parser.cc
@@ -345,7 +345,7 @@
return false;
}
- AudioCodec codec = kUnknownAudioCodec;
+ AudioCodec codec = AudioCodec::kUnknown;
AudioCodecProfile profile = AudioCodecProfile::kUnknown;
ChannelLayout channel_layout = CHANNEL_LAYOUT_NONE;
int sample_per_second = 0;
@@ -353,7 +353,7 @@
base::TimeDelta seek_preroll;
std::vector<uint8_t> extra_data;
if (audio_format == FOURCC_OPUS) {
- codec = kCodecOpus;
+ codec = AudioCodec::kOpus;
channel_layout = GuessChannelLayout(entry.dops.channel_count);
sample_per_second = entry.dops.sample_rate;
codec_delay_in_frames = entry.dops.codec_delay_in_frames;
@@ -369,14 +369,14 @@
return false;
}
- codec = kCodecFLAC;
+ codec = AudioCodec::kFLAC;
channel_layout = GuessChannelLayout(entry.channelcount);
sample_per_second = entry.samplerate;
extra_data = entry.dfla.stream_info;
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
#if BUILDFLAG(ENABLE_PLATFORM_MPEG_H_AUDIO)
} else if (audio_format == FOURCC_MHM1 || audio_format == FOURCC_MHA1) {
- codec = kCodecMpegHAudio;
+ codec = AudioCodec::kMpegHAudio;
channel_layout = CHANNEL_LAYOUT_BITSTREAM;
sample_per_second = entry.samplerate;
extra_data = entry.dfla.stream_info;
@@ -404,18 +404,18 @@
// supported MPEG2 AAC variants.
if (ESDescriptor::IsAAC(audio_type)) {
const AAC& aac = entry.esds.aac;
- codec = kCodecAAC;
+ codec = AudioCodec::kAAC;
profile = aac.GetProfile();
channel_layout = aac.GetChannelLayout(has_sbr_);
sample_per_second = aac.GetOutputSamplesPerSecond(has_sbr_);
extra_data = aac.codec_specific_data();
#if BUILDFLAG(ENABLE_PLATFORM_AC3_EAC3_AUDIO)
} else if (audio_type == kAC3) {
- codec = kCodecAC3;
+ codec = AudioCodec::kAC3;
channel_layout = GuessChannelLayout(entry.channelcount);
sample_per_second = entry.samplerate;
} else if (audio_type == kEAC3) {
- codec = kCodecEAC3;
+ codec = AudioCodec::kEAC3;
channel_layout = GuessChannelLayout(entry.channelcount);
sample_per_second = entry.samplerate;
#endif
@@ -459,7 +459,7 @@
audio_config.Initialize(codec, sample_format, channel_layout,
sample_per_second, extra_data, scheme,
seek_preroll, codec_delay_in_frames);
- if (codec == kCodecAAC) {
+ if (codec == AudioCodec::kAAC) {
audio_config.disable_discard_decoder_delay();
audio_config.set_profile(profile);
}
@@ -804,9 +804,9 @@
std::vector<uint8_t> frame_buf(buf, buf + sample_size);
if (video) {
- if (runs_->video_description().video_codec == kCodecH264 ||
- runs_->video_description().video_codec == kCodecHEVC ||
- runs_->video_description().video_codec == kCodecDolbyVision) {
+ if (runs_->video_description().video_codec == VideoCodec::kH264 ||
+ runs_->video_description().video_codec == VideoCodec::kHEVC ||
+ runs_->video_description().video_codec == VideoCodec::kDolbyVision) {
DCHECK(runs_->video_description().frame_bitstream_converter);
BitstreamConverter::AnalysisResult analysis;
if (!runs_->video_description()
diff --git a/media/formats/mp4/mp4_stream_parser_unittest.cc b/media/formats/mp4/mp4_stream_parser_unittest.cc
index e5f745c..acc67bb 100644
--- a/media/formats/mp4/mp4_stream_parser_unittest.cc
+++ b/media/formats/mp4/mp4_stream_parser_unittest.cc
@@ -499,7 +499,7 @@
EXPECT_EQ(expect_success,
AppendDataInPieces(buffer->data(), buffer->data_size(), 512));
#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
- EXPECT_EQ(kCodecHEVC, video_decoder_config_.codec());
+ EXPECT_EQ(VideoCodec::kHEVC, video_decoder_config_.codec());
EXPECT_EQ(HEVCPROFILE_MAIN, video_decoder_config_.profile());
#endif
}
diff --git a/media/formats/mpeg/adts_stream_parser.cc b/media/formats/mpeg/adts_stream_parser.cc
index a47d2242..116f536a6 100644
--- a/media/formats/mpeg/adts_stream_parser.cc
+++ b/media/formats/mpeg/adts_stream_parser.cc
@@ -16,7 +16,7 @@
constexpr uint32_t kADTSStartCodeMask = 0xfff00000;
ADTSStreamParser::ADTSStreamParser()
- : MPEGAudioStreamParserBase(kADTSStartCodeMask, kCodecAAC, 0) {}
+ : MPEGAudioStreamParserBase(kADTSStartCodeMask, AudioCodec::kAAC, 0) {}
ADTSStreamParser::~ADTSStreamParser() = default;
diff --git a/media/formats/mpeg/mpeg1_audio_stream_parser.cc b/media/formats/mpeg/mpeg1_audio_stream_parser.cc
index d12f7ca..5b996764 100644
--- a/media/formats/mpeg/mpeg1_audio_stream_parser.cc
+++ b/media/formats/mpeg/mpeg1_audio_stream_parser.cc
@@ -187,7 +187,9 @@
}
MPEG1AudioStreamParser::MPEG1AudioStreamParser()
- : MPEGAudioStreamParserBase(kMPEG1StartCodeMask, kCodecMP3, kCodecDelay) {}
+ : MPEGAudioStreamParserBase(kMPEG1StartCodeMask,
+ AudioCodec::kMP3,
+ kCodecDelay) {}
MPEG1AudioStreamParser::~MPEG1AudioStreamParser() = default;
diff --git a/media/formats/mpeg/mpeg_audio_stream_parser_base.cc b/media/formats/mpeg/mpeg_audio_stream_parser_base.cc
index 0ea60b26..aea794e 100644
--- a/media/formats/mpeg/mpeg_audio_stream_parser_base.cc
+++ b/media/formats/mpeg/mpeg_audio_stream_parser_base.cc
@@ -213,7 +213,7 @@
config_.Initialize(audio_codec_, kSampleFormatF32, channel_layout,
sample_rate, extra_data, EncryptionScheme::kUnencrypted,
base::TimeDelta(), codec_delay_);
- if (audio_codec_ == kCodecAAC)
+ if (audio_codec_ == AudioCodec::kAAC)
config_.disable_discard_decoder_delay();
base::TimeDelta base_timestamp;
diff --git a/media/formats/webm/webm_audio_client.cc b/media/formats/webm/webm_audio_client.cc
index 5ac98e8e..b245845 100644
--- a/media/formats/webm/webm_audio_client.cc
+++ b/media/formats/webm/webm_audio_client.cc
@@ -32,11 +32,11 @@
DCHECK(config);
SampleFormat sample_format = kSampleFormatPlanarF32;
- AudioCodec audio_codec = kUnknownAudioCodec;
+ AudioCodec audio_codec = AudioCodec::kUnknown;
if (codec_id == "A_VORBIS") {
- audio_codec = kCodecVorbis;
+ audio_codec = AudioCodec::kVorbis;
} else if (codec_id == "A_OPUS") {
- audio_codec = kCodecOpus;
+ audio_codec = AudioCodec::kOpus;
} else {
MEDIA_LOG(ERROR, media_log_) << "Unsupported audio codec_id " << codec_id;
return false;
@@ -63,7 +63,7 @@
// Always use 48kHz for OPUS. See the "Input Sample Rate" section of the
// spec: http://tools.ietf.org/html/draft-terriberry-oggopus-01#page-11
- if (audio_codec == kCodecOpus) {
+ if (audio_codec == AudioCodec::kOpus) {
samples_per_second = 48000;
sample_format = kSampleFormatF32;
}
diff --git a/media/formats/webm/webm_cluster_parser.cc b/media/formats/webm/webm_cluster_parser.cc
index 9d192f81..eb40d29 100644
--- a/media/formats/webm/webm_cluster_parser.cc
+++ b/media/formats/webm/webm_cluster_parser.cc
@@ -172,7 +172,7 @@
// TODO(chcunningham): Consider parsing "Signal Byte" for encrypted streams
// to return duration for any unencrypted blocks.
- if (audio_codec_ == kCodecOpus) {
+ if (audio_codec_ == AudioCodec::kOpus) {
return ReadOpusDuration(data, size);
}
diff --git a/media/formats/webm/webm_cluster_parser_unittest.cc b/media/formats/webm/webm_cluster_parser_unittest.cc
index 987dc71..6fa912a 100644
--- a/media/formats/webm/webm_cluster_parser_unittest.cc
+++ b/media/formats/webm/webm_cluster_parser_unittest.cc
@@ -325,7 +325,7 @@
WebMClusterParser* CreateDefaultParser() {
return CreateParserHelper(kNoTimestamp, kNoTimestamp, TextTracks(),
std::set<int64_t>(), std::string(), std::string(),
- kUnknownAudioCodec);
+ AudioCodec::kUnknown);
}
// Create a parser for test with custom audio and video default durations, and
@@ -336,7 +336,7 @@
const WebMTracksParser::TextTracks& text_tracks = TextTracks()) {
return CreateParserHelper(audio_default_duration, video_default_duration,
text_tracks, std::set<int64_t>(), std::string(),
- std::string(), kUnknownAudioCodec);
+ std::string(), AudioCodec::kUnknown);
}
// Create a parser for test with custom ignored tracks.
@@ -344,7 +344,7 @@
std::set<int64_t>& ignored_tracks) {
return CreateParserHelper(kNoTimestamp, kNoTimestamp, TextTracks(),
ignored_tracks, std::string(), std::string(),
- kUnknownAudioCodec);
+ AudioCodec::kUnknown);
}
// Create a parser for test with custom encryption key ids and audio codec.
@@ -716,7 +716,7 @@
CreateEncryptedCluster(sizeof(kEncryptedFrame)));
parser_.reset(CreateParserWithKeyIdsAndAudioCodec(
- std::string(), "video_key_id", kUnknownAudioCodec));
+ std::string(), "video_key_id", AudioCodec::kUnknown));
// The encrypted cluster contains just one block, video.
EXPECT_MEDIA_LOG(WebMSimpleBlockDurationEstimated(
@@ -736,7 +736,7 @@
CreateEncryptedCluster(sizeof(kEncryptedFrame) - 1));
parser_.reset(CreateParserWithKeyIdsAndAudioCodec(
- std::string(), "video_key_id", kUnknownAudioCodec));
+ std::string(), "video_key_id", AudioCodec::kUnknown));
EXPECT_MEDIA_LOG(HasSubstr("Failed to extract decrypt config"));
int result = parser_->Parse(cluster->data(), cluster->size());
@@ -1149,7 +1149,7 @@
// Get a new parser each iteration to prevent exceeding the media log cap.
parser_.reset(CreateParserWithKeyIdsAndAudioCodec(
- std::string(), std::string(), kCodecOpus));
+ std::string(), std::string(), AudioCodec::kOpus));
const BlockInfo kBlockInfo[] = {{kAudioTrackNum,
0,
@@ -1186,7 +1186,7 @@
// Get a new parser each iteration to prevent exceeding the media log cap.
parser_.reset(CreateParserWithKeyIdsAndAudioCodec(
- std::string(), std::string(), kCodecOpus));
+ std::string(), std::string(), AudioCodec::kOpus));
// Setting BlockDuration != Opus duration to see which one the parser uses.
double block_duration_ms = packet_ptr->duration_ms() + 10;
@@ -1234,8 +1234,8 @@
std::string audio_encryption_id("audio_key_id");
// Reset parser to expect Opus codec audio and use audio encryption key id.
- parser_.reset(CreateParserWithKeyIdsAndAudioCodec(audio_encryption_id,
- std::string(), kCodecOpus));
+ parser_.reset(CreateParserWithKeyIdsAndAudioCodec(
+ audio_encryption_id, std::string(), AudioCodec::kOpus));
// Single Block with BlockDuration and encrypted data.
const BlockInfo kBlockInfo[] = {{kAudioTrackNum, 0,
diff --git a/media/formats/webm/webm_video_client.cc b/media/formats/webm/webm_video_client.cc
index 407c11b..8e4add7 100644
--- a/media/formats/webm/webm_video_client.cc
+++ b/media/formats/webm/webm_video_client.cc
@@ -80,13 +80,13 @@
is_8bit = color_metadata.BitsPerChannel <= 8;
}
- VideoCodec video_codec = kUnknownVideoCodec;
+ VideoCodec video_codec = VideoCodec::kUnknown;
VideoCodecProfile profile = VIDEO_CODEC_PROFILE_UNKNOWN;
if (codec_id == "V_VP8") {
- video_codec = kCodecVP8;
+ video_codec = VideoCodec::kVP8;
profile = VP8PROFILE_ANY;
} else if (codec_id == "V_VP9") {
- video_codec = kCodecVP9;
+ video_codec = VideoCodec::kVP9;
profile = GetVP9CodecProfile(
codec_private, color_space.ToGfxColorSpace().IsHDR() ||
config->hdr_metadata().has_value() || !is_8bit);
@@ -95,7 +95,7 @@
// TODO(dalecurtis): AV1 profiles in WebM are not finalized, this needs
// updating to read the actual profile and configuration before enabling for
// release. http://crbug.com/784993
- video_codec = kCodecAV1;
+ video_codec = VideoCodec::kAV1;
profile = AV1PROFILE_PROFILE_MAIN;
#endif
} else {
@@ -129,7 +129,8 @@
// TODO(dalecurtis): This is not correct, but it's what's muxed in webm
// containers with AV1 right now. So accept it. We won't get here unless the
// build and runtime flags are enabled for AV1.
- if (display_unit_ == 0 || (video_codec == kCodecAV1 && display_unit_ == 4)) {
+ if (display_unit_ == 0 ||
+ (video_codec == VideoCodec::kAV1 && display_unit_ == 4)) {
if (display_width_ <= 0)
display_width_ = visible_rect.width();
if (display_height_ <= 0)
diff --git a/media/formats/webm/webm_video_client_unittest.cc b/media/formats/webm/webm_video_client_unittest.cc
index eb422b2..5786ed1 100644
--- a/media/formats/webm/webm_video_client_unittest.cc
+++ b/media/formats/webm/webm_video_client_unittest.cc
@@ -150,7 +150,7 @@
EncryptionScheme(), &config));
VideoDecoderConfig expected_config(
- kCodecVP9, profile, VideoDecoderConfig::AlphaMode::kIsOpaque,
+ VideoCodec::kVP9, profile, VideoDecoderConfig::AlphaMode::kIsOpaque,
VideoColorSpace::REC709(), kNoTransformation, kCodedSize,
gfx::Rect(kCodedSize), kCodedSize, codec_private,
EncryptionScheme::kUnencrypted);
diff --git a/media/fuchsia/audio/fuchsia_audio_renderer.cc b/media/fuchsia/audio/fuchsia_audio_renderer.cc
index 20d98f0..52b5b7e7 100644
--- a/media/fuchsia/audio/fuchsia_audio_renderer.cc
+++ b/media/fuchsia/audio/fuchsia_audio_renderer.cc
@@ -29,22 +29,22 @@
GetFuchsiaCompressionFromDecoderConfig(AudioDecoderConfig config) {
auto compression = std::make_unique<fuchsia::media::Compression>();
switch (config.codec()) {
- case kCodecAAC:
+ case AudioCodec::kAAC:
compression->type = fuchsia::media::AUDIO_ENCODING_AAC;
break;
- case kCodecMP3:
+ case AudioCodec::kMP3:
compression->type = fuchsia::media::AUDIO_ENCODING_MP3;
break;
- case kCodecVorbis:
+ case AudioCodec::kVorbis:
compression->type = fuchsia::media::AUDIO_ENCODING_VORBIS;
break;
- case kCodecOpus:
+ case AudioCodec::kOpus:
compression->type = fuchsia::media::AUDIO_ENCODING_OPUS;
break;
- case kCodecFLAC:
+ case AudioCodec::kFLAC:
compression->type = fuchsia::media::AUDIO_ENCODING_FLAC;
break;
- case kCodecPCM:
+ case AudioCodec::kPCM:
compression.reset();
break;
@@ -124,7 +124,7 @@
// produce decoded stream without ADTS headers which are required for AAC
// streams in AudioConsumer.
// TODO(crbug.com/1120095): Reconsider this logic.
- if (stream->audio_decoder_config().codec() == kCodecAAC) {
+ if (stream->audio_decoder_config().codec() == AudioCodec::kAAC) {
stream->EnableBitstreamConverter();
}
diff --git a/media/fuchsia/audio/fuchsia_audio_renderer_test.cc b/media/fuchsia/audio/fuchsia_audio_renderer_test.cc
index 986856c..8982872 100644
--- a/media/fuchsia/audio/fuchsia_audio_renderer_test.cc
+++ b/media/fuchsia/audio/fuchsia_audio_renderer_test.cc
@@ -520,8 +520,8 @@
}
void FuchsiaAudioRendererTest::CreateTestDemuxerStream() {
- AudioDecoderConfig config(kCodecPCM, kSampleFormatF32, CHANNEL_LAYOUT_MONO,
- kDefaultSampleRate, {},
+ AudioDecoderConfig config(AudioCodec::kPCM, kSampleFormatF32,
+ CHANNEL_LAYOUT_MONO, kDefaultSampleRate, {},
EncryptionScheme::kUnencrypted);
if (GetParam().simulate_fuchsia_cdm) {
@@ -793,8 +793,8 @@
const size_t kNewSampleRate = 44100;
const std::vector<uint8_t> kArbitraryExtraData = {1, 2, 3};
AudioDecoderConfig updated_config(
- kCodecOpus, kSampleFormatF32, CHANNEL_LAYOUT_STEREO, kNewSampleRate,
- kArbitraryExtraData, EncryptionScheme::kUnencrypted);
+ AudioCodec::kOpus, kSampleFormatF32, CHANNEL_LAYOUT_STEREO,
+ kNewSampleRate, kArbitraryExtraData, EncryptionScheme::kUnencrypted);
demuxer_stream_->QueueReadResult(
TestDemuxerStream::ReadResult(updated_config));
diff --git a/media/gpu/android/android_video_encode_accelerator.cc b/media/gpu/android/android_video_encode_accelerator.cc
index 008ab0b..cd6d379 100644
--- a/media/gpu/android/android_video_encode_accelerator.cc
+++ b/media/gpu/android/android_video_encode_accelerator.cc
@@ -107,16 +107,16 @@
const struct {
const VideoCodec codec;
const VideoCodecProfile profile;
- } kSupportedCodecs[] = {{kCodecVP8, VP8PROFILE_ANY},
- {kCodecH264, H264PROFILE_BASELINE}};
+ } kSupportedCodecs[] = {{VideoCodec::kVP8, VP8PROFILE_ANY},
+ {VideoCodec::kH264, H264PROFILE_BASELINE}};
for (const auto& supported_codec : kSupportedCodecs) {
- if (supported_codec.codec == kCodecVP8 &&
+ if (supported_codec.codec == VideoCodec::kVP8 &&
!MediaCodecUtil::IsVp8EncoderAvailable()) {
continue;
}
- if (supported_codec.codec == kCodecH264 &&
+ if (supported_codec.codec == VideoCodec::kH264 &&
!MediaCodecUtil::IsH264EncoderAvailable()) {
continue;
}
@@ -161,13 +161,13 @@
uint32_t frame_input_count;
uint32_t i_frame_interval;
if (config.output_profile == VP8PROFILE_ANY) {
- codec = kCodecVP8;
+ codec = VideoCodec::kVP8;
mime_type = "video/x-vnd.on2.vp8";
frame_input_count = 1;
i_frame_interval = IFRAME_INTERVAL_VPX;
} else if (config.output_profile == H264PROFILE_BASELINE ||
config.output_profile == H264PROFILE_MAIN) {
- codec = kCodecH264;
+ codec = VideoCodec::kH264;
mime_type = "video/avc";
frame_input_count = 30;
i_frame_interval = IFRAME_INTERVAL_H264;
diff --git a/media/gpu/android/media_codec_video_decoder.cc b/media/gpu/android/media_codec_video_decoder.cc
index af3d1e1..d7a2223 100644
--- a/media/gpu/android/media_codec_video_decoder.cc
+++ b/media/gpu/android/media_codec_video_decoder.cc
@@ -64,7 +64,7 @@
if (device_info->IsVp8DecoderAvailable()) {
// For unencrypted content, require that the size is at least 360p and that
// the MediaCodec implementation is hardware; otherwise fall back to libvpx.
- if (!device_info->IsDecoderKnownUnaccelerated(kCodecVP8)) {
+ if (!device_info->IsDecoderKnownUnaccelerated(VideoCodec::kVP8)) {
supported_configs.emplace_back(VP8PROFILE_ANY, VP8PROFILE_ANY,
gfx::Size(480, 360), gfx::Size(3840, 2160),
false, // allow_encrypted
@@ -81,7 +81,8 @@
// TODO(dalecurtis): This needs to actually check the profiles available. This
// can be done by calling MediaCodecUtil::AddSupportedCodecProfileLevels.
if (device_info->IsVp9DecoderAvailable()) {
- const bool is_sw = device_info->IsDecoderKnownUnaccelerated(kCodecVP9);
+ const bool is_sw =
+ device_info->IsDecoderKnownUnaccelerated(VideoCodec::kVP9);
std::vector<CodecProfileLevel> profiles;
@@ -92,10 +93,10 @@
// If we think a VP9 decoder is available, but we didn't get any profiles
// returned, just assume support for vp9.0 only.
if (profiles.empty())
- profiles.push_back({kCodecVP9, VP9PROFILE_PROFILE0, 0});
+ profiles.push_back({VideoCodec::kVP9, VP9PROFILE_PROFILE0, 0});
for (const auto& p : profiles) {
- if (p.codec != kCodecVP9)
+ if (p.codec != VideoCodec::kVP9)
continue;
// We don't compile support into libvpx for these profiles, so allow them
@@ -373,7 +374,7 @@
waiting_cb_ = waiting_cb;
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- if (config.codec() == kCodecH264)
+ if (config.codec() == VideoCodec::kH264)
ExtractSpsAndPps(config.extra_data(), &csd0_, &csd1_);
#endif
@@ -876,10 +877,11 @@
// larger based on the actual input size.
if (decoder_config_.coded_size().width() == last_width_) {
// See MediaFormatBuilder::addInputSizeInfoToFormat() for details.
- const size_t compression_ratio = (decoder_config_.codec() == kCodecH264 ||
- decoder_config_.codec() == kCodecVP8)
- ? 2
- : 4;
+ const size_t compression_ratio =
+ (decoder_config_.codec() == VideoCodec::kH264 ||
+ decoder_config_.codec() == VideoCodec::kVP8)
+ ? 2
+ : 4;
const size_t max_pixels =
(pending_decode.buffer->data_size() * compression_ratio * 2) / 3;
if (max_pixels > 8294400) // 4K
@@ -1105,8 +1107,8 @@
// (http://crbug.com/598963).
// TODO(watk): Strongly consider blocking VP8 (or specific MediaCodecs)
// instead. Draining is responsible for a lot of complexity.
- if (decoder_config_.codec() != kCodecVP8 || !codec_ || codec_->IsFlushed() ||
- codec_->IsDrained() || using_async_api_) {
+ if (decoder_config_.codec() != VideoCodec::kVP8 || !codec_ ||
+ codec_->IsFlushed() || codec_->IsDrained() || using_async_api_) {
// If the codec isn't already drained or flushed, then we have to remember
// that we owe it a flush. We also have to remember not to deliver any
// output buffers that might still be in progress in the codec.
diff --git a/media/gpu/android/media_codec_video_decoder_unittest.cc b/media/gpu/android/media_codec_video_decoder_unittest.cc
index 958bd3ef..b4b51a3 100644
--- a/media/gpu/android/media_codec_video_decoder_unittest.cc
+++ b/media/gpu/android/media_codec_video_decoder_unittest.cc
@@ -121,7 +121,8 @@
void TearDown() override {
// For VP8, make MCVD skip the drain by resetting it. Otherwise, it's hard
// to finish the drain.
- if (mcvd_ && codec_ == kCodecVP8 && codec_allocator_->most_recent_codec)
+ if (mcvd_ && codec_ == VideoCodec::kVP8 &&
+ codec_allocator_->most_recent_codec)
DoReset();
// MCVD calls DeleteSoon() on itself, so we have to run a RunLoop.
@@ -327,7 +328,7 @@
TEST_P(MediaCodecVideoDecoderAV1Test, Av1IsSupported) {
EXPECT_CALL(*device_info_, IsAv1DecoderAvailable()).WillOnce(Return(true));
- ASSERT_TRUE(Initialize(TestVideoConfig::Normal(kCodecAV1)));
+ ASSERT_TRUE(Initialize(TestVideoConfig::Normal(VideoCodec::kAV1)));
}
TEST_P(MediaCodecVideoDecoderTest, InitializeDoesntInitSurfaceOrCodec) {
@@ -955,7 +956,7 @@
VideoColorSpace::MatrixID::BT2020_CL,
gfx::ColorSpace::RangeID::LIMITED);
VideoDecoderConfig config =
- TestVideoConfig::NormalWithColorSpace(kCodecVP9, color_space);
+ TestVideoConfig::NormalWithColorSpace(VideoCodec::kVP9, color_space);
EXPECT_TRUE(InitializeFully_OneDecodePending(config));
EXPECT_EQ(color_space,
@@ -963,7 +964,7 @@
}
TEST_P(MediaCodecVideoDecoderVp9Test, HdrMetadataIsIncludedInCodecConfig) {
- VideoDecoderConfig config = TestVideoConfig::Normal(kCodecVP9);
+ VideoDecoderConfig config = TestVideoConfig::Normal(VideoCodec::kVP9);
gfx::HDRMetadata hdr_metadata;
hdr_metadata.max_frame_average_light_level = 123;
hdr_metadata.max_content_light_level = 456;
@@ -990,29 +991,29 @@
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
if (MediaCodecUtil::IsMediaCodecAvailable())
- test_codecs.push_back(kCodecH264);
+ test_codecs.push_back(VideoCodec::kH264);
#endif
if (MediaCodecUtil::IsVp8DecoderAvailable())
- test_codecs.push_back(kCodecVP8);
+ test_codecs.push_back(VideoCodec::kVP8);
if (MediaCodecUtil::IsVp9DecoderAvailable())
- test_codecs.push_back(kCodecVP9);
+ test_codecs.push_back(VideoCodec::kVP9);
if (MediaCodecUtil::IsAv1DecoderAvailable())
- test_codecs.push_back(kCodecAV1);
+ test_codecs.push_back(VideoCodec::kAV1);
return test_codecs;
}
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
static std::vector<VideoCodec> GetH264IfAvailable() {
return MediaCodecUtil::IsMediaCodecAvailable()
- ? std::vector<VideoCodec>(1, kCodecH264)
+ ? std::vector<VideoCodec>(1, VideoCodec::kH264)
: std::vector<VideoCodec>();
}
#endif
static std::vector<VideoCodec> GetVp8IfAvailable() {
return MediaCodecUtil::IsVp8DecoderAvailable()
- ? std::vector<VideoCodec>(1, kCodecVP8)
+ ? std::vector<VideoCodec>(1, VideoCodec::kVP8)
: std::vector<VideoCodec>();
}
@@ -1020,13 +1021,13 @@
// is fixed.
// static std::vector<VideoCodec> GetVp9IfAvailable() {
// return MediaCodecUtil::IsVp9DecoderAvailable()
-// ? std::vector<VideoCodec>(1, kCodecVP9)
+// ? std::vector<VideoCodec>(1, VideoCodec::kVP9)
// : std::vector<VideoCodec>();
// }
static std::vector<VideoCodec> GetAv1IfAvailable() {
return MediaCodecUtil::IsAv1DecoderAvailable()
- ? std::vector<VideoCodec>(1, kCodecAV1)
+ ? std::vector<VideoCodec>(1, VideoCodec::kAV1)
: std::vector<VideoCodec>();
}
diff --git a/media/gpu/chromeos/video_decoder_pipeline.cc b/media/gpu/chromeos/video_decoder_pipeline.cc
index 852a0a3..1a48df6 100644
--- a/media/gpu/chromeos/video_decoder_pipeline.cc
+++ b/media/gpu/chromeos/video_decoder_pipeline.cc
@@ -271,8 +271,8 @@
}
#endif // !BUILDFLAG(USE_CHROMEOS_PROTECTED_MEDIA)
- needs_bitstream_conversion_ =
- (config.codec() == kCodecH264) || (config.codec() == kCodecHEVC);
+ needs_bitstream_conversion_ = (config.codec() == VideoCodec::kH264) ||
+ (config.codec() == VideoCodec::kHEVC);
decoder_task_runner_->PostTask(
FROM_HERE,
diff --git a/media/gpu/chromeos/video_decoder_pipeline_unittest.cc b/media/gpu/chromeos/video_decoder_pipeline_unittest.cc
index e67004c..0e1b693 100644
--- a/media/gpu/chromeos/video_decoder_pipeline_unittest.cc
+++ b/media/gpu/chromeos/video_decoder_pipeline_unittest.cc
@@ -127,7 +127,7 @@
: public testing::TestWithParam<DecoderPipelineTestParams> {
public:
VideoDecoderPipelineTest()
- : config_(kCodecVP8,
+ : config_(VideoCodec::kVP8,
VP8PROFILE_ANY,
VideoDecoderConfig::AlphaMode::kIsOpaque,
VideoColorSpace(),
diff --git a/media/gpu/ipc/service/vda_video_decoder.cc b/media/gpu/ipc/service/vda_video_decoder.cc
index 06787f9f..90a26a2 100644
--- a/media/gpu/ipc/service/vda_video_decoder.cc
+++ b/media/gpu/ipc/service/vda_video_decoder.cc
@@ -486,7 +486,8 @@
// TODO(sandersd): Can we move bitstream conversion into VdaVideoDecoder and
// always return false?
- return config_.codec() == kCodecH264 || config_.codec() == kCodecHEVC;
+ return config_.codec() == VideoCodec::kH264 ||
+ config_.codec() == VideoCodec::kHEVC;
}
bool VdaVideoDecoder::CanReadWithoutStalling() const {
diff --git a/media/gpu/ipc/service/vda_video_decoder_unittest.cc b/media/gpu/ipc/service/vda_video_decoder_unittest.cc
index 87d64df..b7d18597 100644
--- a/media/gpu/ipc/service/vda_video_decoder_unittest.cc
+++ b/media/gpu/ipc/service/vda_video_decoder_unittest.cc
@@ -147,7 +147,7 @@
.WillOnce(Return(GetParam()));
EXPECT_CALL(init_cb_, Run(IsOkStatus()));
InitializeWithConfig(VideoDecoderConfig(
- kCodecVP9, VP9PROFILE_PROFILE0,
+ VideoCodec::kVP9, VP9PROFILE_PROFILE0,
VideoDecoderConfig::AlphaMode::kIsOpaque, VideoColorSpace::REC709(),
kNoTransformation, gfx::Size(1920, 1088), gfx::Rect(1920, 1080),
gfx::Size(1920, 1080), EmptyExtraData(),
@@ -323,10 +323,10 @@
TEST_P(VdaVideoDecoderTest, Initialize_UnsupportedSize) {
InitializeWithConfig(VideoDecoderConfig(
- kCodecVP9, VP9PROFILE_PROFILE0, VideoDecoderConfig::AlphaMode::kIsOpaque,
- VideoColorSpace::REC601(), kNoTransformation, gfx::Size(320, 240),
- gfx::Rect(320, 240), gfx::Size(320, 240), EmptyExtraData(),
- EncryptionScheme::kUnencrypted));
+ VideoCodec::kVP9, VP9PROFILE_PROFILE0,
+ VideoDecoderConfig::AlphaMode::kIsOpaque, VideoColorSpace::REC601(),
+ kNoTransformation, gfx::Size(320, 240), gfx::Rect(320, 240),
+ gfx::Size(320, 240), EmptyExtraData(), EncryptionScheme::kUnencrypted));
EXPECT_CALL(init_cb_,
Run(HasStatusCode(StatusCode::kDecoderInitializeNeverCompleted)));
RunUntilIdle();
@@ -334,7 +334,7 @@
TEST_P(VdaVideoDecoderTest, Initialize_UnsupportedCodec) {
InitializeWithConfig(VideoDecoderConfig(
- kCodecH264, H264PROFILE_BASELINE,
+ VideoCodec::kH264, H264PROFILE_BASELINE,
VideoDecoderConfig::AlphaMode::kIsOpaque, VideoColorSpace::REC709(),
kNoTransformation, gfx::Size(1920, 1088), gfx::Rect(1920, 1080),
gfx::Size(1920, 1080), EmptyExtraData(), EncryptionScheme::kUnencrypted));
@@ -346,10 +346,10 @@
TEST_P(VdaVideoDecoderTest, Initialize_RejectedByVda) {
EXPECT_CALL(*vda_, Initialize(_, client_)).WillOnce(Return(false));
InitializeWithConfig(VideoDecoderConfig(
- kCodecVP9, VP9PROFILE_PROFILE0, VideoDecoderConfig::AlphaMode::kIsOpaque,
- VideoColorSpace::REC709(), kNoTransformation, gfx::Size(1920, 1088),
- gfx::Rect(1920, 1080), gfx::Size(1920, 1080), EmptyExtraData(),
- EncryptionScheme::kUnencrypted));
+ VideoCodec::kVP9, VP9PROFILE_PROFILE0,
+ VideoDecoderConfig::AlphaMode::kIsOpaque, VideoColorSpace::REC709(),
+ kNoTransformation, gfx::Size(1920, 1088), gfx::Rect(1920, 1080),
+ gfx::Size(1920, 1080), EmptyExtraData(), EncryptionScheme::kUnencrypted));
EXPECT_CALL(init_cb_,
Run(HasStatusCode(StatusCode::kDecoderInitializeNeverCompleted)));
RunUntilIdle();
@@ -430,10 +430,10 @@
EXPECT_CALL(*vda_, TryToSetupDecodeOnSeparateThread(_, _))
.WillOnce(Return(GetParam()));
InitializeWithConfig(VideoDecoderConfig(
- kCodecVP9, VP9PROFILE_PROFILE0, VideoDecoderConfig::AlphaMode::kIsOpaque,
- VideoColorSpace::REC709(), kNoTransformation, gfx::Size(640, 480),
- gfx::Rect(640, 480), gfx::Size(1280, 480), EmptyExtraData(),
- EncryptionScheme::kUnencrypted));
+ VideoCodec::kVP9, VP9PROFILE_PROFILE0,
+ VideoDecoderConfig::AlphaMode::kIsOpaque, VideoColorSpace::REC709(),
+ kNoTransformation, gfx::Size(640, 480), gfx::Rect(640, 480),
+ gfx::Size(1280, 480), EmptyExtraData(), EncryptionScheme::kUnencrypted));
EXPECT_CALL(init_cb_, Run(IsOkStatus()));
RunUntilIdle();
diff --git a/media/gpu/mac/vt_video_decode_accelerator_mac.cc b/media/gpu/mac/vt_video_decode_accelerator_mac.cc
index 428dd4e8..455e8c62 100644
--- a/media/gpu/mac/vt_video_decode_accelerator_mac.cc
+++ b/media/gpu/mac/vt_video_decode_accelerator_mac.cc
@@ -620,11 +620,11 @@
case H264PROFILE_EXTENDED:
case H264PROFILE_MAIN:
case H264PROFILE_HIGH:
- codec_ = kCodecH264;
+ codec_ = VideoCodec::kH264;
break;
case VP9PROFILE_PROFILE0:
case VP9PROFILE_PROFILE2:
- codec_ = kCodecVP9;
+ codec_ = VideoCodec::kVP9;
break;
default:
NOTREACHED() << "Unsupported profile.";
@@ -656,10 +656,10 @@
base::ScopedCFTypeRef<CMFormatDescriptionRef> format;
switch (codec_) {
- case kCodecH264:
+ case VideoCodec::kH264:
format = CreateVideoFormatH264(active_sps_, active_spsext_, active_pps_);
break;
- case kCodecVP9:
+ case VideoCodec::kVP9:
format = CreateVideoFormatVP9(
cc_detector_->GetColorSpace(config_.container_color_space),
config_.profile, config_.hdr_metadata,
@@ -708,7 +708,7 @@
}
UMA_HISTOGRAM_BOOLEAN("Media.VTVDA.HardwareAccelerated", using_hardware);
- if (codec_ == kCodecVP9 && !vp9_bsf_)
+ if (codec_ == VideoCodec::kVP9 && !vp9_bsf_)
vp9_bsf_ = std::make_unique<VP9SuperFrameBitstreamFilter>();
// Record that the configuration change is complete.
@@ -1228,7 +1228,7 @@
Frame* frame = new Frame(bitstream_id);
pending_frames_[bitstream_id] = base::WrapUnique(frame);
- if (codec_ == kCodecVP9) {
+ if (codec_ == VideoCodec::kVP9) {
decoder_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&VTVideoDecodeAccelerator::DecodeTaskVp9,
@@ -1307,7 +1307,7 @@
DCHECK(gpu_task_runner_->BelongsToCurrentThread());
switch (state_) {
case STATE_DECODING:
- if (codec_ != kCodecH264) {
+ if (codec_ != VideoCodec::kH264) {
while (state_ == STATE_DECODING) {
if (!ProcessOutputQueue() && !ProcessTaskQueue())
break;
@@ -1350,7 +1350,7 @@
Task& task = task_queue_.front();
switch (task.type) {
case TASK_FRAME: {
- if (codec_ == kCodecVP9) {
+ if (codec_ == VideoCodec::kVP9) {
// Once we've reached our maximum output queue size, defer end of
// bitstream buffer signals to avoid piling up too many frames.
if (output_queue_.size() >= limits::kMaxVideoFrames)
@@ -1382,8 +1382,8 @@
case TASK_FLUSH:
DCHECK_EQ(task.type, pending_flush_tasks_.front());
- if ((codec_ == kCodecH264 && reorder_queue_.size() == 0) ||
- (codec_ == kCodecVP9 && output_queue_.empty())) {
+ if ((codec_ == VideoCodec::kH264 && reorder_queue_.size() == 0) ||
+ (codec_ == VideoCodec::kVP9 && output_queue_.empty())) {
DVLOG(1) << "Flush complete";
pending_flush_tasks_.pop();
client_->NotifyFlushDone();
@@ -1394,8 +1394,8 @@
case TASK_RESET:
DCHECK_EQ(task.type, pending_flush_tasks_.front());
- if ((codec_ == kCodecH264 && reorder_queue_.size() == 0) ||
- (codec_ == kCodecVP9 && output_queue_.empty())) {
+ if ((codec_ == VideoCodec::kH264 && reorder_queue_.size() == 0) ||
+ (codec_ == VideoCodec::kVP9 && output_queue_.empty())) {
DVLOG(1) << "Reset complete";
waiting_for_idr_ = true;
pending_flush_tasks_.pop();
diff --git a/media/gpu/test/video.cc b/media/gpu/test/video.cc
index 76063a4..b2763d0 100644
--- a/media/gpu/test/video.cc
+++ b/media/gpu/test/video.cc
@@ -234,7 +234,7 @@
}
bool Video::Decode() {
- if (codec_ != VideoCodec::kCodecVP9) {
+ if (codec_ != VideoCodec::kVP9) {
LOG(ERROR) << "Decoding is currently only supported for VP9 videos";
return false;
}
@@ -268,7 +268,7 @@
// data will be replaced with the decompressed video stream.
pixel_format_ = VideoPixelFormat::PIXEL_FORMAT_I420;
profile_ = VIDEO_CODEC_PROFILE_UNKNOWN;
- codec_ = kUnknownVideoCodec;
+ codec_ = VideoCodec::kUnknown;
data_ = std::move(decompressed_data);
return true;
}
@@ -649,15 +649,15 @@
absl::optional<VideoCodec> Video::ConvertProfileToCodec(
VideoCodecProfile profile) {
if (profile >= H264PROFILE_MIN && profile <= H264PROFILE_MAX) {
- return kCodecH264;
+ return VideoCodec::kH264;
} else if (profile >= VP8PROFILE_MIN && profile <= VP8PROFILE_MAX) {
- return kCodecVP8;
+ return VideoCodec::kVP8;
} else if (profile >= VP9PROFILE_MIN && profile <= VP9PROFILE_MAX) {
- return kCodecVP9;
+ return VideoCodec::kVP9;
} else if (profile >= AV1PROFILE_MIN && profile <= AV1PROFILE_MAX) {
- return kCodecAV1;
+ return VideoCodec::kAV1;
} else if (profile >= HEVCPROFILE_MIN && profile <= HEVCPROFILE_MAX) {
- return kCodecHEVC;
+ return VideoCodec::kHEVC;
} else {
VLOG(2) << GetProfileName(profile) << " is not supported";
return absl::nullopt;
diff --git a/media/gpu/test/video.h b/media/gpu/test/video.h
index a6c402b..fa07af42 100644
--- a/media/gpu/test/video.h
+++ b/media/gpu/test/video.h
@@ -141,7 +141,7 @@
// Video codec, profile and bit depth for encoded videos.
VideoCodecProfile profile_ = VIDEO_CODEC_PROFILE_UNKNOWN;
- VideoCodec codec_ = kUnknownVideoCodec;
+ VideoCodec codec_ = VideoCodec::kUnknown;
uint8_t bit_depth_ = 0u;
// Pixel format for raw videos.
diff --git a/media/gpu/test/video_encoder/bitstream_file_writer.cc b/media/gpu/test/video_encoder/bitstream_file_writer.cc
index 90b0b79..8cb1f5d 100644
--- a/media/gpu/test/video_encoder/bitstream_file_writer.cc
+++ b/media/gpu/test/video_encoder/bitstream_file_writer.cc
@@ -75,7 +75,7 @@
if (!base::DirectoryExists(output_filepath.DirName()))
base::CreateDirectory(output_filepath.DirName());
- if (codec == kCodecH264) {
+ if (codec == VideoCodec::kH264) {
base::File output_file(output_filepath, base::File::FLAG_CREATE_ALWAYS |
base::File::FLAG_WRITE);
LOG_ASSERT(output_file.IsValid());
diff --git a/media/gpu/test/video_encoder/bitstream_validator.cc b/media/gpu/test/video_encoder/bitstream_validator.cc
index c523680..b54218c 100644
--- a/media/gpu/test/video_encoder/bitstream_validator.cc
+++ b/media/gpu/test/video_encoder/bitstream_validator.cc
@@ -35,7 +35,7 @@
std::unique_ptr<MediaLog>* media_log) {
std::unique_ptr<VideoDecoder> decoder;
- if (codec == kCodecVP8 || codec == kCodecVP9) {
+ if (codec == VideoCodec::kVP8 || codec == VideoCodec::kVP9) {
#if BUILDFLAG(ENABLE_LIBVPX)
LOG_ASSERT(!base::FeatureList::IsEnabled(kFFmpegDecodeOpaqueVP8));
decoder = std::make_unique<VpxVideoDecoder>();
diff --git a/media/gpu/test/video_encoder/video_encoder_test_environment.cc b/media/gpu/test/video_encoder/video_encoder_test_environment.cc
index af66abe..c478d94 100644
--- a/media/gpu/test/video_encoder/video_encoder_test_environment.cc
+++ b/media/gpu/test/video_encoder/video_encoder_test_environment.cc
@@ -319,7 +319,7 @@
bool VideoEncoderTestEnvironment::IsKeplerUsed() const {
#if BUILDFLAG(IS_CHROMEOS_ASH)
const VideoCodec codec = VideoCodecProfileToVideoCodec(Profile());
- if (codec != VideoCodec::kCodecVP8)
+ if (codec != VideoCodec::kVP8)
return false;
const static std::string board = base::SysInfo::GetLsbReleaseBoard();
if (board == "unknown") {
diff --git a/media/gpu/test/video_test_helpers.cc b/media/gpu/test/video_test_helpers.cc
index e6502ab..ad88d9e 100644
--- a/media/gpu/test/video_test_helpers.cc
+++ b/media/gpu/test/video_test_helpers.cc
@@ -91,10 +91,10 @@
write16(4, kVersion);
write16(6, kIvfFileHeaderSize);
switch (codec) {
- case kCodecVP8:
+ case VideoCodec::kVP8:
strcpy(&ivf_header[8], "VP80");
break;
- case kCodecVP9:
+ case VideoCodec::kVP9:
strcpy(&ivf_header[8], "VP90");
break;
default:
@@ -144,12 +144,12 @@
scoped_refptr<DecoderBuffer> EncodedDataHelper::GetNextBuffer() {
switch (VideoCodecProfileToVideoCodec(profile_)) {
- case kCodecH264:
- case kCodecHEVC:
+ case VideoCodec::kH264:
+ case VideoCodec::kHEVC:
return GetNextFragment();
- case kCodecVP8:
- case kCodecVP9:
- case kCodecAV1:
+ case VideoCodec::kVP8:
+ case VideoCodec::kVP9:
+ case VideoCodec::kAV1:
return GetNextFrame();
default:
NOTREACHED();
diff --git a/media/gpu/v4l2/v4l2_device.cc b/media/gpu/v4l2/v4l2_device.cc
index a376928..db04e6f 100644
--- a/media/gpu/v4l2/v4l2_device.cc
+++ b/media/gpu/v4l2/v4l2_device.cc
@@ -1545,7 +1545,7 @@
VideoCodecProfile V4L2ProfileToVideoCodecProfile(VideoCodec codec,
uint32_t v4l2_profile) {
switch (codec) {
- case kCodecH264:
+ case VideoCodec::kH264:
switch (v4l2_profile) {
// H264 Stereo amd Multiview High are not tested and the use is
// minuscule, skip.
@@ -1560,7 +1560,7 @@
return H264PROFILE_HIGH;
}
break;
- case kCodecVP8:
+ case VideoCodec::kVP8:
switch (v4l2_profile) {
case V4L2_MPEG_VIDEO_VP8_PROFILE_0:
case V4L2_MPEG_VIDEO_VP8_PROFILE_1:
@@ -1569,7 +1569,7 @@
return VP8PROFILE_ANY;
}
break;
- case kCodecVP9:
+ case VideoCodec::kVP9:
switch (v4l2_profile) {
// VP9 Profile 1 and 3 are not tested and the use is minuscule, skip.
case V4L2_MPEG_VIDEO_VP9_PROFILE_0:
@@ -1594,13 +1594,13 @@
std::vector<VideoCodecProfile>* profiles) {
uint32_t query_id = 0;
switch (codec) {
- case kCodecH264:
+ case VideoCodec::kH264:
query_id = V4L2_CID_MPEG_VIDEO_H264_PROFILE;
break;
- case kCodecVP8:
+ case VideoCodec::kVP8:
query_id = V4L2_CID_MPEG_VIDEO_VP8_PROFILE;
break;
- case kCodecVP9:
+ case VideoCodec::kVP9:
query_id = V4L2_CID_MPEG_VIDEO_VP9_PROFILE;
break;
default:
@@ -1633,7 +1633,7 @@
switch (pix_fmt) {
case V4L2_PIX_FMT_H264:
case V4L2_PIX_FMT_H264_SLICE:
- if (!get_supported_profiles(kCodecH264, &profiles)) {
+ if (!get_supported_profiles(VideoCodec::kH264, &profiles)) {
DLOG(WARNING) << "Driver doesn't support QUERY H264 profiles, "
<< "use default values, Base, Main, High";
profiles = {
@@ -1649,7 +1649,7 @@
break;
case V4L2_PIX_FMT_VP9:
case V4L2_PIX_FMT_VP9_FRAME:
- if (!get_supported_profiles(kCodecVP9, &profiles)) {
+ if (!get_supported_profiles(VideoCodec::kVP9, &profiles)) {
DLOG(WARNING) << "Driver doesn't support QUERY VP9 profiles, "
<< "use default values, Profile0";
profiles = {VP9PROFILE_PROFILE0};
diff --git a/media/gpu/v4l2/v4l2_vda_helpers.cc b/media/gpu/v4l2/v4l2_vda_helpers.cc
index adec5b8..b83b41a 100644
--- a/media/gpu/v4l2/v4l2_vda_helpers.cc
+++ b/media/gpu/v4l2/v4l2_vda_helpers.cc
@@ -147,11 +147,11 @@
InputBufferFragmentSplitter::CreateFromProfile(
media::VideoCodecProfile profile) {
switch (VideoCodecProfileToVideoCodec(profile)) {
- case kCodecH264:
+ case VideoCodec::kH264:
return std::make_unique<
v4l2_vda_helpers::H264InputBufferFragmentSplitter>();
- case kCodecVP8:
- case kCodecVP9:
+ case VideoCodec::kVP8:
+ case VideoCodec::kVP9:
// VP8/VP9 don't need any frame splitting, use the default implementation.
return std::make_unique<v4l2_vda_helpers::InputBufferFragmentSplitter>();
default:
diff --git a/media/gpu/vaapi/vaapi_video_decode_accelerator.cc b/media/gpu/vaapi/vaapi_video_decode_accelerator.cc
index 0dc47d88..7514fe4 100644
--- a/media/gpu/vaapi/vaapi_video_decode_accelerator.cc
+++ b/media/gpu/vaapi/vaapi_video_decode_accelerator.cc
@@ -1206,7 +1206,7 @@
base::EraseIf(profiles, [](const auto& profile) {
VideoCodec codec = VideoCodecProfileToVideoCodec(profile.profile);
return profile.profile == VP9PROFILE_PROFILE2 ||
- codec == VideoCodec::kCodecAV1 || codec == VideoCodec::kCodecHEVC;
+ codec == VideoCodec::kAV1 || codec == VideoCodec::kHEVC;
});
return profiles;
}
diff --git a/media/gpu/vaapi/vaapi_video_decoder.cc b/media/gpu/vaapi/vaapi_video_decoder.cc
index b398d0b2..f92e5f64 100644
--- a/media/gpu/vaapi/vaapi_video_decoder.cc
+++ b/media/gpu/vaapi/vaapi_video_decoder.cc
@@ -245,8 +245,9 @@
std::move(init_cb).Run(StatusCode::kDecoderMissingCdmForEncryptedContent);
return;
}
- if (config.codec() != kCodecH264 && config.codec() != kCodecVP9 &&
- config.codec() != kCodecHEVC) {
+ if (config.codec() != VideoCodec::kH264 &&
+ config.codec() != VideoCodec::kVP9 &&
+ config.codec() != VideoCodec::kHEVC) {
SetErrorState(
base::StringPrintf("%s is not supported for encrypted content",
GetCodecName(config.codec()).c_str()));
@@ -263,7 +264,7 @@
VAImplementation::kMesaGallium);
#endif
#if BUILDFLAG(ENABLE_PLATFORM_HEVC_DECODING)
- } else if (config.codec() == kCodecHEVC &&
+ } else if (config.codec() == VideoCodec::kHEVC &&
!base::CommandLine::ForCurrentProcess()->HasSwitch(
switches::kEnableClearHevcForTesting)) {
SetErrorState("clear HEVC content is not supported");
diff --git a/media/gpu/vaapi/vaapi_video_encode_accelerator.cc b/media/gpu/vaapi/vaapi_video_encode_accelerator.cc
index 2add9b8..9c91ce3 100644
--- a/media/gpu/vaapi/vaapi_video_encode_accelerator.cc
+++ b/media/gpu/vaapi/vaapi_video_encode_accelerator.cc
@@ -204,7 +204,8 @@
client_ = client_ptr_factory_->GetWeakPtr();
const VideoCodec codec = VideoCodecProfileToVideoCodec(config.output_profile);
- if (codec != kCodecH264 && codec != kCodecVP8 && codec != kCodecVP9) {
+ if (codec != VideoCodec::kH264 && codec != VideoCodec::kVP8 &&
+ codec != VideoCodec::kVP9) {
VLOGF(1) << "Unsupported profile: "
<< GetProfileName(config.output_profile);
return false;
@@ -266,8 +267,9 @@
return false;
}
VaapiWrapper::CodecMode mode =
- codec == kCodecVP9 ? VaapiWrapper::kEncodeConstantQuantizationParameter
- : VaapiWrapper::kEncodeConstantBitrate;
+ codec == VideoCodec::kVP9
+ ? VaapiWrapper::kEncodeConstantQuantizationParameter
+ : VaapiWrapper::kEncodeConstantBitrate;
vaapi_wrapper_ = VaapiWrapper::CreateForVideoCodec(
mode, config.output_profile, EncryptionScheme::kUnencrypted,
base::BindRepeating(&ReportVaapiErrorToUMA,
@@ -303,7 +305,7 @@
},
base::Unretained(this));
switch (output_codec_) {
- case kCodecH264:
+ case VideoCodec::kH264:
if (!IsConfiguredForTesting()) {
encoder_ = std::make_unique<H264VaapiVideoEncoderDelegate>(
vaapi_wrapper_, error_cb);
@@ -312,7 +314,7 @@
DCHECK_EQ(ave_config.bitrate_control,
VaapiVideoEncoderDelegate::BitrateControl::kConstantBitrate);
break;
- case kCodecVP8:
+ case VideoCodec::kVP8:
if (!IsConfiguredForTesting()) {
encoder_ = std::make_unique<VP8VaapiVideoEncoderDelegate>(
vaapi_wrapper_, error_cb);
@@ -321,7 +323,7 @@
DCHECK_EQ(ave_config.bitrate_control,
VaapiVideoEncoderDelegate::BitrateControl::kConstantBitrate);
break;
- case kCodecVP9:
+ case VideoCodec::kVP9:
if (!IsConfiguredForTesting()) {
encoder_ = std::make_unique<VP9VaapiVideoEncoderDelegate>(
vaapi_wrapper_, error_cb);
@@ -742,13 +744,13 @@
}
scoped_refptr<CodecPicture> picture;
switch (output_codec_) {
- case kCodecH264:
+ case VideoCodec::kH264:
picture = new VaapiH264Picture(std::move(reconstructed_surface));
break;
- case kCodecVP8:
+ case VideoCodec::kVP8:
picture = new VaapiVP8Picture(std::move(reconstructed_surface));
break;
- case kCodecVP9:
+ case VideoCodec::kVP9:
picture = new VaapiVP9Picture(std::move(reconstructed_surface));
break;
default:
diff --git a/media/gpu/vaapi/vaapi_video_encode_accelerator.h b/media/gpu/vaapi/vaapi_video_encode_accelerator.h
index 53196eb..dc72ff1 100644
--- a/media/gpu/vaapi/vaapi_video_encode_accelerator.h
+++ b/media/gpu/vaapi/vaapi_video_encode_accelerator.h
@@ -190,7 +190,7 @@
gfx::Size expected_input_coded_size_;
// The codec of the stream to be produced. Set during initialization.
- VideoCodec output_codec_ = kUnknownVideoCodec;
+ VideoCodec output_codec_ = VideoCodec::kUnknown;
// The visible rect to be encoded.
gfx::Rect visible_rect_;
diff --git a/media/gpu/vaapi/vp8_vaapi_video_encoder_delegate.cc b/media/gpu/vaapi/vp8_vaapi_video_encoder_delegate.cc
index 76d8059..971437a 100644
--- a/media/gpu/vaapi/vp8_vaapi_video_encoder_delegate.cc
+++ b/media/gpu/vaapi/vp8_vaapi_video_encoder_delegate.cc
@@ -98,7 +98,8 @@
const VaapiVideoEncoderDelegate::Config& ave_config) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- if (VideoCodecProfileToVideoCodec(config.output_profile) != kCodecVP8) {
+ if (VideoCodecProfileToVideoCodec(config.output_profile) !=
+ VideoCodec::kVP8) {
DVLOGF(1) << "Invalid profile: " << GetProfileName(config.output_profile);
return false;
}
diff --git a/media/gpu/vaapi/vp9_vaapi_video_encoder_delegate.cc b/media/gpu/vaapi/vp9_vaapi_video_encoder_delegate.cc
index 8ace81f..2ef00b1 100644
--- a/media/gpu/vaapi/vp9_vaapi_video_encoder_delegate.cc
+++ b/media/gpu/vaapi/vp9_vaapi_video_encoder_delegate.cc
@@ -209,7 +209,8 @@
const VideoEncodeAccelerator::Config& config,
const VaapiVideoEncoderDelegate::Config& ave_config) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- if (VideoCodecProfileToVideoCodec(config.output_profile) != kCodecVP9) {
+ if (VideoCodecProfileToVideoCodec(config.output_profile) !=
+ VideoCodec::kVP9) {
DVLOGF(1) << "Invalid profile: " << GetProfileName(config.output_profile);
return false;
}
diff --git a/media/gpu/video_decode_accelerator_tests.cc b/media/gpu/video_decode_accelerator_tests.cc
index 81678b4..11188b2a 100644
--- a/media/gpu/video_decode_accelerator_tests.cc
+++ b/media/gpu/video_decode_accelerator_tests.cc
@@ -173,7 +173,7 @@
// TODO(hiroh): Move this to Video class or video_frame_helpers.h.
// TODO(hiroh): Create model frames once during the test.
bool CreateModelFrames(const Video* video) {
- if (video->Codec() != VideoCodec::kCodecAV1) {
+ if (video->Codec() != VideoCodec::kAV1) {
LOG(ERROR) << "Frame validation by SSIM is allowed for AV1 streams only";
return false;
}
@@ -357,8 +357,8 @@
// H.264/HEVC video stream. After resetting the video is played until the end.
TEST_F(VideoDecoderTest, ResetAfterFirstConfigInfo) {
// This test is only relevant for H.264/HEVC video streams.
- if (g_env->Video()->Codec() != media::kCodecH264 &&
- g_env->Video()->Codec() != media::kCodecHEVC)
+ if (g_env->Video()->Codec() != media::VideoCodec::kH264 &&
+ g_env->Video()->Codec() != media::VideoCodec::kHEVC)
GTEST_SKIP();
auto tvp = CreateVideoPlayer(g_env->Video());
diff --git a/media/gpu/video_encode_accelerator_tests.cc b/media/gpu/video_encode_accelerator_tests.cc
index 7f9854d..6913007 100644
--- a/media/gpu/video_encode_accelerator_tests.cc
+++ b/media/gpu/video_encode_accelerator_tests.cc
@@ -206,8 +206,8 @@
VideoCodecProfileToVideoCodec(config.output_profile);
if (g_env->SaveOutputBitstream()) {
base::FilePath::StringPieceType extension =
- codec == VideoCodec::kCodecH264 ? FILE_PATH_LITERAL("h264")
- : FILE_PATH_LITERAL("ivf");
+ codec == VideoCodec::kH264 ? FILE_PATH_LITERAL("h264")
+ : FILE_PATH_LITERAL("ivf");
auto output_bitstream_filepath =
g_env->OutputFolder()
.Append(g_env->GetTestOutputFilePath())
@@ -248,14 +248,14 @@
}
switch (codec) {
- case kCodecH264:
+ case VideoCodec::kH264:
bitstream_processors.emplace_back(new H264Validator(
config.output_profile, visible_rect, config.num_temporal_layers));
break;
- case kCodecVP8:
+ case VideoCodec::kVP8:
bitstream_processors.emplace_back(new VP8Validator(visible_rect));
break;
- case kCodecVP9:
+ case VideoCodec::kVP9:
bitstream_processors.emplace_back(new VP9Validator(
config.output_profile, visible_rect, config.num_spatial_layers,
config.num_temporal_layers));
diff --git a/media/gpu/windows/d3d11_decoder_configurator.cc b/media/gpu/windows/d3d11_decoder_configurator.cc
index 59f0823..6ef4473 100644
--- a/media/gpu/windows/d3d11_decoder_configurator.cc
+++ b/media/gpu/windows/d3d11_decoder_configurator.cc
@@ -44,7 +44,7 @@
const auto decoder_dxgi_format =
bit_depth == 8 ? DXGI_FORMAT_NV12 : DXGI_FORMAT_P010;
GUID decoder_guid = {};
- if (config.codec() == kCodecH264) {
+ if (config.codec() == VideoCodec::kH264) {
decoder_guid = D3D11_DECODER_PROFILE_H264_VLD_NOFGT;
} else if (config.profile() == VP9PROFILE_PROFILE0) {
decoder_guid = D3D11_DECODER_PROFILE_VP9_VLD_PROFILE0;
diff --git a/media/gpu/windows/d3d11_decoder_configurator_unittest.cc b/media/gpu/windows/d3d11_decoder_configurator_unittest.cc
index 4bc4976..9f6c219 100644
--- a/media/gpu/windows/d3d11_decoder_configurator_unittest.cc
+++ b/media/gpu/windows/d3d11_decoder_configurator_unittest.cc
@@ -27,7 +27,8 @@
bool encrypted) {
VideoDecoderConfig result;
result.Initialize(
- kUnknownVideoCodec, // It doesn't matter because it won't be used.
+ VideoCodec::kUnknown, // It doesn't matter because it won't
+ // be used.
profile, VideoDecoderConfig::AlphaMode::kIsOpaque, VideoColorSpace(),
kNoTransformation, size, {}, {}, {},
encrypted ? EncryptionScheme::kCenc : EncryptionScheme::kUnencrypted);
diff --git a/media/gpu/windows/d3d11_texture_selector_unittest.cc b/media/gpu/windows/d3d11_texture_selector_unittest.cc
index d9d57c7..57801b9e 100644
--- a/media/gpu/windows/d3d11_texture_selector_unittest.cc
+++ b/media/gpu/windows/d3d11_texture_selector_unittest.cc
@@ -36,7 +36,8 @@
bool encrypted) {
VideoDecoderConfig result;
result.Initialize(
- kUnknownVideoCodec, // It doesn't matter because it won't be used.
+ VideoCodec::kUnknown, // It doesn't matter because it won't
+ // be used.
profile, VideoDecoderConfig::AlphaMode::kIsOpaque, VideoColorSpace(),
kNoTransformation, size, {}, {}, {},
encrypted ? EncryptionScheme::kCenc : EncryptionScheme::kUnencrypted);
diff --git a/media/gpu/windows/d3d11_video_decoder.cc b/media/gpu/windows/d3d11_video_decoder.cc
index 1a2cc64..bbef0cc 100644
--- a/media/gpu/windows/d3d11_video_decoder.cc
+++ b/media/gpu/windows/d3d11_video_decoder.cc
@@ -168,17 +168,17 @@
return hr;
profile_ = config.profile();
- if (config.codec() == kCodecVP9) {
+ if (config.codec() == VideoCodec::kVP9) {
accelerated_video_decoder_ = std::make_unique<VP9Decoder>(
std::make_unique<D3D11VP9Accelerator>(
this, media_log_.get(), video_device_, std::move(video_context)),
profile_, config.color_space_info());
- } else if (config.codec() == kCodecH264) {
+ } else if (config.codec() == VideoCodec::kH264) {
accelerated_video_decoder_ = std::make_unique<H264Decoder>(
std::make_unique<D3D11H264Accelerator>(
this, media_log_.get(), video_device_, std::move(video_context)),
profile_, config.color_space_info());
- } else if (config.codec() == kCodecAV1) {
+ } else if (config.codec() == VideoCodec::kAV1) {
accelerated_video_decoder_ = std::make_unique<AV1Decoder>(
std::make_unique<D3D11AV1Accelerator>(
this, media_log_.get(), video_device_, std::move(video_context)),
@@ -257,14 +257,16 @@
.AddCause(HresultToStatus(hr));
}
- if ((config_.codec() == kCodecVP9 || config_.codec() == kCodecAV1) &&
+ if ((config_.codec() == VideoCodec::kVP9 ||
+ config_.codec() == VideoCodec::kAV1) &&
dec_config.ConfigBitstreamRaw == 1) {
// DXVA VP9 and AV1 specifications say ConfigBitstreamRaw "shall be 1".
found = true;
break;
}
- if (config_.codec() == kCodecH264 && dec_config.ConfigBitstreamRaw == 2) {
+ if (config_.codec() == VideoCodec::kH264 &&
+ dec_config.ConfigBitstreamRaw == 2) {
// ConfigBitstreamRaw == 2 means the decoder uses DXVA_Slice_H264_Short.
found = true;
break;
diff --git a/media/gpu/windows/d3d11_video_decoder_unittest.cc b/media/gpu/windows/d3d11_video_decoder_unittest.cc
index a3c5e9c..c0d71a2 100644
--- a/media/gpu/windows/d3d11_video_decoder_unittest.cc
+++ b/media/gpu/windows/d3d11_video_decoder_unittest.cc
@@ -254,8 +254,8 @@
};
TEST_F(D3D11VideoDecoderTest, SupportsVP9Profile0WithDecoderEnabled) {
- VideoDecoderConfig configuration =
- TestVideoConfig::NormalCodecProfile(kCodecVP9, VP9PROFILE_PROFILE0);
+ VideoDecoderConfig configuration = TestVideoConfig::NormalCodecProfile(
+ VideoCodec::kVP9, VP9PROFILE_PROFILE0);
EnableDecoder(D3D11_DECODER_PROFILE_VP9_VLD_PROFILE0);
CreateDecoder();
@@ -269,8 +269,8 @@
TEST_F(D3D11VideoDecoderTest, DoesNotSupportVP9WithLegacyGPU) {
SetGPUProfile(LegacyIntelGPU);
- VideoDecoderConfig configuration =
- TestVideoConfig::NormalCodecProfile(kCodecVP9, VP9PROFILE_PROFILE0);
+ VideoDecoderConfig configuration = TestVideoConfig::NormalCodecProfile(
+ VideoCodec::kVP9, VP9PROFILE_PROFILE0);
EnableDecoder(D3D11_DECODER_PROFILE_VP9_VLD_PROFILE0);
CreateDecoder();
@@ -279,8 +279,8 @@
TEST_F(D3D11VideoDecoderTest, DoesNotSupportVP9WithGPUWorkaroundDisableVPX) {
gpu_workarounds_.disable_accelerated_vp9_decode = true;
- VideoDecoderConfig configuration =
- TestVideoConfig::NormalCodecProfile(kCodecVP9, VP9PROFILE_PROFILE0);
+ VideoDecoderConfig configuration = TestVideoConfig::NormalCodecProfile(
+ VideoCodec::kVP9, VP9PROFILE_PROFILE0);
EnableDecoder(D3D11_DECODER_PROFILE_VP9_VLD_PROFILE0);
CreateDecoder();
@@ -288,8 +288,8 @@
}
TEST_F(D3D11VideoDecoderTest, DoesNotSupportVP9WithoutDecoderEnabled) {
- VideoDecoderConfig configuration =
- TestVideoConfig::NormalCodecProfile(kCodecVP9, VP9PROFILE_PROFILE0);
+ VideoDecoderConfig configuration = TestVideoConfig::NormalCodecProfile(
+ VideoCodec::kVP9, VP9PROFILE_PROFILE0);
// Enable a non-VP9 decoder.
EnableDecoder(D3D11_DECODER_PROFILE_H264_VLD_NOFGT); // Paranoia, not VP9.
@@ -301,7 +301,7 @@
CreateDecoder();
VideoDecoderConfig high10 = TestVideoConfig::NormalCodecProfile(
- kCodecH264, H264PROFILE_HIGH10PROFILE);
+ VideoCodec::kH264, H264PROFILE_HIGH10PROFILE);
InitializeDecoder(high10, false);
}
@@ -310,7 +310,7 @@
CreateDecoder();
VideoDecoderConfig normal =
- TestVideoConfig::NormalCodecProfile(kCodecH264, H264PROFILE_MAIN);
+ TestVideoConfig::NormalCodecProfile(VideoCodec::kH264, H264PROFILE_MAIN);
InitializeDecoder(normal, true);
// TODO(liberato): Check |last_video_decoder_desc_| for sanity.
@@ -328,7 +328,7 @@
CreateDecoder(empty_configs);
VideoDecoderConfig normal =
- TestVideoConfig::NormalCodecProfile(kCodecH264, H264PROFILE_MAIN);
+ TestVideoConfig::NormalCodecProfile(VideoCodec::kH264, H264PROFILE_MAIN);
InitializeDecoder(normal, false);
}
@@ -336,7 +336,7 @@
TEST_F(D3D11VideoDecoderTest, DoesNotSupportEncryptedConfig) {
CreateDecoder();
VideoDecoderConfig encrypted_config =
- TestVideoConfig::NormalCodecProfile(kCodecH264, H264PROFILE_MAIN);
+ TestVideoConfig::NormalCodecProfile(VideoCodec::kH264, H264PROFILE_MAIN);
encrypted_config.SetIsEncrypted(true);
InitializeDecoder(encrypted_config, false);
}
@@ -348,7 +348,8 @@
gpu_workarounds_.disable_d3d11_video_decoder = true;
CreateDecoder();
InitializeDecoder(
- TestVideoConfig::NormalCodecProfile(kCodecH264, H264PROFILE_MAIN), true);
+ TestVideoConfig::NormalCodecProfile(VideoCodec::kH264, H264PROFILE_MAIN),
+ true);
}
TEST_F(D3D11VideoDecoderTest, WorkaroundTurnsOffDecoder) {
@@ -356,7 +357,8 @@
gpu_workarounds_.disable_d3d11_video_decoder = true;
CreateDecoder();
InitializeDecoder(
- TestVideoConfig::NormalCodecProfile(kCodecH264, H264PROFILE_MAIN), false);
+ TestVideoConfig::NormalCodecProfile(VideoCodec::kH264, H264PROFILE_MAIN),
+ false);
}
} // namespace media
diff --git a/media/gpu/windows/dxva_video_decode_accelerator_win.cc b/media/gpu/windows/dxva_video_decode_accelerator_win.cc
index 237b23d8..4c0a987 100644
--- a/media/gpu/windows/dxva_video_decode_accelerator_win.cc
+++ b/media/gpu/windows/dxva_video_decode_accelerator_win.cc
@@ -652,7 +652,7 @@
make_context_current_cb_(make_context_current_cb),
bind_image_cb_(bind_image_cb),
media_log_(media_log),
- codec_(kUnknownVideoCodec),
+ codec_(VideoCodec::kUnknown),
decoder_thread_("DXVAVideoDecoderThread"),
pending_flush_(false),
enable_low_latency_(gpu_preferences.enable_low_latency_dxva),
@@ -824,11 +824,11 @@
SendMFTMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0),
"Send MFT_MESSAGE_NOTIFY_START_OF_STREAM notification failed", false);
- if (codec_ == kCodecH264)
+ if (codec_ == VideoCodec::kH264)
config_change_detector_ = std::make_unique<H264ConfigChangeDetector>();
- if (codec_ == kCodecVP8)
+ if (codec_ == VideoCodec::kVP8)
config_change_detector_ = std::make_unique<VP8ConfigChangeDetector>();
- if (codec_ == kCodecVP9)
+ if (codec_ == VideoCodec::kVP9)
config_change_detector_ = std::make_unique<VP9ConfigChangeDetector>();
processing_config_changed_ = false;
@@ -1500,14 +1500,14 @@
std::u16string file_version = version_info->file_version();
RETURN_ON_FAILURE(file_version.find(u"6.1.7140") == std::u16string::npos,
"blocked version of msmpeg2vdec.dll 6.1.7140", false);
- codec_ = kCodecH264;
+ codec_ = VideoCodec::kH264;
clsid = __uuidof(CMSH264DecoderMFT);
} else if ((profile >= VP9PROFILE_PROFILE0 &&
profile <= VP9PROFILE_PROFILE3) ||
profile == VP8PROFILE_ANY) {
- codec_ = profile == VP8PROFILE_ANY ? kCodecVP8 : kCodecVP9;
- if ((codec_ == kCodecVP8 && enable_accelerated_vp8_decode_) ||
- (codec_ == kCodecVP9 && enable_accelerated_vp9_decode_)) {
+ codec_ = profile == VP8PROFILE_ANY ? VideoCodec::kVP8 : VideoCodec::kVP9;
+ if ((codec_ == VideoCodec::kVP8 && enable_accelerated_vp8_decode_) ||
+ (codec_ == VideoCodec::kVP9 && enable_accelerated_vp9_decode_)) {
clsid = CLSID_MSVPxDecoder;
decoder_dll = ::LoadLibrary(kMSVPxDecoderDLLName);
if (decoder_dll)
@@ -1518,7 +1518,7 @@
if (enable_accelerated_av1_decode_ &&
base::FeatureList::IsEnabled(kMediaFoundationAV1Decoding) &&
(profile >= AV1PROFILE_MIN && profile <= AV1PROFILE_MAX)) {
- codec_ = kCodecAV1;
+ codec_ = VideoCodec::kAV1;
clsid = CLSID_CAV1DecoderMFT;
// Since the AV1 decoder is a Windows Store package, it can't be created
@@ -1632,7 +1632,7 @@
hr = attributes->GetUINT32(MF_SA_D3D_AWARE, &dxva);
RETURN_ON_HR_FAILURE(hr, "Failed to check if decoder supports DXVA", false);
- if (codec_ == kCodecH264) {
+ if (codec_ == VideoCodec::kH264) {
hr = attributes->SetUINT32(CODECAPI_AVDecVideoAcceleration_H264, TRUE);
RETURN_ON_HR_FAILURE(hr, "Failed to enable DXVA H/W decoding", false);
}
@@ -1701,13 +1701,13 @@
hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
RETURN_ON_HR_FAILURE(hr, "Failed to set major input type", false);
- if (codec_ == kCodecH264) {
+ if (codec_ == VideoCodec::kH264) {
hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
- } else if (codec_ == kCodecVP9) {
+ } else if (codec_ == VideoCodec::kVP9) {
hr = media_type->SetGUID(MF_MT_SUBTYPE, MEDIASUBTYPE_VP90);
- } else if (codec_ == kCodecVP8) {
+ } else if (codec_ == VideoCodec::kVP8) {
hr = media_type->SetGUID(MF_MT_SUBTYPE, MEDIASUBTYPE_VP80);
- } else if (codec_ == kCodecAV1) {
+ } else if (codec_ == VideoCodec::kAV1) {
hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_AV1);
} else {
NOTREACHED();
@@ -1788,7 +1788,7 @@
// There should be three flags, one for requiring a whole frame be in a
// single sample, one for requiring there be one buffer only in a single
// sample, and one that specifies a fixed sample size. (as in cbSize)
- if (codec_ == kCodecH264 && input_stream_info_.dwFlags != 0x7u)
+ if (codec_ == VideoCodec::kH264 && input_stream_info_.dwFlags != 0x7u)
return false;
DVLOG(1) << "Min buffer size: " << input_stream_info_.cbSize;
@@ -1804,7 +1804,7 @@
// The flags here should be the same and mean the same thing, except when
// DXVA is enabled, there is an extra 0x100 flag meaning decoder will
// allocate its own sample.
- if (codec_ == kCodecH264 && output_stream_info_.dwFlags != 0x107u)
+ if (codec_ == VideoCodec::kH264 && output_stream_info_.dwFlags != 0x107u)
return false;
// We should fail above during MFT_MESSAGE_SET_D3D_MANAGER if the decoder
@@ -2355,7 +2355,7 @@
// https://crbug.com/1160623 -- non 4:2:0 content hangs the decoder.
RETURN_AND_NOTIFY_ON_FAILURE(
- codec_ != kCodecH264 || config_change_detector_->IsYUV420(),
+ codec_ != VideoCodec::kH264 || config_change_detector_->IsYUV420(),
"Only 4:2:0 H.264 content is supported", PLATFORM_FAILURE, );
processing_config_changed_ = config_changed;
diff --git a/media/mojo/clients/mojo_audio_decoder_unittest.cc b/media/mojo/clients/mojo_audio_decoder_unittest.cc
index 8f4ad6c..8dd971fd 100644
--- a/media/mojo/clients/mojo_audio_decoder_unittest.cc
+++ b/media/mojo/clients/mojo_audio_decoder_unittest.cc
@@ -140,9 +140,9 @@
EXPECT_CALL(*this, OnInitialized(SameStatusCode(status)))
.WillOnce(InvokeWithoutArgs(this, &MojoAudioDecoderTest::QuitLoop));
- AudioDecoderConfig audio_config(kCodecVorbis, kSampleFormat, kChannelLayout,
- kDefaultSampleRate, EmptyExtraData(),
- EncryptionScheme::kUnencrypted);
+ AudioDecoderConfig audio_config(
+ AudioCodec::kVorbis, kSampleFormat, kChannelLayout, kDefaultSampleRate,
+ EmptyExtraData(), EncryptionScheme::kUnencrypted);
mojo_audio_decoder_->Initialize(
audio_config, nullptr,
diff --git a/media/mojo/mojom/audio_decoder_config_mojom_traits_unittest.cc b/media/mojo/mojom/audio_decoder_config_mojom_traits_unittest.cc
index c3423ac..b973116a 100644
--- a/media/mojo/mojom/audio_decoder_config_mojom_traits_unittest.cc
+++ b/media/mojo/mojom/audio_decoder_config_mojom_traits_unittest.cc
@@ -20,8 +20,8 @@
&kExtraData[0], &kExtraData[0] + base::size(kExtraData));
AudioDecoderConfig input;
- input.Initialize(kCodecAAC, kSampleFormatU8, CHANNEL_LAYOUT_SURROUND, 48000,
- kExtraDataVector, EncryptionScheme::kUnencrypted,
+ input.Initialize(AudioCodec::kAAC, kSampleFormatU8, CHANNEL_LAYOUT_SURROUND,
+ 48000, kExtraDataVector, EncryptionScheme::kUnencrypted,
base::TimeDelta(), 0);
std::vector<uint8_t> data =
media::mojom::AudioDecoderConfig::Serialize(&input);
@@ -34,8 +34,8 @@
TEST(AudioDecoderConfigStructTraitsTest,
ConvertAudioDecoderConfig_EmptyExtraData) {
AudioDecoderConfig input;
- input.Initialize(kCodecAAC, kSampleFormatU8, CHANNEL_LAYOUT_SURROUND, 48000,
- EmptyExtraData(), EncryptionScheme::kUnencrypted,
+ input.Initialize(AudioCodec::kAAC, kSampleFormatU8, CHANNEL_LAYOUT_SURROUND,
+ 48000, EmptyExtraData(), EncryptionScheme::kUnencrypted,
base::TimeDelta(), 0);
std::vector<uint8_t> data =
media::mojom::AudioDecoderConfig::Serialize(&input);
@@ -47,9 +47,9 @@
TEST(AudioDecoderConfigStructTraitsTest, ConvertAudioDecoderConfig_Encrypted) {
AudioDecoderConfig input;
- input.Initialize(kCodecAAC, kSampleFormatU8, CHANNEL_LAYOUT_SURROUND, 48000,
- EmptyExtraData(), EncryptionScheme::kCenc, base::TimeDelta(),
- 0);
+ input.Initialize(AudioCodec::kAAC, kSampleFormatU8, CHANNEL_LAYOUT_SURROUND,
+ 48000, EmptyExtraData(), EncryptionScheme::kCenc,
+ base::TimeDelta(), 0);
std::vector<uint8_t> data =
media::mojom::AudioDecoderConfig::Serialize(&input);
AudioDecoderConfig output;
@@ -61,8 +61,8 @@
TEST(AudioDecoderConfigStructTraitsTest,
ConvertAudioDecoderConfig_WithProfile) {
AudioDecoderConfig input;
- input.Initialize(kCodecAAC, kSampleFormatU8, CHANNEL_LAYOUT_SURROUND, 48000,
- EmptyExtraData(), EncryptionScheme::kUnencrypted,
+ input.Initialize(AudioCodec::kAAC, kSampleFormatU8, CHANNEL_LAYOUT_SURROUND,
+ 48000, EmptyExtraData(), EncryptionScheme::kUnencrypted,
base::TimeDelta(), 0);
input.set_profile(AudioCodecProfile::kXHE_AAC);
std::vector<uint8_t> data =
@@ -76,8 +76,8 @@
TEST(AudioDecoderConfigStructTraitsTest,
ConvertAudioDecoderConfig_DisableDiscardDecoderDelay) {
AudioDecoderConfig input;
- input.Initialize(kCodecAAC, kSampleFormatU8, CHANNEL_LAYOUT_SURROUND, 48000,
- EmptyExtraData(), EncryptionScheme::kUnencrypted,
+ input.Initialize(AudioCodec::kAAC, kSampleFormatU8, CHANNEL_LAYOUT_SURROUND,
+ 48000, EmptyExtraData(), EncryptionScheme::kUnencrypted,
base::TimeDelta(), 0);
input.disable_discard_decoder_delay();
std::vector<uint8_t> data =
@@ -92,8 +92,8 @@
TEST(AudioDecoderConfigStructTraitsTest,
ConvertAudioDecoderConfig_TargetOutputChannelLayout) {
AudioDecoderConfig input;
- input.Initialize(kCodecAAC, kSampleFormatU8, CHANNEL_LAYOUT_SURROUND, 48000,
- EmptyExtraData(), EncryptionScheme::kUnencrypted,
+ input.Initialize(AudioCodec::kAAC, kSampleFormatU8, CHANNEL_LAYOUT_SURROUND,
+ 48000, EmptyExtraData(), EncryptionScheme::kUnencrypted,
base::TimeDelta(), 0);
input.set_target_output_channel_layout(CHANNEL_LAYOUT_5_1);
std::vector<uint8_t> data =
diff --git a/media/mojo/mojom/video_decoder_config_mojom_traits_unittest.cc b/media/mojo/mojom/video_decoder_config_mojom_traits_unittest.cc
index e62e951..666d790 100644
--- a/media/mojo/mojom/video_decoder_config_mojom_traits_unittest.cc
+++ b/media/mojo/mojom/video_decoder_config_mojom_traits_unittest.cc
@@ -25,10 +25,11 @@
const uint8_t kExtraData[] = "config extra data";
const std::vector<uint8_t> kExtraDataVector(
&kExtraData[0], &kExtraData[0] + base::size(kExtraData));
- VideoDecoderConfig input(
- kCodecVP8, VP8PROFILE_ANY, VideoDecoderConfig::AlphaMode::kIsOpaque,
- VideoColorSpace(), kNoTransformation, kCodedSize, kVisibleRect,
- kNaturalSize, kExtraDataVector, EncryptionScheme::kUnencrypted);
+ VideoDecoderConfig input(VideoCodec::kVP8, VP8PROFILE_ANY,
+ VideoDecoderConfig::AlphaMode::kIsOpaque,
+ VideoColorSpace(), kNoTransformation, kCodedSize,
+ kVisibleRect, kNaturalSize, kExtraDataVector,
+ EncryptionScheme::kUnencrypted);
std::vector<uint8_t> data =
media::mojom::VideoDecoderConfig::Serialize(&input);
VideoDecoderConfig output;
@@ -39,10 +40,11 @@
TEST(VideoDecoderConfigStructTraitsTest,
ConvertVideoDecoderConfig_EmptyExtraData) {
- VideoDecoderConfig input(
- kCodecVP8, VP8PROFILE_ANY, VideoDecoderConfig::AlphaMode::kIsOpaque,
- VideoColorSpace(), kNoTransformation, kCodedSize, kVisibleRect,
- kNaturalSize, EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ VideoDecoderConfig input(VideoCodec::kVP8, VP8PROFILE_ANY,
+ VideoDecoderConfig::AlphaMode::kIsOpaque,
+ VideoColorSpace(), kNoTransformation, kCodedSize,
+ kVisibleRect, kNaturalSize, EmptyExtraData(),
+ EncryptionScheme::kUnencrypted);
std::vector<uint8_t> data =
media::mojom::VideoDecoderConfig::Serialize(&input);
VideoDecoderConfig output;
@@ -52,10 +54,11 @@
}
TEST(VideoDecoderConfigStructTraitsTest, ConvertVideoDecoderConfig_Encrypted) {
- VideoDecoderConfig input(
- kCodecVP8, VP8PROFILE_ANY, VideoDecoderConfig::AlphaMode::kIsOpaque,
- VideoColorSpace(), kNoTransformation, kCodedSize, kVisibleRect,
- kNaturalSize, EmptyExtraData(), EncryptionScheme::kCenc);
+ VideoDecoderConfig input(VideoCodec::kVP8, VP8PROFILE_ANY,
+ VideoDecoderConfig::AlphaMode::kIsOpaque,
+ VideoColorSpace(), kNoTransformation, kCodedSize,
+ kVisibleRect, kNaturalSize, EmptyExtraData(),
+ EncryptionScheme::kCenc);
std::vector<uint8_t> data =
media::mojom::VideoDecoderConfig::Serialize(&input);
VideoDecoderConfig output;
@@ -67,7 +70,8 @@
TEST(VideoDecoderConfigStructTraitsTest,
ConvertVideoDecoderConfig_ColorSpaceInfo) {
VideoDecoderConfig input(
- kCodecVP8, VP8PROFILE_ANY, VideoDecoderConfig::AlphaMode::kIsOpaque,
+ VideoCodec::kVP8, VP8PROFILE_ANY,
+ VideoDecoderConfig::AlphaMode::kIsOpaque,
VideoColorSpace(VideoColorSpace::PrimaryID::BT2020,
VideoColorSpace::TransferID::SMPTEST2084,
VideoColorSpace::MatrixID::BT2020_CL,
@@ -84,10 +88,11 @@
TEST(VideoDecoderConfigStructTraitsTest,
ConvertVideoDecoderConfig_HDRMetadata) {
- VideoDecoderConfig input(
- kCodecVP8, VP8PROFILE_ANY, VideoDecoderConfig::AlphaMode::kIsOpaque,
- VideoColorSpace(), kNoTransformation, kCodedSize, kVisibleRect,
- kNaturalSize, EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ VideoDecoderConfig input(VideoCodec::kVP8, VP8PROFILE_ANY,
+ VideoDecoderConfig::AlphaMode::kIsOpaque,
+ VideoColorSpace(), kNoTransformation, kCodedSize,
+ kVisibleRect, kNaturalSize, EmptyExtraData(),
+ EncryptionScheme::kUnencrypted);
gfx::HDRMetadata hdr_metadata;
hdr_metadata.max_frame_average_light_level = 123;
hdr_metadata.max_content_light_level = 456;
@@ -126,10 +131,11 @@
// Next try an non-empty invalid config. Natural size must not be zero.
const gfx::Size kInvalidNaturalSize(0, 0);
- input.Initialize(
- kCodecVP8, VP8PROFILE_ANY, VideoDecoderConfig::AlphaMode::kIsOpaque,
- VideoColorSpace(), kNoTransformation, kCodedSize, kVisibleRect,
- kInvalidNaturalSize, EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ input.Initialize(VideoCodec::kVP8, VP8PROFILE_ANY,
+ VideoDecoderConfig::AlphaMode::kIsOpaque, VideoColorSpace(),
+ kNoTransformation, kCodedSize, kVisibleRect,
+ kInvalidNaturalSize, EmptyExtraData(),
+ EncryptionScheme::kUnencrypted);
EXPECT_FALSE(input.IsValidConfig());
// Deserialize should again fail due to invalid config.
diff --git a/media/mojo/services/media_foundation_service.cc b/media/mojo/services/media_foundation_service.cc
index 3cbb55a..fab9976 100644
--- a/media/mojo/services/media_foundation_service.cc
+++ b/media/mojo/services/media_foundation_service.cc
@@ -51,23 +51,22 @@
// to query.
constexpr VideoCodec kAllVideoCodecs[] = {
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- VideoCodec::kCodecH264,
+ VideoCodec::kH264,
#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
- VideoCodec::kCodecHEVC,
+ VideoCodec::kHEVC,
#if BUILDFLAG(ENABLE_PLATFORM_DOLBY_VISION)
- VideoCodec::kCodecDolbyVision,
+ VideoCodec::kDolbyVision,
#endif // BUILDFLAG(ENABLE_PLATFORM_DOLBY_VISION)
#endif // BUILDFLAG(ENABLE_PLATFORM_HEVC)
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
- VideoCodec::kCodecVP9, VideoCodec::kCodecAV1};
+ VideoCodec::kVP9, VideoCodec::kAV1};
constexpr AudioCodec kAllAudioCodecs[] = {
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- AudioCodec::kCodecAAC, AudioCodec::kCodecEAC3,
- AudioCodec::kCodecAC3, AudioCodec::kCodecMpegHAudio,
+ AudioCodec::kAAC, AudioCodec::kEAC3, AudioCodec::kAC3,
+ AudioCodec::kMpegHAudio,
#endif
- AudioCodec::kCodecVorbis, AudioCodec::kCodecFLAC,
- AudioCodec::kCodecOpus};
+ AudioCodec::kVorbis, AudioCodec::kFLAC, AudioCodec::kOpus};
constexpr EncryptionScheme kAllEncryptionSchemes[] = {EncryptionScheme::kCenc,
EncryptionScheme::kCbcs};
@@ -84,15 +83,15 @@
std::string GetFourCCString(VideoCodec codec) {
switch (codec) {
- case VideoCodec::kCodecH264:
+ case VideoCodec::kH264:
return "avc1";
- case VideoCodec::kCodecVP9:
+ case VideoCodec::kVP9:
return "vp09";
- case VideoCodec::kCodecHEVC:
+ case VideoCodec::kHEVC:
return "hvc1";
- case VideoCodec::kCodecDolbyVision:
+ case VideoCodec::kDolbyVision:
return "dvhe";
- case VideoCodec::kCodecAV1:
+ case VideoCodec::kAV1:
return "av01";
default:
NOTREACHED()
@@ -104,19 +103,19 @@
std::string GetFourCCString(AudioCodec codec) {
switch (codec) {
- case AudioCodec::kCodecAAC:
+ case AudioCodec::kAAC:
return "mp4a";
- case AudioCodec::kCodecVorbis:
+ case AudioCodec::kVorbis:
return "vrbs";
- case AudioCodec::kCodecFLAC:
+ case AudioCodec::kFLAC:
return "fLaC";
- case AudioCodec::kCodecOpus:
+ case AudioCodec::kOpus:
return "Opus";
- case AudioCodec::kCodecEAC3:
+ case AudioCodec::kEAC3:
return "ec-3";
- case AudioCodec::kCodecAC3:
+ case AudioCodec::kAC3:
return "ac-3";
- case AudioCodec::kCodecMpegHAudio:
+ case AudioCodec::kMpegHAudio:
return "mhm1";
default:
NOTREACHED()
diff --git a/media/mojo/services/media_metrics_provider.cc b/media/mojo/services/media_metrics_provider.cc
index bb121c2..214af2f 100644
--- a/media/mojo/services/media_metrics_provider.cc
+++ b/media/mojo/services/media_metrics_provider.cc
@@ -102,13 +102,13 @@
const PipelineInfo& player_info) {
constexpr char kPipelineUmaPrefix[] = "Media.PipelineStatus.AudioVideo.";
std::string uma_name = kPipelineUmaPrefix;
- if (player_info.video_codec == kCodecVP8)
+ if (player_info.video_codec == VideoCodec::kVP8)
uma_name += "VP8.";
- else if (player_info.video_codec == kCodecVP9)
+ else if (player_info.video_codec == VideoCodec::kVP9)
uma_name += "VP9.";
- else if (player_info.video_codec == kCodecH264)
+ else if (player_info.video_codec == VideoCodec::kH264)
uma_name += "H264.";
- else if (player_info.video_codec == kCodecAV1)
+ else if (player_info.video_codec == VideoCodec::kAV1)
uma_name += "AV1.";
else
return uma_name + "Other";
diff --git a/media/mojo/services/media_metrics_provider_unittest.cc b/media/mojo/services/media_metrics_provider_unittest.cc
index 56c3030215..8d88455 100644
--- a/media/mojo/services/media_metrics_provider_unittest.cc
+++ b/media/mojo/services/media_metrics_provider_unittest.cc
@@ -213,8 +213,8 @@
{false, false, AudioDecoderType::kMojo, EncryptionType::kClear});
provider_->SetVideoPipelineInfo(
{false, false, VideoDecoderType::kMojo, EncryptionType::kEncrypted});
- provider_->SetHasAudio(AudioCodec::kCodecVorbis);
- provider_->SetHasVideo(VideoCodec::kCodecVP9);
+ provider_->SetHasAudio(AudioCodec::kVorbis);
+ provider_->SetHasVideo(VideoCodec::kVP9);
provider_->SetHasPlayed();
provider_->SetHaveEnough();
provider_.reset();
@@ -233,8 +233,8 @@
{false, false, AudioDecoderType::kMojo, EncryptionType::kClear});
provider_->SetVideoPipelineInfo(
{false, false, VideoDecoderType::kMojo, EncryptionType::kEncrypted});
- provider_->SetHasAudio(AudioCodec::kCodecVorbis);
- provider_->SetHasVideo(VideoCodec::kCodecVP9);
+ provider_->SetHasAudio(AudioCodec::kVorbis);
+ provider_->SetHasVideo(VideoCodec::kVP9);
provider_->SetHasPlayed();
provider_->SetHaveEnough();
provider_.reset();
@@ -251,7 +251,7 @@
provider_->SetIsEME();
provider_->SetVideoPipelineInfo(
{true, true, VideoDecoderType::kMojo, EncryptionType::kEncrypted});
- provider_->SetHasVideo(VideoCodec::kCodecAV1);
+ provider_->SetHasVideo(VideoCodec::kAV1);
provider_->SetHasPlayed();
provider_->SetHaveEnough();
provider_.reset();
@@ -271,8 +271,8 @@
{false, false, AudioDecoderType::kMojo, EncryptionType::kClear});
provider_->SetVideoPipelineInfo(
{true, false, VideoDecoderType::kD3D11, EncryptionType::kEncrypted});
- provider_->SetHasVideo(VideoCodec::kCodecVP9);
- provider_->SetHasAudio(AudioCodec::kCodecVorbis);
+ provider_->SetHasVideo(VideoCodec::kVP9);
+ provider_->SetHasAudio(AudioCodec::kVorbis);
provider_->SetHasPlayed();
provider_->SetHaveEnough();
provider_->SetVideoPipelineInfo({true, false, VideoDecoderType::kFFmpeg});
@@ -289,8 +289,8 @@
Initialize(false, false, false, kTestOrigin, mojom::MediaURLScheme::kHttps);
provider_->SetIsEME();
provider_->SetRendererType(RendererType::kMediaFoundation);
- provider_->SetHasVideo(VideoCodec::kCodecVP9);
- provider_->SetHasAudio(AudioCodec::kCodecVorbis);
+ provider_->SetHasVideo(VideoCodec::kVP9);
+ provider_->SetHasAudio(AudioCodec::kVorbis);
provider_->SetHasPlayed();
provider_->SetHaveEnough();
provider_.reset();
diff --git a/media/mojo/services/watch_time_recorder.cc b/media/mojo/services/watch_time_recorder.cc
index 414751e..c630537 100644
--- a/media/mojo/services/watch_time_recorder.cc
+++ b/media/mojo/services/watch_time_recorder.cc
@@ -201,8 +201,8 @@
// update without creating a whole new record. Not checking
// audio_encryption_scheme and video_encryption_scheme as we want to
// capture changes in encryption schemes.
- if (last_record.secondary_properties->audio_codec == kUnknownAudioCodec ||
- last_record.secondary_properties->video_codec == kUnknownVideoCodec ||
+ if (last_record.secondary_properties->audio_codec == AudioCodec::kUnknown ||
+ last_record.secondary_properties->video_codec == VideoCodec::kUnknown ||
last_record.secondary_properties->audio_codec_profile ==
AudioCodecProfile::kUnknown ||
last_record.secondary_properties->video_codec_profile ==
@@ -212,9 +212,9 @@
last_record.secondary_properties->video_decoder ==
VideoDecoderType::kUnknown) {
auto temp_props = last_record.secondary_properties.Clone();
- if (last_record.secondary_properties->audio_codec == kUnknownAudioCodec)
+ if (last_record.secondary_properties->audio_codec == AudioCodec::kUnknown)
temp_props->audio_codec = secondary_properties->audio_codec;
- if (last_record.secondary_properties->video_codec == kUnknownVideoCodec)
+ if (last_record.secondary_properties->video_codec == VideoCodec::kUnknown)
temp_props->video_codec = secondary_properties->video_codec;
if (last_record.secondary_properties->audio_codec_profile ==
AudioCodecProfile::kUnknown) {
@@ -425,8 +425,10 @@
}
// See note in mojom::PlaybackProperties about why we have both of these.
- builder.SetAudioCodec(ukm_record.secondary_properties->audio_codec);
- builder.SetVideoCodec(ukm_record.secondary_properties->video_codec);
+ builder.SetAudioCodec(
+ static_cast<int64_t>(ukm_record.secondary_properties->audio_codec));
+ builder.SetVideoCodec(
+ static_cast<int64_t>(ukm_record.secondary_properties->video_codec));
builder.SetAudioCodecProfile(static_cast<int64_t>(
ukm_record.secondary_properties->audio_codec_profile));
builder.SetVideoCodecProfile(
@@ -434,7 +436,7 @@
builder.SetHasAudio(properties_->has_audio);
builder.SetHasVideo(properties_->has_video);
- if (ukm_record.secondary_properties->audio_codec == kCodecAAC)
+ if (ukm_record.secondary_properties->audio_codec == AudioCodec::kAAC)
aac_profiles.insert(ukm_record.secondary_properties->audio_codec_profile);
builder.SetAudioDecoderName(
diff --git a/media/mojo/services/watch_time_recorder_unittest.cc b/media/mojo/services/watch_time_recorder_unittest.cc
index 8a235e4..5c26909 100644
--- a/media/mojo/services/watch_time_recorder_unittest.cc
+++ b/media/mojo/services/watch_time_recorder_unittest.cc
@@ -177,10 +177,10 @@
mojom::SecondaryPlaybackPropertiesPtr CreateSecondaryProperties() {
return mojom::SecondaryPlaybackProperties::New(
- kCodecAAC, kCodecH264, AudioCodecProfile::kUnknown, H264PROFILE_MAIN,
- AudioDecoderType::kUnknown, VideoDecoderType::kUnknown,
- EncryptionScheme::kUnencrypted, EncryptionScheme::kUnencrypted,
- gfx::Size(800, 600));
+ AudioCodec::kAAC, VideoCodec::kH264, AudioCodecProfile::kUnknown,
+ H264PROFILE_MAIN, AudioDecoderType::kUnknown,
+ VideoDecoderType::kUnknown, EncryptionScheme::kUnencrypted,
+ EncryptionScheme::kUnencrypted, gfx::Size(800, 600));
}
ukm::SourceId GetSourceId() { return source_id_; }
@@ -681,8 +681,10 @@
EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
@@ -761,8 +763,10 @@
EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
@@ -810,10 +814,10 @@
false, mojom::MediaStreamType::kNone);
mojom::SecondaryPlaybackPropertiesPtr secondary_properties =
mojom::SecondaryPlaybackProperties::New(
- kCodecAAC, kCodecH264, AudioCodecProfile::kXHE_AAC, H264PROFILE_MAIN,
- AudioDecoderType::kUnknown, VideoDecoderType::kUnknown,
- EncryptionScheme::kCenc, EncryptionScheme::kCbcs,
- gfx::Size(800, 600));
+ AudioCodec::kAAC, VideoCodec::kH264, AudioCodecProfile::kXHE_AAC,
+ H264PROFILE_MAIN, AudioDecoderType::kUnknown,
+ VideoDecoderType::kUnknown, EncryptionScheme::kCenc,
+ EncryptionScheme::kCbcs, gfx::Size(800, 600));
Initialize(properties.Clone());
wtr_->UpdateSecondaryProperties(secondary_properties.Clone());
@@ -833,8 +837,10 @@
EXPECT_UKM(UkmEntry::kWatchTimeName, kWatchTime.InMilliseconds());
EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
@@ -881,7 +887,7 @@
false, mojom::MediaStreamType::kNone);
mojom::SecondaryPlaybackPropertiesPtr secondary_properties =
mojom::SecondaryPlaybackProperties::New(
- kCodecOpus, kCodecVP9, AudioCodecProfile::kUnknown,
+ AudioCodec::kOpus, VideoCodec::kVP9, AudioCodecProfile::kUnknown,
VP9PROFILE_PROFILE0, AudioDecoderType::kUnknown,
VideoDecoderType::kUnknown, EncryptionScheme::kUnencrypted,
EncryptionScheme::kUnencrypted, gfx::Size(800, 600));
@@ -955,8 +961,10 @@
EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
@@ -1002,7 +1010,7 @@
wtr_.reset();
base::RunLoop().RunUntilIdle();
- if (secondary_properties->audio_codec == kCodecAAC) {
+ if (secondary_properties->audio_codec == AudioCodec::kAAC) {
ExpectAacAudioCodecProfileHistogram(
secondary_properties->audio_codec_profile);
}
@@ -1015,8 +1023,10 @@
EXPECT_UKM(UkmEntry::kWatchTimeName, kWatchTime.InMilliseconds());
EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
@@ -1071,7 +1081,7 @@
wtr_.reset();
base::RunLoop().RunUntilIdle();
- if (secondary_properties->audio_codec == kCodecAAC) {
+ if (secondary_properties->audio_codec == AudioCodec::kAAC) {
ExpectAacAudioCodecProfileHistogram(
secondary_properties->audio_codec_profile);
}
@@ -1083,8 +1093,10 @@
EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
@@ -1141,7 +1153,7 @@
wtr_.reset();
base::RunLoop().RunUntilIdle();
- if (secondary_properties->audio_codec == kCodecAAC) {
+ if (secondary_properties->audio_codec == AudioCodec::kAAC) {
ExpectAacAudioCodecProfileHistogram(
secondary_properties->audio_codec_profile);
}
@@ -1153,8 +1165,10 @@
EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
@@ -1254,10 +1268,11 @@
false, mojom::MediaStreamType::kNone);
mojom::SecondaryPlaybackPropertiesPtr secondary_properties1 =
mojom::SecondaryPlaybackProperties::New(
- kUnknownAudioCodec, kUnknownVideoCodec, AudioCodecProfile::kUnknown,
- VIDEO_CODEC_PROFILE_UNKNOWN, AudioDecoderType::kUnknown,
- VideoDecoderType::kUnknown, EncryptionScheme::kUnencrypted,
- EncryptionScheme::kUnencrypted, gfx::Size(800, 600));
+ AudioCodec::kUnknown, VideoCodec::kUnknown,
+ AudioCodecProfile::kUnknown, VIDEO_CODEC_PROFILE_UNKNOWN,
+ AudioDecoderType::kUnknown, VideoDecoderType::kUnknown,
+ EncryptionScheme::kUnencrypted, EncryptionScheme::kUnencrypted,
+ gfx::Size(800, 600));
Initialize(properties.Clone());
wtr_->UpdateSecondaryProperties(secondary_properties1.Clone());
@@ -1266,10 +1281,10 @@
mojom::SecondaryPlaybackPropertiesPtr secondary_properties2 =
mojom::SecondaryPlaybackProperties::New(
- kCodecAAC, kCodecH264, AudioCodecProfile::kXHE_AAC, H264PROFILE_MAIN,
- AudioDecoderType::kFFmpeg, VideoDecoderType::kFFmpeg,
- EncryptionScheme::kUnencrypted, EncryptionScheme::kUnencrypted,
- gfx::Size(800, 600));
+ AudioCodec::kAAC, VideoCodec::kH264, AudioCodecProfile::kXHE_AAC,
+ H264PROFILE_MAIN, AudioDecoderType::kFFmpeg,
+ VideoDecoderType::kFFmpeg, EncryptionScheme::kUnencrypted,
+ EncryptionScheme::kUnencrypted, gfx::Size(800, 600));
wtr_->UpdateSecondaryProperties(secondary_properties2.Clone());
wtr_.reset();
@@ -1298,8 +1313,10 @@
EXPECT_UKM(UkmEntry::kRebuffersCountName, 0);
EXPECT_UKM(UkmEntry::kCompletedRebuffersCountName, 0);
EXPECT_UKM(UkmEntry::kCompletedRebuffersDurationName, 0);
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties2->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties2->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties2->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties2->video_codec));
EXPECT_UKM(
UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties2->audio_codec_profile));
@@ -1325,7 +1342,7 @@
false, mojom::MediaStreamType::kNone);
mojom::SecondaryPlaybackPropertiesPtr secondary_properties1 =
mojom::SecondaryPlaybackProperties::New(
- kCodecOpus, kCodecVP9, AudioCodecProfile::kUnknown,
+ AudioCodec::kOpus, VideoCodec::kVP9, AudioCodecProfile::kUnknown,
VP9PROFILE_PROFILE0, AudioDecoderType::kMojo, VideoDecoderType::kMojo,
EncryptionScheme::kUnencrypted, EncryptionScheme::kUnencrypted,
gfx::Size(400, 300));
@@ -1346,10 +1363,10 @@
mojom::SecondaryPlaybackPropertiesPtr secondary_properties2 =
mojom::SecondaryPlaybackProperties::New(
- kCodecAAC, kCodecH264, AudioCodecProfile::kUnknown, H264PROFILE_MAIN,
- AudioDecoderType::kFFmpeg, VideoDecoderType::kFFmpeg,
- EncryptionScheme::kCenc, EncryptionScheme::kCenc,
- gfx::Size(800, 600));
+ AudioCodec::kAAC, VideoCodec::kH264, AudioCodecProfile::kUnknown,
+ H264PROFILE_MAIN, AudioDecoderType::kFFmpeg,
+ VideoDecoderType::kFFmpeg, EncryptionScheme::kCenc,
+ EncryptionScheme::kCenc, gfx::Size(800, 600));
wtr_->UpdateSecondaryProperties(secondary_properties2.Clone());
constexpr base::TimeDelta kWatchTime2 = base::TimeDelta::FromSeconds(25);
@@ -1405,8 +1422,10 @@
kUnderflowDuration.InMilliseconds());
EXPECT_UKM(UkmEntry::kVideoFramesDecodedName, kDecodedFrameCount1);
EXPECT_UKM(UkmEntry::kVideoFramesDroppedName, kDroppedFrameCount1);
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties1->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties1->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties1->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties1->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties1->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
@@ -1435,8 +1454,10 @@
EXPECT_UKM(UkmEntry::kVideoFramesDroppedName, kDroppedFrameCount2);
EXPECT_UKM(UkmEntry::kCompletedRebuffersDurationName, 0);
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties2->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties2->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties2->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties2->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties2->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
@@ -1459,7 +1480,7 @@
false, mojom::MediaStreamType::kNone);
mojom::SecondaryPlaybackPropertiesPtr secondary_properties1 =
mojom::SecondaryPlaybackProperties::New(
- kCodecOpus, kCodecVP9, AudioCodecProfile::kUnknown,
+ AudioCodec::kOpus, VideoCodec::kVP9, AudioCodecProfile::kUnknown,
VP9PROFILE_PROFILE0, AudioDecoderType::kMojo, VideoDecoderType::kMojo,
EncryptionScheme::kUnencrypted, EncryptionScheme::kUnencrypted,
gfx::Size(400, 300));
@@ -1480,10 +1501,10 @@
mojom::SecondaryPlaybackPropertiesPtr secondary_properties2 =
mojom::SecondaryPlaybackProperties::New(
- kCodecAAC, kCodecH264, AudioCodecProfile::kXHE_AAC, H264PROFILE_MAIN,
- AudioDecoderType::kFFmpeg, VideoDecoderType::kFFmpeg,
- EncryptionScheme::kUnencrypted, EncryptionScheme::kUnencrypted,
- gfx::Size(800, 600));
+ AudioCodec::kAAC, VideoCodec::kH264, AudioCodecProfile::kXHE_AAC,
+ H264PROFILE_MAIN, AudioDecoderType::kFFmpeg,
+ VideoDecoderType::kFFmpeg, EncryptionScheme::kUnencrypted,
+ EncryptionScheme::kUnencrypted, gfx::Size(800, 600));
wtr_->UpdateSecondaryProperties(secondary_properties2.Clone());
// Don't record any watch time to the new record, it should report zero watch
@@ -1524,8 +1545,10 @@
kUnderflowDuration.InMilliseconds());
EXPECT_UKM(UkmEntry::kVideoFramesDecodedName, kDecodedFrameCount1);
EXPECT_UKM(UkmEntry::kVideoFramesDroppedName, kDroppedFrameCount1);
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties1->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties1->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties1->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties1->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties1->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
@@ -1551,8 +1574,10 @@
EXPECT_UKM(UkmEntry::kCompletedRebuffersDurationName, 0);
EXPECT_UKM(UkmEntry::kVideoFramesDecodedName, 0);
EXPECT_UKM(UkmEntry::kVideoFramesDroppedName, 0);
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties2->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties2->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties2->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties2->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties2->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
@@ -1575,7 +1600,7 @@
false, mojom::MediaStreamType::kNone);
mojom::SecondaryPlaybackPropertiesPtr secondary_properties1 =
mojom::SecondaryPlaybackProperties::New(
- kCodecOpus, kCodecVP9, AudioCodecProfile::kUnknown,
+ AudioCodec::kOpus, VideoCodec::kVP9, AudioCodecProfile::kUnknown,
VP9PROFILE_PROFILE0, AudioDecoderType::kMojo, VideoDecoderType::kMojo,
EncryptionScheme::kCbcs, EncryptionScheme::kCbcs,
gfx::Size(400, 300));
@@ -1600,10 +1625,10 @@
mojom::SecondaryPlaybackPropertiesPtr secondary_properties2 =
mojom::SecondaryPlaybackProperties::New(
- kCodecAAC, kCodecH264, AudioCodecProfile::kXHE_AAC, H264PROFILE_MAIN,
- AudioDecoderType::kFFmpeg, VideoDecoderType::kFFmpeg,
- EncryptionScheme::kUnencrypted, EncryptionScheme::kUnencrypted,
- gfx::Size(800, 600));
+ AudioCodec::kAAC, VideoCodec::kH264, AudioCodecProfile::kXHE_AAC,
+ H264PROFILE_MAIN, AudioDecoderType::kFFmpeg,
+ VideoDecoderType::kFFmpeg, EncryptionScheme::kUnencrypted,
+ EncryptionScheme::kUnencrypted, gfx::Size(800, 600));
wtr_->UpdateSecondaryProperties(secondary_properties2.Clone());
constexpr base::TimeDelta kWatchTime2 = base::TimeDelta::FromSeconds(25);
@@ -1650,8 +1675,10 @@
kUnderflowDuration.InMilliseconds());
EXPECT_UKM(UkmEntry::kVideoFramesDecodedName, kDecodedFrameCount1);
EXPECT_UKM(UkmEntry::kVideoFramesDroppedName, kDroppedFrameCount1);
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties1->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties1->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties1->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties1->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties1->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
@@ -1679,8 +1706,10 @@
EXPECT_UKM(UkmEntry::kCompletedRebuffersDurationName, 0);
EXPECT_UKM(UkmEntry::kVideoFramesDecodedName, 0);
EXPECT_UKM(UkmEntry::kVideoFramesDroppedName, 0);
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties2->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties2->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties2->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties2->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties2->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
@@ -1703,7 +1732,7 @@
false, mojom::MediaStreamType::kNone);
mojom::SecondaryPlaybackPropertiesPtr secondary_properties1 =
mojom::SecondaryPlaybackProperties::New(
- kCodecOpus, kCodecVP9, AudioCodecProfile::kUnknown,
+ AudioCodec::kOpus, VideoCodec::kVP9, AudioCodecProfile::kUnknown,
VP9PROFILE_PROFILE0, AudioDecoderType::kMojo, VideoDecoderType::kMojo,
EncryptionScheme::kCbcs, EncryptionScheme::kCbcs,
gfx::Size(400, 300));
@@ -1722,10 +1751,10 @@
mojom::SecondaryPlaybackPropertiesPtr secondary_properties2 =
mojom::SecondaryPlaybackProperties::New(
- kCodecAAC, kCodecH264, AudioCodecProfile::kXHE_AAC, H264PROFILE_MAIN,
- AudioDecoderType::kFFmpeg, VideoDecoderType::kFFmpeg,
- EncryptionScheme::kUnencrypted, EncryptionScheme::kUnencrypted,
- gfx::Size(800, 600));
+ AudioCodec::kAAC, VideoCodec::kH264, AudioCodecProfile::kXHE_AAC,
+ H264PROFILE_MAIN, AudioDecoderType::kFFmpeg,
+ VideoDecoderType::kFFmpeg, EncryptionScheme::kUnencrypted,
+ EncryptionScheme::kUnencrypted, gfx::Size(800, 600));
wtr_->UpdateSecondaryProperties(secondary_properties2.Clone());
constexpr base::TimeDelta kWatchTime2 = base::TimeDelta::FromSeconds(25);
@@ -1780,8 +1809,10 @@
EXPECT_UKM(UkmEntry::kCompletedRebuffersCountName, kUnderflowCount1 - 1);
EXPECT_UKM(UkmEntry::kCompletedRebuffersDurationName,
kUnderflowDuration.InMilliseconds());
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties1->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties1->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties1->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties1->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties1->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
@@ -1808,8 +1839,10 @@
EXPECT_UKM(UkmEntry::kCompletedRebuffersCountName, 1);
EXPECT_UKM(UkmEntry::kCompletedRebuffersDurationName,
(kUnderflowDuration * 1.5 - kUnderflowDuration).InMilliseconds());
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties2->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties2->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties2->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties2->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties2->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
diff --git a/media/muxers/webm_muxer.cc b/media/muxers/webm_muxer.cc
index c2fe59e1..dd53bfb1e 100644
--- a/media/muxers/webm_muxer.cc
+++ b/media/muxers/webm_muxer.cc
@@ -97,13 +97,13 @@
static const char* MkvCodeIcForMediaVideoCodecId(VideoCodec video_codec) {
switch (video_codec) {
- case kCodecVP8:
+ case VideoCodec::kVP8:
return mkvmuxer::Tracks::kVp8CodecId;
- case kCodecVP9:
+ case VideoCodec::kVP9:
return mkvmuxer::Tracks::kVp9CodecId;
- case kCodecAV1:
+ case VideoCodec::kAV1:
return mkvmuxer::Tracks::kAv1CodecId;
- case kCodecH264:
+ case VideoCodec::kH264:
return kH264CodecId;
default:
NOTREACHED() << "Unsupported codec " << GetCodecName(video_codec);
@@ -204,7 +204,7 @@
scoped_refptr<media::VideoFrame> frame)
: visible_rect_size(frame->visible_rect().size()),
frame_rate(frame->metadata().frame_rate.value_or(0.0)),
- codec(kUnknownVideoCodec),
+ codec(VideoCodec::kUnknown),
color_space(frame->ColorSpace()) {}
WebmMuxer::VideoParameters::VideoParameters(
@@ -229,7 +229,7 @@
bool has_audio,
std::unique_ptr<Delegate> delegate)
: audio_codec_(audio_codec),
- video_codec_(kUnknownVideoCodec),
+ video_codec_(VideoCodec::kUnknown),
video_track_index_(0),
audio_track_index_(0),
has_video_(has_video),
@@ -238,7 +238,7 @@
force_one_libwebm_error_(false) {
DCHECK(has_video_ || has_audio_);
DCHECK(delegate_);
- DCHECK(audio_codec == kCodecOpus || audio_codec == kCodecPCM)
+ DCHECK(audio_codec == AudioCodec::kOpus || audio_codec == AudioCodec::kPCM)
<< " Unsupported audio codec: " << GetCodecName(audio_codec);
delegate_->InitSegment(&segment_);
@@ -268,10 +268,10 @@
bool is_key_frame) {
DVLOG(2) << __func__ << " - " << encoded_data.size() << "B";
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- DCHECK(params.codec == kCodecVP8 || params.codec == kCodecVP9 ||
- params.codec == kCodecH264 || params.codec == kCodecAV1)
+ DCHECK(params.codec == VideoCodec::kVP8 || params.codec == VideoCodec::kVP9 ||
+ params.codec == VideoCodec::kH264 || params.codec == VideoCodec::kAV1)
<< " Unsupported video codec: " << GetCodecName(params.codec);
- DCHECK(video_codec_ == kUnknownVideoCodec || video_codec_ == params.codec)
+ DCHECK(video_codec_ == VideoCodec::kUnknown || video_codec_ == params.codec)
<< "Unsupported: codec switched, to: " << GetCodecName(params.codec);
if (encoded_data.size() == 0u) {
@@ -292,7 +292,7 @@
last_frame_timestamp_video_ = first_frame_timestamp_video_;
}
// Add codec private for AV1.
- if (params.codec == kCodecAV1 &&
+ if (params.codec == VideoCodec::kAV1 &&
!segment_.GetTrackByNumber(video_track_index_)
->SetCodecPrivate(av1::codec_private, sizeof(av1::codec_private)))
LOG(ERROR) << __func__ << " failed to set CodecPrivate for AV1.";
@@ -409,7 +409,7 @@
DCHECK_EQ(1000000ull, segment_.GetSegmentInfo()->timecode_scale());
// Set alpha channel parameters for only VPX (crbug.com/711825).
- if (video_codec_ == kCodecH264)
+ if (video_codec_ == VideoCodec::kH264)
return;
video_track->SetAlphaMode(mkvmuxer::VideoTrack::kAlpha);
// Alpha channel, if present, is stored in a BlockAdditional next to the
@@ -445,7 +445,7 @@
// Audio data is always pcm_f32le.
audio_track->set_bit_depth(32u);
- if (audio_codec_ == kCodecOpus) {
+ if (audio_codec_ == AudioCodec::kOpus) {
audio_track->set_codec_id(mkvmuxer::Tracks::kOpusCodecId);
uint8_t opus_header[OPUS_EXTRADATA_SIZE];
@@ -457,7 +457,7 @@
// Segment's timestamps should be in milliseconds, DCHECK it. See
// http://www.webmproject.org/docs/container/#muxer-guidelines
DCHECK_EQ(1000000ull, segment_.GetSegmentInfo()->timecode_scale());
- } else if (audio_codec_ == kCodecPCM) {
+ } else if (audio_codec_ == AudioCodec::kPCM) {
audio_track->set_codec_id(kPcmCodecId);
}
}
diff --git a/media/muxers/webm_muxer_fuzzertest.cc b/media/muxers/webm_muxer_fuzzertest.cc
index c9abb04..0d241ef5 100644
--- a/media/muxers/webm_muxer_fuzzertest.cc
+++ b/media/muxers/webm_muxer_fuzzertest.cc
@@ -23,10 +23,10 @@
const int kMinNumIterations = 1;
const int kMaxNumIterations = 10;
-static const int kSupportedVideoCodecs[] = {media::kCodecVP8, media::kCodecVP9,
- media::kCodecH264};
-static const int kSupportedAudioCodecs[] = {media::kCodecOpus,
- media::kCodecPCM};
+static const media::VideoCodec kSupportedVideoCodecs[] = {
+ media::VideoCodec::kVP8, media::VideoCodec::kVP9, media::VideoCodec::kH264};
+static const media::AudioCodec kSupportedAudioCodecs[] = {
+ media::AudioCodec::kOpus, media::AudioCodec::kPCM};
static const int kSampleRatesInKHz[] = {48, 24, 16, 12, 8};
@@ -55,10 +55,10 @@
}
for (const auto& input_type : kVideoAudioInputTypes) {
- const auto video_codec = static_cast<media::VideoCodec>(
- kSupportedVideoCodecs[rng() % base::size(kSupportedVideoCodecs)]);
- const auto audio_codec = static_cast<media::AudioCodec>(
- kSupportedAudioCodecs[rng() % base::size(kSupportedAudioCodecs)]);
+ const auto video_codec =
+ kSupportedVideoCodecs[rng() % base::size(kSupportedVideoCodecs)];
+ const auto audio_codec =
+ kSupportedAudioCodecs[rng() % base::size(kSupportedAudioCodecs)];
media::WebmMuxer muxer(audio_codec, input_type.has_video,
input_type.has_audio,
std::make_unique<media::LiveWebmMuxerDelegate>(
diff --git a/media/muxers/webm_muxer_unittest.cc b/media/muxers/webm_muxer_unittest.cc
index 3a2798b..2879923c 100644
--- a/media/muxers/webm_muxer_unittest.cc
+++ b/media/muxers/webm_muxer_unittest.cc
@@ -310,7 +310,7 @@
}
TEST_P(WebmMuxerTest, ColorSpaceREC709IsPropagatedToTrack) {
- WebmMuxer::VideoParameters params(gfx::Size(1, 1), 0, media::kCodecVP9,
+ WebmMuxer::VideoParameters params(gfx::Size(1, 1), 0, media::VideoCodec::kVP9,
gfx::ColorSpace::CreateREC709());
webm_muxer_->OnEncodedVideo(params, "abab", {}, base::TimeTicks::Now(),
true /* keyframe */);
@@ -323,7 +323,7 @@
TEST_P(WebmMuxerTest, ColorSpaceExtendedSRGBIsPropagatedToTrack) {
WebmMuxer::VideoParameters params(
- gfx::Size(1, 1), 0, media::kCodecVP9,
+ gfx::Size(1, 1), 0, media::VideoCodec::kVP9,
gfx::ColorSpace(gfx::ColorSpace::PrimaryID::BT709,
gfx::ColorSpace::TransferID::IEC61966_2_1,
gfx::ColorSpace::MatrixID::BT709,
@@ -339,7 +339,7 @@
TEST_P(WebmMuxerTest, ColorSpaceHDR10IsPropagatedToTrack) {
WebmMuxer::VideoParameters params(
- gfx::Size(1, 1), 0, media::kCodecVP9,
+ gfx::Size(1, 1), 0, media::VideoCodec::kVP9,
gfx::ColorSpace(gfx::ColorSpace::PrimaryID::BT2020,
gfx::ColorSpace::TransferID::SMPTEST2084,
gfx::ColorSpace::MatrixID::BT2020_NCL,
@@ -356,7 +356,7 @@
TEST_P(WebmMuxerTest, ColorSpaceFullRangeHDR10IsPropagatedToTrack) {
WebmMuxer::VideoParameters params(
- gfx::Size(1, 1), 0, media::kCodecVP9,
+ gfx::Size(1, 1), 0, media::VideoCodec::kVP9,
gfx::ColorSpace(gfx::ColorSpace::PrimaryID::BT2020,
gfx::ColorSpace::TransferID::SMPTEST2084,
gfx::ColorSpace::MatrixID::BT2020_NCL,
@@ -425,20 +425,21 @@
}
const TestParams kTestCases[] = {
- {kCodecVP8, kCodecOpus, 1 /* num_video_tracks */, 0 /*num_audio_tracks*/},
- {kCodecVP8, kCodecOpus, 0, 1},
- {kCodecVP8, kCodecOpus, 1, 1},
- {kCodecVP9, kCodecOpus, 1, 0},
- {kCodecVP9, kCodecOpus, 0, 1},
- {kCodecVP9, kCodecOpus, 1, 1},
- {kCodecH264, kCodecOpus, 1, 0},
- {kCodecH264, kCodecOpus, 0, 1},
- {kCodecH264, kCodecOpus, 1, 1},
- {kCodecVP8, kCodecPCM, 0, 1},
- {kCodecVP8, kCodecPCM, 1, 1},
- {kCodecAV1, kCodecOpus, 1, 0},
- {kCodecAV1, kCodecOpus, 0, 1},
- {kCodecAV1, kCodecOpus, 1, 1},
+ {VideoCodec::kVP8, AudioCodec::kOpus, 1 /* num_video_tracks */,
+ 0 /*num_audio_tracks*/},
+ {VideoCodec::kVP8, AudioCodec::kOpus, 0, 1},
+ {VideoCodec::kVP8, AudioCodec::kOpus, 1, 1},
+ {VideoCodec::kVP9, AudioCodec::kOpus, 1, 0},
+ {VideoCodec::kVP9, AudioCodec::kOpus, 0, 1},
+ {VideoCodec::kVP9, AudioCodec::kOpus, 1, 1},
+ {VideoCodec::kH264, AudioCodec::kOpus, 1, 0},
+ {VideoCodec::kH264, AudioCodec::kOpus, 0, 1},
+ {VideoCodec::kH264, AudioCodec::kOpus, 1, 1},
+ {VideoCodec::kVP8, AudioCodec::kPCM, 0, 1},
+ {VideoCodec::kVP8, AudioCodec::kPCM, 1, 1},
+ {VideoCodec::kAV1, AudioCodec::kOpus, 1, 0},
+ {VideoCodec::kAV1, AudioCodec::kOpus, 0, 1},
+ {VideoCodec::kAV1, AudioCodec::kOpus, 1, 1},
};
INSTANTIATE_TEST_SUITE_P(All, WebmMuxerTest, ValuesIn(kTestCases));
@@ -448,7 +449,7 @@
WebmMuxerTestUnparametrized()
: environment_(base::test::TaskEnvironment::TimeSource::MOCK_TIME),
webm_muxer_(std::make_unique<WebmMuxer>(
- kCodecOpus,
+ AudioCodec::kOpus,
/*has_audio=*/true,
/*has_video=*/true,
std::make_unique<LiveWebmMuxerDelegate>(base::BindRepeating(
@@ -484,8 +485,8 @@
}
void AddVideoAtOffset(int system_timestamp_offset_ms, bool is_key_frame) {
- WebmMuxer::VideoParameters params(gfx::Size(1, 1), 0, media::kCodecVP8,
- gfx::ColorSpace());
+ WebmMuxer::VideoParameters params(
+ gfx::Size(1, 1), 0, media::VideoCodec::kVP8, gfx::ColorSpace());
webm_muxer_->OnEncodedVideo(
params, "video_at_offset", "",
base::TimeTicks() +
diff --git a/media/remoting/courier_renderer_unittest.cc b/media/remoting/courier_renderer_unittest.cc
index 739cbc6..63ea40b 100644
--- a/media/remoting/courier_renderer_unittest.cc
+++ b/media/remoting/courier_renderer_unittest.cc
@@ -642,7 +642,7 @@
TEST_F(CourierRendererTest, OnAudioConfigChange) {
const AudioDecoderConfig kNewAudioConfig(
- kCodecVorbis, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO, 44100,
+ AudioCodec::kVorbis, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO, 44100,
EmptyExtraData(), EncryptionScheme::kUnencrypted);
InitializeRenderer();
// Make sure initial audio config does not match the one we intend to send.
diff --git a/media/remoting/fake_media_resource.cc b/media/remoting/fake_media_resource.cc
index 39ff3c2..ff153b7 100644
--- a/media/remoting/fake_media_resource.cc
+++ b/media/remoting/fake_media_resource.cc
@@ -21,14 +21,14 @@
FakeDemuxerStream::FakeDemuxerStream(bool is_audio) {
type_ = is_audio ? DemuxerStream::AUDIO : DemuxerStream::VIDEO;
if (is_audio) {
- audio_config_.Initialize(kCodecAAC, kSampleFormatS16, CHANNEL_LAYOUT_STEREO,
- 38400, std::vector<uint8_t>(),
- EncryptionScheme::kUnencrypted, base::TimeDelta(),
- 0);
+ audio_config_.Initialize(
+ AudioCodec::kAAC, kSampleFormatS16, CHANNEL_LAYOUT_STEREO, 38400,
+ std::vector<uint8_t>(), EncryptionScheme::kUnencrypted,
+ base::TimeDelta(), 0);
} else {
gfx::Size size(640, 480);
gfx::Rect rect(0, 0, 640, 480);
- video_config_.Initialize(kCodecH264, H264PROFILE_BASELINE,
+ video_config_.Initialize(VideoCodec::kH264, H264PROFILE_BASELINE,
VideoDecoderConfig::AlphaMode::kIsOpaque,
VideoColorSpace::REC601(), kNoTransformation, size,
rect, size, std::vector<uint8_t>(),
diff --git a/media/remoting/metrics.cc b/media/remoting/metrics.cc
index c2f9f03..10fb3e9 100644
--- a/media/remoting/metrics.cc
+++ b/media/remoting/metrics.cc
@@ -42,10 +42,10 @@
} // namespace
SessionMetricsRecorder::SessionMetricsRecorder()
- : last_audio_codec_(kUnknownAudioCodec),
+ : last_audio_codec_(AudioCodec::kUnknown),
last_channel_layout_(CHANNEL_LAYOUT_NONE),
last_sample_rate_(0),
- last_video_codec_(kUnknownVideoCodec),
+ last_video_codec_(VideoCodec::kUnknown),
last_video_profile_(VIDEO_CODEC_PROFILE_UNKNOWN) {}
SessionMetricsRecorder::~SessionMetricsRecorder() = default;
@@ -59,9 +59,9 @@
void SessionMetricsRecorder::DidStartSession() {
UMA_HISTOGRAM_ENUMERATION("Media.Remoting.SessionStartTrigger",
*start_trigger_, START_TRIGGER_MAX + 1);
- if (last_audio_codec_ != kUnknownAudioCodec)
+ if (last_audio_codec_ != AudioCodec::kUnknown)
RecordAudioConfiguration();
- if (last_video_codec_ != kUnknownVideoCodec)
+ if (last_video_codec_ != VideoCodec::kUnknown)
RecordVideoConfiguration();
RecordTrackConfiguration();
}
@@ -134,7 +134,7 @@
if (need_to_record_audio_configuration)
RecordAudioConfiguration();
} else {
- last_audio_codec_ = kUnknownAudioCodec;
+ last_audio_codec_ = AudioCodec::kUnknown;
last_channel_layout_ = CHANNEL_LAYOUT_NONE;
last_sample_rate_ = 0;
}
@@ -152,7 +152,7 @@
if (need_to_record_video_configuration)
RecordVideoConfiguration();
} else {
- last_video_codec_ = kUnknownVideoCodec;
+ last_video_codec_ = VideoCodec::kUnknown;
last_video_profile_ = VIDEO_CODEC_PROFILE_UNKNOWN;
last_natural_size_ = gfx::Size();
}
@@ -190,8 +190,7 @@
}
void SessionMetricsRecorder::RecordAudioConfiguration() {
- UMA_HISTOGRAM_ENUMERATION("Media.Remoting.AudioCodec", last_audio_codec_,
- kAudioCodecMax + 1);
+ base::UmaHistogramEnumeration("Media.Remoting.AudioCodec", last_audio_codec_);
UMA_HISTOGRAM_ENUMERATION("Media.Remoting.AudioChannelLayout",
last_channel_layout_, CHANNEL_LAYOUT_MAX + 1);
AudioSampleRate asr;
@@ -205,8 +204,7 @@
}
void SessionMetricsRecorder::RecordVideoConfiguration() {
- UMA_HISTOGRAM_ENUMERATION("Media.Remoting.VideoCodec", last_video_codec_,
- kVideoCodecMax + 1);
+ base::UmaHistogramEnumeration("Media.Remoting.VideoCodec", last_video_codec_);
UMA_HISTOGRAM_ENUMERATION("Media.Remoting.VideoCodecProfile",
last_video_profile_, VIDEO_CODEC_PROFILE_MAX + 1);
UMA_HISTOGRAM_CUSTOM_ENUMERATION(
@@ -224,9 +222,9 @@
void SessionMetricsRecorder::RecordTrackConfiguration() {
TrackConfiguration config = NEITHER_AUDIO_NOR_VIDEO;
- if (last_audio_codec_ != kUnknownAudioCodec)
+ if (last_audio_codec_ != AudioCodec::kUnknown)
config = AUDIO_ONLY;
- if (last_video_codec_ != kUnknownVideoCodec) {
+ if (last_video_codec_ != VideoCodec::kUnknown) {
if (config == AUDIO_ONLY)
config = AUDIO_AND_VIDEO;
else
diff --git a/media/remoting/proto_enum_utils.cc b/media/remoting/proto_enum_utils.cc
index ce46988..25c2827 100644
--- a/media/remoting/proto_enum_utils.cc
+++ b/media/remoting/proto_enum_utils.cc
@@ -11,29 +11,33 @@
case OriginType::x: \
return OtherType::x
+#define CASE_RETURN_ORIGIN_TO_OTHER(x, y) \
+ case OriginType::x: \
+ return OtherType::y
+
absl::optional<AudioCodec> ToMediaAudioCodec(
openscreen::cast::AudioDecoderConfig::Codec value) {
using OriginType = openscreen::cast::AudioDecoderConfig;
using OtherType = AudioCodec;
switch (value) {
- CASE_RETURN_OTHER(kUnknownAudioCodec);
- CASE_RETURN_OTHER(kCodecAAC);
- CASE_RETURN_OTHER(kCodecMP3);
- CASE_RETURN_OTHER(kCodecPCM);
- CASE_RETURN_OTHER(kCodecVorbis);
- CASE_RETURN_OTHER(kCodecFLAC);
- CASE_RETURN_OTHER(kCodecAMR_NB);
- CASE_RETURN_OTHER(kCodecAMR_WB);
- CASE_RETURN_OTHER(kCodecPCM_MULAW);
- CASE_RETURN_OTHER(kCodecGSM_MS);
- CASE_RETURN_OTHER(kCodecPCM_S16BE);
- CASE_RETURN_OTHER(kCodecPCM_S24BE);
- CASE_RETURN_OTHER(kCodecOpus);
- CASE_RETURN_OTHER(kCodecEAC3);
- CASE_RETURN_OTHER(kCodecPCM_ALAW);
- CASE_RETURN_OTHER(kCodecALAC);
- CASE_RETURN_OTHER(kCodecAC3);
- CASE_RETURN_OTHER(kCodecMpegHAudio);
+ CASE_RETURN_ORIGIN_TO_OTHER(kUnknownAudioCodec, kUnknown);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecAAC, kAAC);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecMP3, kMP3);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecPCM, kPCM);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecVorbis, kVorbis);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecFLAC, kFLAC);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecAMR_NB, kAMR_NB);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecAMR_WB, kAMR_WB);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecPCM_MULAW, kPCM_MULAW);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecGSM_MS, kGSM_MS);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecPCM_S16BE, kPCM_S16BE);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecPCM_S24BE, kPCM_S24BE);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecOpus, kOpus);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecEAC3, kEAC3);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecPCM_ALAW, kPCM_ALAW);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecALAC, kALAC);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecAC3, kAC3);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecMpegHAudio, kMpegHAudio);
default:
return absl::nullopt;
}
@@ -44,24 +48,24 @@
using OriginType = AudioCodec;
using OtherType = openscreen::cast::AudioDecoderConfig;
switch (value) {
- CASE_RETURN_OTHER(kUnknownAudioCodec);
- CASE_RETURN_OTHER(kCodecAAC);
- CASE_RETURN_OTHER(kCodecMP3);
- CASE_RETURN_OTHER(kCodecPCM);
- CASE_RETURN_OTHER(kCodecVorbis);
- CASE_RETURN_OTHER(kCodecFLAC);
- CASE_RETURN_OTHER(kCodecAMR_NB);
- CASE_RETURN_OTHER(kCodecAMR_WB);
- CASE_RETURN_OTHER(kCodecPCM_MULAW);
- CASE_RETURN_OTHER(kCodecGSM_MS);
- CASE_RETURN_OTHER(kCodecPCM_S16BE);
- CASE_RETURN_OTHER(kCodecPCM_S24BE);
- CASE_RETURN_OTHER(kCodecOpus);
- CASE_RETURN_OTHER(kCodecEAC3);
- CASE_RETURN_OTHER(kCodecPCM_ALAW);
- CASE_RETURN_OTHER(kCodecALAC);
- CASE_RETURN_OTHER(kCodecAC3);
- CASE_RETURN_OTHER(kCodecMpegHAudio);
+ CASE_RETURN_ORIGIN_TO_OTHER(kUnknown, kUnknownAudioCodec);
+ CASE_RETURN_ORIGIN_TO_OTHER(kAAC, kCodecAAC);
+ CASE_RETURN_ORIGIN_TO_OTHER(kMP3, kCodecMP3);
+ CASE_RETURN_ORIGIN_TO_OTHER(kPCM, kCodecPCM);
+ CASE_RETURN_ORIGIN_TO_OTHER(kVorbis, kCodecVorbis);
+ CASE_RETURN_ORIGIN_TO_OTHER(kFLAC, kCodecFLAC);
+ CASE_RETURN_ORIGIN_TO_OTHER(kAMR_NB, kCodecAMR_NB);
+ CASE_RETURN_ORIGIN_TO_OTHER(kAMR_WB, kCodecAMR_WB);
+ CASE_RETURN_ORIGIN_TO_OTHER(kPCM_MULAW, kCodecPCM_MULAW);
+ CASE_RETURN_ORIGIN_TO_OTHER(kGSM_MS, kCodecGSM_MS);
+ CASE_RETURN_ORIGIN_TO_OTHER(kPCM_S16BE, kCodecPCM_S16BE);
+ CASE_RETURN_ORIGIN_TO_OTHER(kPCM_S24BE, kCodecPCM_S24BE);
+ CASE_RETURN_ORIGIN_TO_OTHER(kOpus, kCodecOpus);
+ CASE_RETURN_ORIGIN_TO_OTHER(kEAC3, kCodecEAC3);
+ CASE_RETURN_ORIGIN_TO_OTHER(kPCM_ALAW, kCodecPCM_ALAW);
+ CASE_RETURN_ORIGIN_TO_OTHER(kALAC, kCodecALAC);
+ CASE_RETURN_ORIGIN_TO_OTHER(kAC3, kCodecAC3);
+ CASE_RETURN_ORIGIN_TO_OTHER(kMpegHAudio, kCodecMpegHAudio);
default:
return absl::nullopt;
}
@@ -204,17 +208,17 @@
using OriginType = openscreen::cast::VideoDecoderConfig;
using OtherType = VideoCodec;
switch (value) {
- CASE_RETURN_OTHER(kUnknownVideoCodec);
- CASE_RETURN_OTHER(kCodecH264);
- CASE_RETURN_OTHER(kCodecVC1);
- CASE_RETURN_OTHER(kCodecMPEG2);
- CASE_RETURN_OTHER(kCodecMPEG4);
- CASE_RETURN_OTHER(kCodecTheora);
- CASE_RETURN_OTHER(kCodecVP8);
- CASE_RETURN_OTHER(kCodecVP9);
- CASE_RETURN_OTHER(kCodecHEVC);
- CASE_RETURN_OTHER(kCodecDolbyVision);
- CASE_RETURN_OTHER(kCodecAV1);
+ CASE_RETURN_ORIGIN_TO_OTHER(kUnknownVideoCodec, kUnknown);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecH264, kH264);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecVC1, kVC1);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecMPEG2, kMPEG2);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecMPEG4, kMPEG4);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecTheora, kTheora);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecVP8, kVP8);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecVP9, kVP9);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecHEVC, kHEVC);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecDolbyVision, kDolbyVision);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecAV1, kAV1);
default:
return absl::nullopt;
}
@@ -225,17 +229,17 @@
using OriginType = VideoCodec;
using OtherType = openscreen::cast::VideoDecoderConfig;
switch (value) {
- CASE_RETURN_OTHER(kUnknownVideoCodec);
- CASE_RETURN_OTHER(kCodecH264);
- CASE_RETURN_OTHER(kCodecVC1);
- CASE_RETURN_OTHER(kCodecMPEG2);
- CASE_RETURN_OTHER(kCodecMPEG4);
- CASE_RETURN_OTHER(kCodecTheora);
- CASE_RETURN_OTHER(kCodecVP8);
- CASE_RETURN_OTHER(kCodecVP9);
- CASE_RETURN_OTHER(kCodecHEVC);
- CASE_RETURN_OTHER(kCodecDolbyVision);
- CASE_RETURN_OTHER(kCodecAV1);
+ CASE_RETURN_ORIGIN_TO_OTHER(kUnknown, kUnknownVideoCodec);
+ CASE_RETURN_ORIGIN_TO_OTHER(kH264, kCodecH264);
+ CASE_RETURN_ORIGIN_TO_OTHER(kVC1, kCodecVC1);
+ CASE_RETURN_ORIGIN_TO_OTHER(kMPEG2, kCodecMPEG2);
+ CASE_RETURN_ORIGIN_TO_OTHER(kMPEG4, kCodecMPEG4);
+ CASE_RETURN_ORIGIN_TO_OTHER(kTheora, kCodecTheora);
+ CASE_RETURN_ORIGIN_TO_OTHER(kVP8, kCodecVP8);
+ CASE_RETURN_ORIGIN_TO_OTHER(kVP9, kCodecVP9);
+ CASE_RETURN_ORIGIN_TO_OTHER(kHEVC, kCodecHEVC);
+ CASE_RETURN_ORIGIN_TO_OTHER(kDolbyVision, kCodecDolbyVision);
+ CASE_RETURN_ORIGIN_TO_OTHER(kAV1, kCodecAV1);
default:
return absl::nullopt;
}
diff --git a/media/remoting/proto_utils_unittest.cc b/media/remoting/proto_utils_unittest.cc
index 880d0e2..6c9c297 100644
--- a/media/remoting/proto_utils_unittest.cc
+++ b/media/remoting/proto_utils_unittest.cc
@@ -99,7 +99,7 @@
TEST_F(ProtoUtilsTest, AudioDecoderConfigConversionTest) {
const char extra_data[4] = {'A', 'C', 'E', 'G'};
AudioDecoderConfig audio_config(
- kCodecAAC, kSampleFormatF32, CHANNEL_LAYOUT_MONO, 48000,
+ AudioCodec::kAAC, kSampleFormatF32, CHANNEL_LAYOUT_MONO, 48000,
std::vector<uint8_t>(std::begin(extra_data), std::end(extra_data)),
EncryptionScheme::kUnencrypted);
ASSERT_TRUE(audio_config.IsValidConfig());
diff --git a/media/remoting/renderer_controller.cc b/media/remoting/renderer_controller.cc
index 0fe8e12..728cfa28 100644
--- a/media/remoting/renderer_controller.cc
+++ b/media/remoting/renderer_controller.cc
@@ -375,16 +375,16 @@
bool compatible = false;
switch (pipeline_metadata_.video_decoder_config.codec()) {
- case VideoCodec::kCodecH264:
+ case VideoCodec::kH264:
compatible = HasVideoCapability(RemotingSinkVideoCapability::CODEC_H264);
break;
- case VideoCodec::kCodecVP8:
+ case VideoCodec::kVP8:
compatible = HasVideoCapability(RemotingSinkVideoCapability::CODEC_VP8);
break;
- case VideoCodec::kCodecVP9:
+ case VideoCodec::kVP9:
compatible = HasVideoCapability(RemotingSinkVideoCapability::CODEC_VP9);
break;
- case VideoCodec::kCodecHEVC:
+ case VideoCodec::kHEVC:
compatible = HasVideoCapability(RemotingSinkVideoCapability::CODEC_HEVC);
break;
default:
@@ -405,26 +405,26 @@
bool compatible = false;
switch (pipeline_metadata_.audio_decoder_config.codec()) {
- case AudioCodec::kCodecAAC:
+ case AudioCodec::kAAC:
compatible = HasAudioCapability(RemotingSinkAudioCapability::CODEC_AAC);
break;
- case AudioCodec::kCodecOpus:
+ case AudioCodec::kOpus:
compatible = HasAudioCapability(RemotingSinkAudioCapability::CODEC_OPUS);
break;
- case AudioCodec::kCodecMP3:
- case AudioCodec::kCodecPCM:
- case AudioCodec::kCodecVorbis:
- case AudioCodec::kCodecFLAC:
- case AudioCodec::kCodecAMR_NB:
- case AudioCodec::kCodecAMR_WB:
- case AudioCodec::kCodecPCM_MULAW:
- case AudioCodec::kCodecGSM_MS:
- case AudioCodec::kCodecPCM_S16BE:
- case AudioCodec::kCodecPCM_S24BE:
- case AudioCodec::kCodecEAC3:
- case AudioCodec::kCodecPCM_ALAW:
- case AudioCodec::kCodecALAC:
- case AudioCodec::kCodecAC3:
+ case AudioCodec::kMP3:
+ case AudioCodec::kPCM:
+ case AudioCodec::kVorbis:
+ case AudioCodec::kFLAC:
+ case AudioCodec::kAMR_NB:
+ case AudioCodec::kAMR_WB:
+ case AudioCodec::kPCM_MULAW:
+ case AudioCodec::kGSM_MS:
+ case AudioCodec::kPCM_S16BE:
+ case AudioCodec::kPCM_S24BE:
+ case AudioCodec::kEAC3:
+ case AudioCodec::kPCM_ALAW:
+ case AudioCodec::kALAC:
+ case AudioCodec::kAC3:
compatible =
HasAudioCapability(RemotingSinkAudioCapability::CODEC_BASELINE_SET);
break;
@@ -648,22 +648,22 @@
return false;
switch (pipeline_metadata_.audio_decoder_config.codec()) {
- case AudioCodec::kCodecAAC:
- case AudioCodec::kCodecOpus:
- case AudioCodec::kCodecMP3:
- case AudioCodec::kCodecPCM:
- case AudioCodec::kCodecVorbis:
- case AudioCodec::kCodecFLAC:
- case AudioCodec::kCodecAMR_NB:
- case AudioCodec::kCodecAMR_WB:
- case AudioCodec::kCodecPCM_MULAW:
- case AudioCodec::kCodecGSM_MS:
- case AudioCodec::kCodecPCM_S16BE:
- case AudioCodec::kCodecPCM_S24BE:
- case AudioCodec::kCodecEAC3:
- case AudioCodec::kCodecPCM_ALAW:
- case AudioCodec::kCodecALAC:
- case AudioCodec::kCodecAC3:
+ case AudioCodec::kAAC:
+ case AudioCodec::kOpus:
+ case AudioCodec::kMP3:
+ case AudioCodec::kPCM:
+ case AudioCodec::kVorbis:
+ case AudioCodec::kFLAC:
+ case AudioCodec::kAMR_NB:
+ case AudioCodec::kAMR_WB:
+ case AudioCodec::kPCM_MULAW:
+ case AudioCodec::kGSM_MS:
+ case AudioCodec::kPCM_S16BE:
+ case AudioCodec::kPCM_S24BE:
+ case AudioCodec::kEAC3:
+ case AudioCodec::kPCM_ALAW:
+ case AudioCodec::kALAC:
+ case AudioCodec::kAC3:
return true;
default:
return false;
@@ -678,10 +678,10 @@
return false;
switch (pipeline_metadata_.video_decoder_config.codec()) {
- case VideoCodec::kCodecH264:
- case VideoCodec::kCodecVP8:
- case VideoCodec::kCodecVP9:
- case VideoCodec::kCodecHEVC:
+ case VideoCodec::kH264:
+ case VideoCodec::kVP8:
+ case VideoCodec::kVP9:
+ case VideoCodec::kHEVC:
return true;
default:
return false;
diff --git a/media/remoting/renderer_controller_unittest.cc b/media/remoting/renderer_controller_unittest.cc
index 2446839..aa8aa64 100644
--- a/media/remoting/renderer_controller_unittest.cc
+++ b/media/remoting/renderer_controller_unittest.cc
@@ -173,7 +173,7 @@
};
TEST_F(RendererControllerTest, ToggleRendererOnDominantChange) {
- InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kCodecVP8),
+ InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kVP8),
GetDefaultSinkMetadata(true));
DelayedStartEnds();
RunUntilIdle();
@@ -187,7 +187,7 @@
TEST_F(RendererControllerTest, ToggleRendererOnDisableChange) {
EXPECT_FALSE(is_rendering_remotely_);
- InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kCodecVP8),
+ InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kVP8),
GetDefaultSinkMetadata(true));
ExpectInDelayedStart();
DelayedStartEnds();
@@ -203,13 +203,13 @@
TEST_F(RendererControllerTest, NotStartForShortContent) {
duration_in_sec_ = 30;
- InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kCodecVP8),
+ InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kVP8),
GetDefaultSinkMetadata(true));
ExpectInLocalRendering();
}
TEST_F(RendererControllerTest, ToggleRendererOnSinkCapabilities) {
- InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kCodecVP8),
+ InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kVP8),
GetDefaultSinkMetadata(false));
// An available sink that does not support remote rendering should not cause
// the controller to toggle remote rendering on.
@@ -231,7 +231,7 @@
}
TEST_F(RendererControllerTest, WithVP9VideoCodec) {
- InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kCodecVP9),
+ InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kVP9),
GetDefaultSinkMetadata(true));
// An available sink that does not support VP9 video codec should not cause
// the controller to toggle remote rendering on.
@@ -252,7 +252,7 @@
}
TEST_F(RendererControllerTest, WithHEVCVideoCodec) {
- InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kCodecHEVC),
+ InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kHEVC),
GetDefaultSinkMetadata(true));
// An available sink that does not support HEVC video codec should not cause
// the controller to toggle remote rendering on.
@@ -276,9 +276,9 @@
TEST_F(RendererControllerTest, WithAACAudioCodec) {
const AudioDecoderConfig audio_config = AudioDecoderConfig(
- AudioCodec::kCodecAAC, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO,
- 44100, EmptyExtraData(), EncryptionScheme::kUnencrypted);
- PipelineMetadata pipeline_metadata = DefaultMetadata(VideoCodec::kCodecVP8);
+ AudioCodec::kAAC, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO, 44100,
+ EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ PipelineMetadata pipeline_metadata = DefaultMetadata(VideoCodec::kVP8);
pipeline_metadata.audio_decoder_config = audio_config;
InitializeControllerAndBecomeDominant(pipeline_metadata,
GetDefaultSinkMetadata(true));
@@ -304,9 +304,9 @@
TEST_F(RendererControllerTest, WithOpusAudioCodec) {
const AudioDecoderConfig audio_config = AudioDecoderConfig(
- AudioCodec::kCodecOpus, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO,
- 44100, EmptyExtraData(), EncryptionScheme::kUnencrypted);
- PipelineMetadata pipeline_metadata = DefaultMetadata(VideoCodec::kCodecVP8);
+ AudioCodec::kOpus, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO, 44100,
+ EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ PipelineMetadata pipeline_metadata = DefaultMetadata(VideoCodec::kVP8);
pipeline_metadata.audio_decoder_config = audio_config;
InitializeControllerAndBecomeDominant(pipeline_metadata,
GetDefaultSinkMetadata(true));
@@ -330,7 +330,7 @@
}
TEST_F(RendererControllerTest, StartFailedWithHighFrameRate) {
- InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kCodecVP8),
+ InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kVP8),
GetDefaultSinkMetadata(true));
RunUntilIdle();
ExpectInDelayedStart();
@@ -343,7 +343,7 @@
mojom::RemotingSinkMetadata sink_metadata = GetDefaultSinkMetadata(true);
sink_metadata.video_capabilities.push_back(
mojom::RemotingSinkVideoCapability::SUPPORT_4K);
- InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kCodecVP8),
+ InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kVP8),
sink_metadata);
RunUntilIdle();
ExpectInDelayedStart();
@@ -354,7 +354,7 @@
TEST_F(RendererControllerTest, PacingTooSlowly) {
mojom::RemotingSinkMetadata sink_metadata = GetDefaultSinkMetadata(true);
- InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kCodecVP8),
+ InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kVP8),
sink_metadata);
RunUntilIdle();
ExpectInDelayedStart();
@@ -376,7 +376,7 @@
TEST_F(RendererControllerTest, StartFailed) {
controller_ = FakeRemoterFactory::CreateController(true);
- InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kCodecVP8),
+ InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kVP8),
GetDefaultSinkMetadata(true));
RunUntilIdle();
ExpectInDelayedStart();
@@ -387,7 +387,7 @@
TEST_F(RendererControllerTest, SetClientNullptr) {
controller_ = FakeRemoterFactory::CreateController(true);
- InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kCodecVP8),
+ InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kVP8),
GetDefaultSinkMetadata(true));
RunUntilIdle();
controller_->SetClient(nullptr);
diff --git a/media/renderers/audio_renderer_impl.cc b/media/renderers/audio_renderer_impl.cc
index 3053692..10b6f9f 100644
--- a/media/renderers/audio_renderer_impl.cc
+++ b/media/renderers/audio_renderer_impl.cc
@@ -476,9 +476,9 @@
if (is_passthrough_) {
AudioParameters::Format format = AudioParameters::AUDIO_FAKE;
- if (codec == kCodecAC3) {
+ if (codec == AudioCodec::kAC3) {
format = AudioParameters::AUDIO_BITSTREAM_AC3;
- } else if (codec == kCodecEAC3) {
+ } else if (codec == AudioCodec::kEAC3) {
format = AudioParameters::AUDIO_BITSTREAM_EAC3;
} else {
NOTREACHED();
diff --git a/media/renderers/audio_renderer_impl_unittest.cc b/media/renderers/audio_renderer_impl_unittest.cc
index 6a76173..5ad4641 100644
--- a/media/renderers/audio_renderer_impl_unittest.cc
+++ b/media/renderers/audio_renderer_impl_unittest.cc
@@ -58,7 +58,7 @@
} // namespace
// Constants to specify the type of audio data used.
-constexpr AudioCodec kCodec = kCodecVorbis;
+constexpr AudioCodec kCodec = AudioCodec::kVorbis;
constexpr SampleFormat kSampleFormat = kSampleFormatPlanarF32;
constexpr ChannelLayout kChannelLayout = CHANNEL_LAYOUT_STEREO;
constexpr int kChannels = 2;
@@ -251,9 +251,10 @@
hardware_params_.Reset(AudioParameters::AUDIO_BITSTREAM_EAC3,
kChannelLayout, kOutputSamplesPerSecond, 512);
sink_ = base::MakeRefCounted<FakeAudioRendererSink>(hardware_params_);
- AudioDecoderConfig audio_config(
- kCodecAC3, kSampleFormatEac3, kChannelLayout, kInputSamplesPerSecond,
- EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ AudioDecoderConfig audio_config(AudioCodec::kAC3, kSampleFormatEac3,
+ kChannelLayout, kInputSamplesPerSecond,
+ EmptyExtraData(),
+ EncryptionScheme::kUnencrypted);
demuxer_stream_.set_audio_decoder_config(audio_config);
ConfigureDemuxerStream(true);
@@ -649,7 +650,7 @@
// Force config change to simulate detected change from decoder stream. Expect
// that RendererClient to be signaled with the new config.
const AudioDecoderConfig kValidAudioConfig(
- kCodecVorbis, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO, 44100,
+ AudioCodec::kVorbis, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO, 44100,
EmptyExtraData(), EncryptionScheme::kUnencrypted);
EXPECT_TRUE(kValidAudioConfig.IsValidConfig());
EXPECT_CALL(*this, OnAudioConfigChange(DecoderConfigEq(kValidAudioConfig)));
@@ -923,7 +924,7 @@
int audio_channels = 9;
AudioDecoderConfig audio_config(
- kCodecOpus, kSampleFormat, CHANNEL_LAYOUT_DISCRETE,
+ AudioCodec::kOpus, kSampleFormat, CHANNEL_LAYOUT_DISCRETE,
kInputSamplesPerSecond, EmptyExtraData(), EncryptionScheme::kUnencrypted);
audio_config.SetChannelsForDiscrete(audio_channels);
demuxer_stream_.set_audio_decoder_config(audio_config);
diff --git a/media/renderers/win/media_foundation_audio_stream.cc b/media/renderers/win/media_foundation_audio_stream.cc
index 5a211ae8..6b9bf37d 100644
--- a/media/renderers/win/media_foundation_audio_stream.cc
+++ b/media/renderers/win/media_foundation_audio_stream.cc
@@ -37,37 +37,37 @@
DVLOG(1) << __func__ << ": codec=" << codec;
switch (codec) {
- case kCodecAAC:
+ case AudioCodec::kAAC:
return MFAudioFormat_AAC;
- case kCodecMP3:
+ case AudioCodec::kMP3:
return MFAudioFormat_MP3;
- case kCodecPCM:
+ case AudioCodec::kPCM:
return MFAudioFormat_PCM;
- case kCodecVorbis:
+ case AudioCodec::kVorbis:
return MFAudioFormat_Vorbis;
- case kCodecFLAC:
+ case AudioCodec::kFLAC:
return MFAudioFormat_FLAC;
- case kCodecAMR_NB:
+ case AudioCodec::kAMR_NB:
return MFAudioFormat_AMR_NB;
- case kCodecAMR_WB:
+ case AudioCodec::kAMR_WB:
return MFAudioFormat_AMR_WB;
- case kCodecPCM_MULAW:
+ case AudioCodec::kPCM_MULAW:
return MediaFoundationSubTypeFromWaveFormat(WAVE_FORMAT_MULAW);
- case kCodecGSM_MS:
+ case AudioCodec::kGSM_MS:
return MediaFoundationSubTypeFromWaveFormat(WAVE_FORMAT_GSM610);
- case kCodecPCM_S16BE:
+ case AudioCodec::kPCM_S16BE:
return MFAudioFormat_PCM;
- case kCodecPCM_S24BE:
+ case AudioCodec::kPCM_S24BE:
return MFAudioFormat_PCM;
- case kCodecOpus:
+ case AudioCodec::kOpus:
return MFAudioFormat_Opus;
- case kCodecEAC3:
+ case AudioCodec::kEAC3:
return MFAudioFormat_Dolby_DDPlus;
- case kCodecPCM_ALAW:
+ case AudioCodec::kPCM_ALAW:
return MediaFoundationSubTypeFromWaveFormat(WAVE_FORMAT_ALAW);
- case kCodecALAC:
+ case AudioCodec::kALAC:
return MFAudioFormat_ALAC;
- case kCodecAC3:
+ case AudioCodec::kAC3:
return MFAudioFormat_Dolby_AC3;
default:
return GUID_NULL;
@@ -76,9 +76,9 @@
bool IsUncompressedAudio(AudioCodec codec) {
switch (codec) {
- case kCodecPCM:
- case kCodecPCM_S16BE:
- case kCodecPCM_S24BE:
+ case AudioCodec::kPCM:
+ case AudioCodec::kPCM_S16BE:
+ case AudioCodec::kPCM_S24BE:
return true;
default:
return false;
@@ -206,7 +206,7 @@
AudioCodec codec = demuxer_stream->audio_decoder_config().codec();
switch (codec) {
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- case kCodecAAC:
+ case AudioCodec::kAAC:
RETURN_IF_FAILED(MakeAndInitialize<MediaFoundationAACAudioStream>(
&audio_stream, stream_id, parent_source, demuxer_stream));
break;
diff --git a/media/renderers/win/media_foundation_video_stream.cc b/media/renderers/win/media_foundation_video_stream.cc
index 1ef146a..5d31e271 100644
--- a/media/renderers/win/media_foundation_video_stream.cc
+++ b/media/renderers/win/media_foundation_video_stream.cc
@@ -26,27 +26,27 @@
GUID VideoCodecToMFSubtype(VideoCodec codec) {
switch (codec) {
- case kCodecH264:
+ case VideoCodec::kH264:
return MFVideoFormat_H264;
- case kCodecVC1:
+ case VideoCodec::kVC1:
return MFVideoFormat_WVC1;
- case kCodecMPEG2:
+ case VideoCodec::kMPEG2:
return MFVideoFormat_MPEG2;
- case kCodecMPEG4:
+ case VideoCodec::kMPEG4:
return MFVideoFormat_MP4V;
- case kCodecTheora:
+ case VideoCodec::kTheora:
return MFVideoFormat_THEORA;
- case kCodecVP8:
+ case VideoCodec::kVP8:
return MFVideoFormat_VP80;
- case kCodecVP9:
+ case VideoCodec::kVP9:
return MFVideoFormat_VP90;
- case kCodecHEVC:
+ case VideoCodec::kHEVC:
return MFVideoFormat_HEVC;
- case kCodecDolbyVision:
+ case VideoCodec::kDolbyVision:
// TODO(frankli): DolbyVision also supports H264 when the profile ID is 9
// (DOLBYVISION_PROFILE9). Will it be fine to use HEVC?
return MFVideoFormat_HEVC;
- case kCodecAV1:
+ case VideoCodec::kAV1:
return MFVideoFormat_AV1;
default:
return GUID_NULL;
@@ -241,16 +241,16 @@
VideoCodec codec = demuxer_stream->video_decoder_config().codec();
switch (codec) {
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- case kCodecH264:
+ case VideoCodec::kH264:
RETURN_IF_FAILED(MakeAndInitialize<MediaFoundationH264VideoStream>(
&video_stream, stream_id, parent_source, demuxer_stream));
break;
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
- case kCodecHEVC:
+ case VideoCodec::kHEVC:
#endif
#if BUILDFLAG(ENABLE_PLATFORM_DOLBY_VISION)
- case kCodecDolbyVision:
+ case VideoCodec::kDolbyVision:
#endif
#if BUILDFLAG(ENABLE_PLATFORM_HEVC) || BUILDFLAG(ENABLE_PLATFORM_DOLBY_VISION)
RETURN_IF_FAILED(MakeAndInitialize<MediaFoundationHEVCVideoStream>(
diff --git a/media/video/software_video_encoder_test.cc b/media/video/software_video_encoder_test.cc
index 88144f0..10350937 100644
--- a/media/video/software_video_encoder_test.cc
+++ b/media/video/software_video_encoder_test.cc
@@ -77,11 +77,11 @@
VideoColorSpace::JPEG(), VideoTransformation(), size, visible_rect,
size, extra_data, EncryptionScheme::kUnencrypted);
- if (codec_ == kCodecH264 || codec_ == kCodecVP8) {
+ if (codec_ == VideoCodec::kH264 || codec_ == VideoCodec::kVP8) {
#if BUILDFLAG(ENABLE_FFMPEG_VIDEO_DECODERS)
decoder_ = std::make_unique<FFmpegVideoDecoder>(&media_log_);
#endif
- } else if (codec_ == kCodecVP9) {
+ } else if (codec_ == VideoCodec::kVP9) {
#if BUILDFLAG(ENABLE_LIBVPX)
decoder_ = std::make_unique<VpxVideoDecoder>();
#endif
@@ -151,14 +151,14 @@
std::unique_ptr<VideoEncoder> CreateEncoder(VideoCodec codec) {
switch (codec) {
- case media::kCodecVP8:
- case media::kCodecVP9:
+ case media::VideoCodec::kVP8:
+ case media::VideoCodec::kVP9:
#if BUILDFLAG(ENABLE_LIBVPX)
return std::make_unique<media::VpxVideoEncoder>();
#else
return nullptr;
#endif
- case media::kCodecH264:
+ case media::VideoCodec::kH264:
#if BUILDFLAG(ENABLE_OPENH264)
return std::make_unique<OpenH264VideoEncoder>();
#else
@@ -369,7 +369,7 @@
options.frame_size = gfx::Size(320, 200);
options.bitrate = Bitrate::ConstantBitrate(1e6); // 1Mbps
options.framerate = 25;
- if (codec_ == kCodecH264)
+ if (codec_ == VideoCodec::kH264)
options.avc.produce_annexb = true;
options.keyframe_interval = options.framerate.value() * 3; // every 3s
std::vector<scoped_refptr<VideoFrame>> frames_to_encode;
@@ -434,7 +434,7 @@
options.bitrate = Bitrate::ConstantBitrate(1e6); // 1Mbps
options.framerate = 25;
options.temporal_layers = GetParam().temporal_layers;
- if (codec_ == kCodecH264)
+ if (codec_ == VideoCodec::kH264)
options.avc.produce_annexb = true;
std::vector<scoped_refptr<VideoFrame>> frames_to_encode;
@@ -674,8 +674,8 @@
#if BUILDFLAG(ENABLE_OPENH264)
SwVideoTestParams kH264Params[] = {
- {kCodecH264, H264PROFILE_BASELINE, PIXEL_FORMAT_I420},
- {kCodecH264, H264PROFILE_BASELINE, PIXEL_FORMAT_XRGB}};
+ {VideoCodec::kH264, H264PROFILE_BASELINE, PIXEL_FORMAT_I420},
+ {VideoCodec::kH264, H264PROFILE_BASELINE, PIXEL_FORMAT_XRGB}};
INSTANTIATE_TEST_SUITE_P(H264Specific,
H264VideoEncoderTest,
@@ -688,9 +688,9 @@
PrintTestParams);
SwVideoTestParams kH264SVCParams[] = {
- {kCodecH264, H264PROFILE_BASELINE, PIXEL_FORMAT_I420, 1},
- {kCodecH264, H264PROFILE_BASELINE, PIXEL_FORMAT_I420, 2},
- {kCodecH264, H264PROFILE_BASELINE, PIXEL_FORMAT_I420, 3}};
+ {VideoCodec::kH264, H264PROFILE_BASELINE, PIXEL_FORMAT_I420, 1},
+ {VideoCodec::kH264, H264PROFILE_BASELINE, PIXEL_FORMAT_I420, 2},
+ {VideoCodec::kH264, H264PROFILE_BASELINE, PIXEL_FORMAT_I420, 3}};
INSTANTIATE_TEST_SUITE_P(H264TemporalSvc,
SVCVideoEncoderTest,
@@ -700,10 +700,10 @@
#if BUILDFLAG(ENABLE_LIBVPX)
SwVideoTestParams kVpxParams[] = {
- {kCodecVP9, VP9PROFILE_PROFILE0, PIXEL_FORMAT_I420},
- {kCodecVP9, VP9PROFILE_PROFILE0, PIXEL_FORMAT_XRGB},
- {kCodecVP8, VP8PROFILE_ANY, PIXEL_FORMAT_I420},
- {kCodecVP8, VP8PROFILE_ANY, PIXEL_FORMAT_XRGB}};
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE0, PIXEL_FORMAT_I420},
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE0, PIXEL_FORMAT_XRGB},
+ {VideoCodec::kVP8, VP8PROFILE_ANY, PIXEL_FORMAT_I420},
+ {VideoCodec::kVP8, VP8PROFILE_ANY, PIXEL_FORMAT_XRGB}};
INSTANTIATE_TEST_SUITE_P(VpxGeneric,
SoftwareVideoEncoderTest,
@@ -711,12 +711,12 @@
PrintTestParams);
SwVideoTestParams kVpxSVCParams[] = {
- {kCodecVP9, VP9PROFILE_PROFILE0, PIXEL_FORMAT_I420, 1},
- {kCodecVP9, VP9PROFILE_PROFILE0, PIXEL_FORMAT_I420, 2},
- {kCodecVP9, VP9PROFILE_PROFILE0, PIXEL_FORMAT_I420, 3},
- {kCodecVP8, VP8PROFILE_ANY, PIXEL_FORMAT_I420, 1},
- {kCodecVP8, VP8PROFILE_ANY, PIXEL_FORMAT_I420, 2},
- {kCodecVP8, VP8PROFILE_ANY, PIXEL_FORMAT_I420, 3}};
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE0, PIXEL_FORMAT_I420, 1},
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE0, PIXEL_FORMAT_I420, 2},
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE0, PIXEL_FORMAT_I420, 3},
+ {VideoCodec::kVP8, VP8PROFILE_ANY, PIXEL_FORMAT_I420, 1},
+ {VideoCodec::kVP8, VP8PROFILE_ANY, PIXEL_FORMAT_I420, 2},
+ {VideoCodec::kVP8, VP8PROFILE_ANY, PIXEL_FORMAT_I420, 3}};
INSTANTIATE_TEST_SUITE_P(VpxTemporalSvc,
SVCVideoEncoderTest,
diff --git a/media/video/supported_video_decoder_config_unittest.cc b/media/video/supported_video_decoder_config_unittest.cc
index dd3dabd..14f0ad9 100644
--- a/media/video/supported_video_decoder_config_unittest.cc
+++ b/media/video/supported_video_decoder_config_unittest.cc
@@ -13,7 +13,7 @@
public:
SupportedVideoDecoderConfigTest()
: decoder_config_(
- TestVideoConfig::NormalCodecProfile(kCodecH264,
+ TestVideoConfig::NormalCodecProfile(VideoCodec::kH264,
H264PROFILE_EXTENDED)) {
supported_config_.profile_min = H264PROFILE_MIN;
supported_config_.profile_max = H264PROFILE_MAX;
diff --git a/media/video/video_encode_accelerator.cc b/media/video/video_encode_accelerator.cc
index 7480f62..be5c9912 100644
--- a/media/video/video_encode_accelerator.cc
+++ b/media/video/video_encode_accelerator.cc
@@ -91,7 +91,7 @@
if (gop_length)
str += base::StringPrintf(", gop_length: %u", gop_length.value());
- if (VideoCodecProfileToVideoCodec(output_profile) == kCodecH264) {
+ if (VideoCodecProfileToVideoCodec(output_profile) == VideoCodec::kH264) {
if (h264_output_level) {
str += base::StringPrintf(", h264_output_level: %u",
h264_output_level.value());
diff --git a/third_party/blink/public/platform/media/power_status_helper.h b/third_party/blink/public/platform/media/power_status_helper.h
index 6467d5a..2e09926 100644
--- a/third_party/blink/public/platform/media/power_status_helper.h
+++ b/third_party/blink/public/platform/media/power_status_helper.h
@@ -121,7 +121,7 @@
// Most recent parameters we were given.
bool is_playing_ = false;
bool has_video_ = false;
- media::VideoCodec codec_ = media::VideoCodec::kUnknownVideoCodec;
+ media::VideoCodec codec_ = media::VideoCodec::kUnknown;
media::VideoCodecProfile profile_ =
media::VideoCodecProfile::VIDEO_CODEC_PROFILE_UNKNOWN;
gfx::Size natural_size_;
diff --git a/third_party/blink/renderer/core/exported/web_media_player_impl_unittest.cc b/third_party/blink/renderer/core/exported/web_media_player_impl_unittest.cc
index 398f95c..1cff8b3 100644
--- a/third_party/blink/renderer/core/exported/web_media_player_impl_unittest.cc
+++ b/third_party/blink/renderer/core/exported/web_media_player_impl_unittest.cc
@@ -1802,7 +1802,7 @@
media::PipelineMetadata metadata;
metadata.has_video = true;
metadata.video_decoder_config = TestVideoConfig::NormalCodecProfile(
- media::kCodecVP9, media::VP9PROFILE_PROFILE0);
+ media::VideoCodec::kVP9, media::VP9PROFILE_PROFILE0);
metadata.natural_size = gfx::Size(320, 240);
// Arrival of metadata should trigger creation of reporter with video config
@@ -1814,7 +1814,7 @@
// Changing the codec profile should trigger recreation of the reporter.
auto new_profile_config = TestVideoConfig::NormalCodecProfile(
- media::kCodecVP9, media::VP9PROFILE_PROFILE1);
+ media::VideoCodec::kVP9, media::VP9PROFILE_PROFILE1);
OnVideoConfigChange(new_profile_config);
ASSERT_EQ(media::VP9PROFILE_PROFILE1, GetVideoStatsReporterCodecProfile());
ASSERT_NE(last_reporter, GetVideoStatsReporter());
@@ -1822,7 +1822,8 @@
// Changing the codec (implies changing profile) should similarly trigger
// recreation of the reporter.
- auto new_codec_config = TestVideoConfig::NormalCodecProfile(media::kCodecVP8);
+ auto new_codec_config =
+ TestVideoConfig::NormalCodecProfile(media::VideoCodec::kVP8);
OnVideoConfigChange(new_codec_config);
ASSERT_EQ(media::VP8PROFILE_MIN, GetVideoStatsReporterCodecProfile());
ASSERT_NE(last_reporter, GetVideoStatsReporter());
@@ -1831,7 +1832,7 @@
// Changing other aspects of the config (like colorspace) should not trigger
// recreation of the reporter
media::VideoDecoderConfig new_color_config =
- TestVideoConfig::NormalWithColorSpace(media::kCodecVP8,
+ TestVideoConfig::NormalWithColorSpace(media::VideoCodec::kVP8,
media::VideoColorSpace::REC709());
ASSERT_EQ(media::VP8PROFILE_MIN, new_color_config.profile());
OnVideoConfigChange(new_color_config);
@@ -1846,7 +1847,7 @@
media::PipelineMetadata metadata;
metadata.has_video = true;
metadata.video_decoder_config = TestVideoConfig::NormalCodecProfile(
- media::kCodecVP8, media::VP8PROFILE_MIN);
+ media::VideoCodec::kVP8, media::VP8PROFILE_MIN);
metadata.natural_size = gfx::Size(320, 240);
OnMetadata(metadata);
diff --git a/third_party/blink/renderer/modules/media_capabilities/media_capabilities.cc b/third_party/blink/renderer/modules/media_capabilities/media_capabilities.cc
index 66a0f658c..5f74ced 100644
--- a/third_party/blink/renderer/modules/media_capabilities/media_capabilities.cc
+++ b/third_party/blink/renderer/modules/media_capabilities/media_capabilities.cc
@@ -540,7 +540,7 @@
bool IsAudioCodecValid(const String& mime_type,
const String& codec,
String* console_warning) {
- media::AudioCodec audio_codec = media::kUnknownAudioCodec;
+ media::AudioCodec audio_codec = media::AudioCodec::kUnknown;
bool is_audio_codec_ambiguous = true;
if (!media::ParseAudioCodecString(mime_type.Ascii(), codec.Ascii(),
@@ -603,7 +603,7 @@
const blink::AudioConfiguration* audio_config,
const String& mime_type,
const String& codec) {
- media::AudioCodec audio_codec = media::kUnknownAudioCodec;
+ media::AudioCodec audio_codec = media::AudioCodec::kUnknown;
media::AudioCodecProfile audio_profile = media::AudioCodecProfile::kUnknown;
bool is_audio_codec_ambiguous = true;
bool is_spatial_rendering = false;
@@ -627,7 +627,7 @@
const String& codec,
media::VideoColorSpace video_color_space,
gfx::HdrMetadataType hdr_metadata_type) {
- media::VideoCodec video_codec = media::kUnknownVideoCodec;
+ media::VideoCodec video_codec = media::VideoCodec::kUnknown;
media::VideoCodecProfile video_profile;
uint8_t video_level = 0;
bool is_video_codec_ambiguous = true;
@@ -837,7 +837,7 @@
}
}
- media::VideoCodec video_codec = media::kUnknownVideoCodec;
+ media::VideoCodec video_codec = media::VideoCodec::kUnknown;
media::VideoCodecProfile video_profile = media::VIDEO_CODEC_PROFILE_UNKNOWN;
if ((config->hasAudio() &&
@@ -1284,7 +1284,7 @@
// WebMediaPlayerImpl::UpdateSmoothnessHelper().
// TODO(chcunningham): refactor into something more robust.
Vector<media::learning::FeatureValue> ml_features(
- {media::learning::FeatureValue(video_codec),
+ {media::learning::FeatureValue(static_cast<int>(video_codec)),
media::learning::FeatureValue(video_profile),
media::learning::FeatureValue(width),
media::learning::FeatureValue(framerate)});
diff --git a/third_party/blink/renderer/modules/media_capabilities/media_capabilities_test.cc b/third_party/blink/renderer/modules/media_capabilities/media_capabilities_test.cc
index 94f0a9e..44f70f0bc 100644
--- a/third_party/blink/renderer/modules/media_capabilities/media_capabilities_test.cc
+++ b/third_party/blink/renderer/modules/media_capabilities/media_capabilities_test.cc
@@ -355,7 +355,7 @@
const char kVideoContentType[] = "video/webm; codecs=\"vp09.00.10.08\"";
const char kAudioContentType[] = "audio/webm; codecs=\"opus\"";
const media::VideoCodecProfile kCodecProfile = media::VP9PROFILE_PROFILE0;
-const media::VideoCodec kCodec = media::kCodecVP9;
+const media::VideoCodec kCodec = media::VideoCodec::kVP9;
const double kFramerate = 20.5;
const int kWidth = 3840;
const int kHeight = 2160;
@@ -449,7 +449,7 @@
// WebMediaPlayerImpl::UpdateSmoothnessHelper().
// TODO(chcunningham): refactor into something more robust.
Vector<media::learning::FeatureValue> ml_features(
- {media::learning::FeatureValue(kCodec),
+ {media::learning::FeatureValue(static_cast<int>(kCodec)),
media::learning::FeatureValue(kCodecProfile),
media::learning::FeatureValue(kWidth),
media::learning::FeatureValue(kFramerate)});
diff --git a/third_party/blink/renderer/modules/mediarecorder/fake_encoded_video_frame.h b/third_party/blink/renderer/modules/mediarecorder/fake_encoded_video_frame.h
index 8cc4338..e99ce0f 100644
--- a/third_party/blink/renderer/modules/mediarecorder/fake_encoded_video_frame.h
+++ b/third_party/blink/renderer/modules/mediarecorder/fake_encoded_video_frame.h
@@ -45,7 +45,7 @@
private:
bool is_key_frame_ = false;
std::string data_;
- media::VideoCodec codec_ = media::kCodecVP8;
+ media::VideoCodec codec_ = media::VideoCodec::kVP8;
absl::optional<media::VideoColorSpace> color_space_;
gfx::Size resolution_{0, 0};
};
diff --git a/third_party/blink/renderer/modules/mediarecorder/media_recorder_handler.cc b/third_party/blink/renderer/modules/mediarecorder/media_recorder_handler.cc
index dcae287..629b10f 100644
--- a/third_party/blink/renderer/modules/mediarecorder/media_recorder_handler.cc
+++ b/third_party/blink/renderer/modules/mediarecorder/media_recorder_handler.cc
@@ -51,12 +51,12 @@
VideoTrackRecorder::CodecId CodecIdFromMediaVideoCodec(media::VideoCodec id) {
switch (id) {
- case media::kCodecVP8:
+ case media::VideoCodec::kVP8:
return VideoTrackRecorder::CodecId::VP8;
- case media::kCodecVP9:
+ case media::VideoCodec::kVP9:
return VideoTrackRecorder::CodecId::VP9;
#if BUILDFLAG(RTC_USE_H264)
- case media::kCodecH264:
+ case media::VideoCodec::kH264:
return VideoTrackRecorder::CodecId::H264;
#endif
default:
@@ -69,31 +69,31 @@
media::VideoCodec MediaVideoCodecFromCodecId(VideoTrackRecorder::CodecId id) {
switch (id) {
case VideoTrackRecorder::CodecId::VP8:
- return media::kCodecVP8;
+ return media::VideoCodec::kVP8;
case VideoTrackRecorder::CodecId::VP9:
- return media::kCodecVP9;
+ return media::VideoCodec::kVP9;
#if BUILDFLAG(RTC_USE_H264)
case VideoTrackRecorder::CodecId::H264:
- return media::kCodecH264;
+ return media::VideoCodec::kH264;
#endif
case VideoTrackRecorder::CodecId::LAST:
- return media::kUnknownVideoCodec;
+ return media::VideoCodec::kUnknown;
}
NOTREACHED() << "Unsupported video codec";
- return media::kUnknownVideoCodec;
+ return media::VideoCodec::kUnknown;
}
media::AudioCodec CodecIdToMediaAudioCodec(AudioTrackRecorder::CodecId id) {
switch (id) {
case AudioTrackRecorder::CodecId::PCM:
- return media::kCodecPCM;
+ return media::AudioCodec::kPCM;
case AudioTrackRecorder::CodecId::OPUS:
- return media::kCodecOpus;
+ return media::AudioCodec::kOpus;
case AudioTrackRecorder::CodecId::LAST:
- return media::kUnknownAudioCodec;
+ return media::AudioCodec::kUnknown;
}
NOTREACHED() << "Unsupported audio codec";
- return media::kUnknownAudioCodec;
+ return media::AudioCodec::kUnknown;
}
// Extracts the first recognised CodecId of |codecs| or CodecId::LAST if none
diff --git a/third_party/blink/renderer/modules/mediarecorder/media_recorder_handler_unittest.cc b/third_party/blink/renderer/modules/mediarecorder/media_recorder_handler_unittest.cc
index c9c6e0a..11c2360 100644
--- a/third_party/blink/renderer/modules/mediarecorder/media_recorder_handler_unittest.cc
+++ b/third_party/blink/renderer/modules/mediarecorder/media_recorder_handler_unittest.cc
@@ -567,8 +567,8 @@
media_recorder_handler_->Pause();
EXPECT_CALL(*recorder, WriteData).Times(AtLeast(1));
- media::WebmMuxer::VideoParameters params(gfx::Size(), 1, media::kCodecVP9,
- gfx::ColorSpace());
+ media::WebmMuxer::VideoParameters params(
+ gfx::Size(), 1, media::VideoCodec::kVP9, gfx::ColorSpace());
OnEncodedVideoForTesting(params, "vp9 frame", "", base::TimeTicks::Now(),
true);
@@ -601,8 +601,8 @@
EXPECT_TRUE(media_recorder_handler_->Start(0));
EXPECT_CALL(*recorder, WriteData).Times(AtLeast(1));
- media::WebmMuxer::VideoParameters params(gfx::Size(), 1, media::kCodecVP9,
- gfx::ColorSpace());
+ media::WebmMuxer::VideoParameters params(
+ gfx::Size(), 1, media::VideoCodec::kVP9, gfx::ColorSpace());
OnEncodedVideoForTesting(params, "vp9 frame", "", base::TimeTicks::Now(),
true);
@@ -673,10 +673,10 @@
static const MediaRecorderPassthroughTestParams
kMediaRecorderPassthroughTestParams[] = {
- {"video/webm;codecs=vp8", media::kCodecVP8},
- {"video/webm;codecs=vp9", media::kCodecVP9},
+ {"video/webm;codecs=vp8", media::VideoCodec::kVP8},
+ {"video/webm;codecs=vp9", media::VideoCodec::kVP9},
#if BUILDFLAG(RTC_USE_H264)
- {"video/x-matroska;codecs=avc1", media::kCodecH264},
+ {"video/x-matroska;codecs=avc1", media::VideoCodec::kH264},
#endif
};
@@ -767,21 +767,21 @@
}));
OnVideoFrameForTesting(FakeEncodedVideoFrame::Builder()
.WithKeyFrame(true)
- .WithCodec(media::kCodecVP8)
+ .WithCodec(media::VideoCodec::kVP8)
.WithData(std::string("vp8 frame"))
.BuildRefPtr());
// Switch to VP9 frames. This is expected to cause the call to OnError
// above.
OnVideoFrameForTesting(FakeEncodedVideoFrame::Builder()
.WithKeyFrame(true)
- .WithCodec(media::kCodecVP9)
+ .WithCodec(media::VideoCodec::kVP9)
.WithData(std::string("vp9 frame"))
.BuildRefPtr());
// Send one more frame to verify that continued frame of different codec
// transfer doesn't crash the media recorder.
OnVideoFrameForTesting(FakeEncodedVideoFrame::Builder()
.WithKeyFrame(true)
- .WithCodec(media::kCodecVP8)
+ .WithCodec(media::VideoCodec::kVP8)
.WithData(std::string("vp8 frame"))
.BuildRefPtr());
platform_->RunUntilIdle();
diff --git a/third_party/blink/renderer/modules/mediarecorder/video_track_recorder_unittest.cc b/third_party/blink/renderer/modules/mediarecorder/video_track_recorder_unittest.cc
index c534e793..19044c7 100644
--- a/third_party/blink/renderer/modules/mediarecorder/video_track_recorder_unittest.cc
+++ b/third_party/blink/renderer/modules/mediarecorder/video_track_recorder_unittest.cc
@@ -76,18 +76,18 @@
VideoTrackRecorder::CodecId id) {
switch (id) {
case VideoTrackRecorder::CodecId::VP8:
- return media::kCodecVP8;
+ return media::VideoCodec::kVP8;
case VideoTrackRecorder::CodecId::VP9:
- return media::kCodecVP9;
+ return media::VideoCodec::kVP9;
#if BUILDFLAG(RTC_USE_H264)
case VideoTrackRecorder::CodecId::H264:
- return media::kCodecH264;
+ return media::VideoCodec::kH264;
#endif
default:
- return media::kUnknownVideoCodec;
+ return media::VideoCodec::kUnknown;
}
NOTREACHED() << "Unsupported video codec";
- return media::kUnknownVideoCodec;
+ return media::VideoCodec::kUnknown;
}
} // namespace
diff --git a/third_party/blink/renderer/modules/mediasource/media_source.cc b/third_party/blink/renderer/modules/mediasource/media_source.cc
index 85679fec..d645de97 100644
--- a/third_party/blink/renderer/modules/mediasource/media_source.cc
+++ b/third_party/blink/renderer/modules/mediasource/media_source.cc
@@ -569,14 +569,14 @@
bool first = true;
for (const auto& codec_id : parsed_codec_ids) {
bool is_codec_ambiguous;
- media::VideoCodec video_codec = media::kUnknownVideoCodec;
+ media::VideoCodec video_codec = media::VideoCodec::kUnknown;
media::VideoCodecProfile profile;
uint8_t level = 0;
media::VideoColorSpace color_space;
if (media::ParseVideoCodecString(mime_type.Ascii(), codec_id,
&is_codec_ambiguous, &video_codec,
&profile, &level, &color_space) &&
- !is_codec_ambiguous && video_codec == media::VideoCodec::kCodecHEVC) {
+ !is_codec_ambiguous && video_codec == media::VideoCodec::kHEVC) {
continue;
}
if (first)
diff --git a/third_party/blink/renderer/modules/mediastream/webmediaplayer_ms.cc b/third_party/blink/renderer/modules/mediastream/webmediaplayer_ms.cc
index f1a5b76..05fb8d8d 100644
--- a/third_party/blink/renderer/modules/mediastream/webmediaplayer_ms.cc
+++ b/third_party/blink/renderer/modules/mediastream/webmediaplayer_ms.cc
@@ -1433,7 +1433,7 @@
// TODO(https://crbug.com/1147813) Report codec information once accessible.
watch_time_reporter_->UpdateSecondaryProperties(
media::mojom::SecondaryPlaybackProperties::New(
- media::kUnknownAudioCodec, media::kUnknownVideoCodec,
+ media::AudioCodec::kUnknown, media::VideoCodec::kUnknown,
media::AudioCodecProfile::kUnknown,
media::VideoCodecProfile::VIDEO_CODEC_PROFILE_UNKNOWN,
media::AudioDecoderType::kUnknown, media::VideoDecoderType::kUnknown,
diff --git a/third_party/blink/renderer/modules/webcodecs/audio_decoder.cc b/third_party/blink/renderer/modules/webcodecs/audio_decoder.cc
index 956a696..79da6c1 100644
--- a/third_party/blink/renderer/modules/webcodecs/audio_decoder.cc
+++ b/third_party/blink/renderer/modules/webcodecs/audio_decoder.cc
@@ -4,7 +4,7 @@
#include "third_party/blink/renderer/modules/webcodecs/audio_decoder.h"
-#include "base/metrics/histogram_macros.h"
+#include "base/metrics/histogram_functions.h"
#include "media/base/audio_codecs.h"
#include "media/base/audio_decoder.h"
#include "media/base/audio_decoder_config.h"
@@ -39,14 +39,14 @@
// Match codec strings from the codec registry:
// https://www.w3.org/TR/webcodecs-codec-registry/#audio-codec-registry
if (config.codec() == "ulaw") {
- out_audio_type = {media::kCodecPCM_MULAW};
+ out_audio_type = {media::AudioCodec::kPCM_MULAW};
return true;
} else if (config.codec() == "alaw") {
- out_audio_type = {media::kCodecPCM_ALAW};
+ out_audio_type = {media::AudioCodec::kPCM_ALAW};
return true;
}
- media::AudioCodec codec = media::kUnknownAudioCodec;
+ media::AudioCodec codec = media::AudioCodec::kUnknown;
bool is_codec_ambiguous = true;
const bool parse_succeeded = ParseAudioCodecString(
"", config.codec().Utf8(), &is_codec_ambiguous, &codec);
@@ -105,8 +105,8 @@
std::vector<MediaConfigType>{media_config});
MEDIA_LOG(INFO, media_log)
<< "Initialized AudioDecoder: " << media_config.AsHumanReadableString();
- UMA_HISTOGRAM_ENUMERATION("Blink.WebCodecs.AudioDecoder.Codec",
- media_config.codec(), media::kAudioCodecMax + 1);
+ base::UmaHistogramEnumeration("Blink.WebCodecs.AudioDecoder.Codec",
+ media_config.codec());
}
// static
diff --git a/third_party/blink/renderer/modules/webcodecs/audio_decoder_broker_test.cc b/third_party/blink/renderer/modules/webcodecs/audio_decoder_broker_test.cc
index 0de600b9..a9bf13d 100644
--- a/third_party/blink/renderer/modules/webcodecs/audio_decoder_broker_test.cc
+++ b/third_party/blink/renderer/modules/webcodecs/audio_decoder_broker_test.cc
@@ -41,7 +41,7 @@
namespace {
// Constants to specify the type of audio data used.
-constexpr media::AudioCodec kCodec = media::kCodecVorbis;
+constexpr media::AudioCodec kCodec = media::AudioCodec::kVorbis;
constexpr media::SampleFormat kSampleFormat = media::kSampleFormatPlanarF32;
constexpr media::ChannelLayout kChannelLayout = media::CHANNEL_LAYOUT_STEREO;
constexpr int kChannels = 2;
@@ -328,8 +328,9 @@
// Use an MpegH config to prevent FFmpeg from being selected.
InitializeDecoder(media::AudioDecoderConfig(
- media::kCodecMpegHAudio, kSampleFormat, kChannelLayout, kSamplesPerSecond,
- media::EmptyExtraData(), media::EncryptionScheme::kUnencrypted));
+ media::AudioCodec::kMpegHAudio, kSampleFormat, kChannelLayout,
+ kSamplesPerSecond, media::EmptyExtraData(),
+ media::EncryptionScheme::kUnencrypted));
EXPECT_EQ(GetDecoderType(), media::AudioDecoderType::kTesting);
// Using vorbis buffer here because its easy and the fake decoder generates
diff --git a/third_party/blink/renderer/modules/webcodecs/audio_encoder.cc b/third_party/blink/renderer/modules/webcodecs/audio_encoder.cc
index 484e866..236d2738 100644
--- a/third_party/blink/renderer/modules/webcodecs/audio_encoder.cc
+++ b/third_party/blink/renderer/modules/webcodecs/audio_encoder.cc
@@ -7,7 +7,7 @@
#include <cinttypes>
#include <limits>
-#include "base/metrics/histogram_macros.h"
+#include "base/metrics/histogram_functions.h"
#include "base/numerics/safe_conversions.h"
#include "base/trace_event/common/trace_event_common.h"
#include "base/trace_event/trace_event.h"
@@ -44,7 +44,7 @@
}
auto* result = MakeGarbageCollected<AudioEncoderTraits::ParsedConfig>();
- result->codec = media::kUnknownAudioCodec;
+ result->codec = media::AudioCodec::kUnknown;
bool is_codec_ambiguous = true;
bool parse_succeeded = ParseAudioCodecString(
"", config->codec().Utf8(), &is_codec_ambiguous, &result->codec);
@@ -90,7 +90,7 @@
bool VerifyCodecSupportStatic(AudioEncoderTraits::ParsedConfig* config,
ExceptionState* exception_state) {
switch (config->codec) {
- case media::kCodecOpus: {
+ case media::AudioCodec::kOpus: {
if (config->options.channels > 2) {
// Our Opus implementation only supports up to 2 channels
if (exception_state) {
@@ -170,7 +170,7 @@
DCHECK_NE(state_.AsEnum(), V8CodecState::Enum::kClosed);
DCHECK_EQ(request->type, Request::Type::kConfigure);
DCHECK(active_config_);
- DCHECK_EQ(active_config_->codec, media::kCodecOpus);
+ DCHECK_EQ(active_config_->codec, media::AudioCodec::kOpus);
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
request->StartTracing();
@@ -196,8 +196,8 @@
self->HandleError(
self->logger_->MakeException("Encoding error.", status));
} else {
- UMA_HISTOGRAM_ENUMERATION("Blink.WebCodecs.AudioEncoder.Codec", codec,
- media::kAudioCodecMax + 1);
+ base::UmaHistogramEnumeration("Blink.WebCodecs.AudioEncoder.Codec",
+ codec);
}
req->EndTracing();
diff --git a/third_party/blink/renderer/modules/webcodecs/audio_encoder.h b/third_party/blink/renderer/modules/webcodecs/audio_encoder.h
index dc6b8cc..5c0c56e 100644
--- a/third_party/blink/renderer/modules/webcodecs/audio_encoder.h
+++ b/third_party/blink/renderer/modules/webcodecs/audio_encoder.h
@@ -26,7 +26,7 @@
class MODULES_EXPORT AudioEncoderTraits {
public:
struct ParsedConfig final : public GarbageCollected<ParsedConfig> {
- media::AudioCodec codec = media::kUnknownAudioCodec;
+ media::AudioCodec codec = media::AudioCodec::kUnknown;
media::AudioEncoder::Options options;
String codec_string;
diff --git a/third_party/blink/renderer/modules/webcodecs/video_decoder.cc b/third_party/blink/renderer/modules/webcodecs/video_decoder.cc
index e4b11dd..69de6ae 100644
--- a/third_party/blink/renderer/modules/webcodecs/video_decoder.cc
+++ b/third_party/blink/renderer/modules/webcodecs/video_decoder.cc
@@ -7,7 +7,7 @@
#include <utility>
#include <vector>
-#include "base/metrics/histogram_macros.h"
+#include "base/metrics/histogram_functions.h"
#include "base/time/time.h"
#include "media/base/decoder_buffer.h"
#include "media/base/limits.h"
@@ -82,7 +82,7 @@
media::VideoType& out_video_type,
String& out_console_message) {
bool is_codec_ambiguous = true;
- media::VideoCodec codec = media::kUnknownVideoCodec;
+ media::VideoCodec codec = media::VideoCodec::kUnknown;
media::VideoCodecProfile profile = media::VIDEO_CODEC_PROFILE_UNKNOWN;
media::VideoColorSpace color_space = media::VideoColorSpace::REC709();
uint8_t level = 0;
@@ -225,8 +225,8 @@
vpx_codec_stream_info_t stream_info = {0};
stream_info.sz = sizeof(vpx_codec_stream_info_t);
auto status = vpx_codec_peek_stream_info(
- codec == media::kCodecVP8 ? &vpx_codec_vp8_dx_algo
- : &vpx_codec_vp9_dx_algo,
+ codec == media::VideoCodec::kVP8 ? &vpx_codec_vp8_dx_algo
+ : &vpx_codec_vp9_dx_algo,
buffer.data(), static_cast<uint32_t>(buffer.data_size()), &stream_info);
*is_key_frame = (status == VPX_CODEC_OK) && stream_info.is_kf;
#endif
@@ -286,8 +286,8 @@
std::vector<MediaConfigType>{media_config});
MEDIA_LOG(INFO, media_log)
<< "Initialized VideoDecoder: " << media_config.AsHumanReadableString();
- UMA_HISTOGRAM_ENUMERATION("Blink.WebCodecs.VideoDecoder.Codec",
- media_config.codec(), media::kVideoCodecMax + 1);
+ base::UmaHistogramEnumeration("Blink.WebCodecs.VideoDecoder.Codec",
+ media_config.codec());
}
// static
@@ -426,7 +426,7 @@
}
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- if (video_type.codec == media::kCodecH264 && !extra_data.empty()) {
+ if (video_type.codec == media::VideoCodec::kH264 && !extra_data.empty()) {
out_h264_avcc =
std::make_unique<media::mp4::AVCDecoderConfigurationRecord>();
out_h264_converter =
@@ -442,7 +442,7 @@
out_h264_converter.reset();
}
#else
- if (video_type.codec == media::kCodecH264) {
+ if (video_type.codec == media::VideoCodec::kH264) {
out_console_message = "H.264 decoding is not supported.";
return CodecConfigEval::kUnsupported;
}
@@ -543,12 +543,12 @@
bool is_key_frame = chunk.type() == "key";
if (verify_key_frame) {
- if (current_codec_ == media::kCodecVP9 ||
- current_codec_ == media::kCodecVP8) {
+ if (current_codec_ == media::VideoCodec::kVP9 ||
+ current_codec_ == media::VideoCodec::kVP8) {
ParseVpxKeyFrame(*decoder_buffer, current_codec_, &is_key_frame);
- } else if (current_codec_ == media::kCodecAV1) {
+ } else if (current_codec_ == media::VideoCodec::kAV1) {
ParseAv1KeyFrame(*decoder_buffer, &is_key_frame);
- } else if (current_codec_ == media::kCodecH264) {
+ } else if (current_codec_ == media::VideoCodec::kH264) {
ParseH264KeyFrame(*decoder_buffer, &is_key_frame);
}
diff --git a/third_party/blink/renderer/modules/webcodecs/video_decoder.h b/third_party/blink/renderer/modules/webcodecs/video_decoder.h
index d2f1992..2a78268 100644
--- a/third_party/blink/renderer/modules/webcodecs/video_decoder.h
+++ b/third_party/blink/renderer/modules/webcodecs/video_decoder.h
@@ -127,7 +127,7 @@
std::unique_ptr<media::mp4::AVCDecoderConfigurationRecord> h264_avcc_;
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
- media::VideoCodec current_codec_ = media::kUnknownVideoCodec;
+ media::VideoCodec current_codec_ = media::VideoCodec::kUnknown;
private:
// DecoderTemplate implementation.
diff --git a/third_party/blink/renderer/modules/webcodecs/video_encoder.cc b/third_party/blink/renderer/modules/webcodecs/video_encoder.cc
index 0dae13b..7f1d26f3 100644
--- a/third_party/blink/renderer/modules/webcodecs/video_encoder.cc
+++ b/third_party/blink/renderer/modules/webcodecs/video_encoder.cc
@@ -260,7 +260,7 @@
IDLEnumAsString(config->hardwareAcceleration()));
bool is_codec_ambiguous = true;
- result->codec = media::kUnknownVideoCodec;
+ result->codec = media::VideoCodec::kUnknown;
result->profile = media::VIDEO_CODEC_PROFILE_UNKNOWN;
// TODO(crbug.com/1138680): Default to sRGB if encoding an RGB format.
result->color_space = media::VideoColorSpace::REC709();
@@ -287,7 +287,7 @@
return result;
// We should only get here with H264 codecs.
- if (result->codec != media::VideoCodec::kCodecH264) {
+ if (result->codec != media::VideoCodec::kH264) {
exception_state.ThrowTypeError(
"'avc' field can only be used with AVC codecs");
return nullptr;
@@ -308,10 +308,10 @@
bool VerifyCodecSupportStatic(VideoEncoderTraits::ParsedConfig* config,
ExceptionState* exception_state) {
switch (config->codec) {
- case media::kCodecVP8:
+ case media::VideoCodec::kVP8:
break;
- case media::kCodecVP9:
+ case media::VideoCodec::kVP9:
if (config->profile == media::VideoCodecProfile::VP9PROFILE_PROFILE1 ||
config->profile == media::VideoCodecProfile::VP9PROFILE_PROFILE3) {
if (exception_state) {
@@ -322,7 +322,7 @@
}
break;
- case media::kCodecH264:
+ case media::VideoCodec::kH264:
break;
default:
@@ -474,12 +474,12 @@
case HardwarePreference::kPreferSoftware: {
std::unique_ptr<media::VideoEncoder> result;
switch (config.codec) {
- case media::kCodecVP8:
- case media::kCodecVP9:
+ case media::VideoCodec::kVP8:
+ case media::VideoCodec::kVP9:
result = CreateVpxVideoEncoder();
UpdateEncoderLog("VpxVideoEncoder", false);
break;
- case media::kCodecH264:
+ case media::VideoCodec::kH264:
result = CreateOpenH264VideoEncoder();
UpdateEncoderLog("OpenH264VideoEncoder", false);
break;
@@ -535,7 +535,7 @@
"Encoder initialization error.", status));
} else {
-  UMA_HISTOGRAM_ENUMERATION("Blink.WebCodecs.VideoEncoder.Codec", codec,
-                            media::kVideoCodecMax + 1);
+  UMA_HISTOGRAM_ENUMERATION("Blink.WebCodecs.VideoEncoder.Codec",
+                            codec);
}
req->EndTracing();
@@ -861,11 +861,11 @@
VideoEncoderTraits::ParsedConfig* config) {
std::unique_ptr<media::VideoEncoder> software_encoder;
switch (config->codec) {
- case media::kCodecVP8:
- case media::kCodecVP9:
+ case media::VideoCodec::kVP8:
+ case media::VideoCodec::kVP9:
software_encoder = CreateVpxVideoEncoder();
break;
- case media::kCodecH264:
+ case media::VideoCodec::kH264:
software_encoder = CreateOpenH264VideoEncoder();
break;
default:
diff --git a/third_party/blink/renderer/platform/media/power_status_helper.cc b/third_party/blink/renderer/platform/media/power_status_helper.cc
index 4aa5009..fa16c8e0 100644
--- a/third_party/blink/renderer/platform/media/power_status_helper.cc
+++ b/third_party/blink/renderer/platform/media/power_status_helper.cc
@@ -109,7 +109,7 @@
int bucket = 0;
- if (codec == media::VideoCodec::kCodecH264)
+ if (codec == media::VideoCodec::kH264)
bucket |= Bits::kCodecBitsH264;
else if (profile == media::VP9PROFILE_PROFILE0)
bucket |= Bits::kCodecBitsVP9Profile0;
diff --git a/third_party/blink/renderer/platform/media/power_status_helper_unittest.cc b/third_party/blink/renderer/platform/media/power_status_helper_unittest.cc
index af97b2a..6a57f48 100644
--- a/third_party/blink/renderer/platform/media/power_status_helper_unittest.cc
+++ b/third_party/blink/renderer/platform/media/power_status_helper_unittest.cc
@@ -110,7 +110,7 @@
media::PipelineMetadata metadata;
metadata.has_video = true;
metadata.video_decoder_config = media::VideoDecoderConfig(
- media::kCodecH264, media::H264PROFILE_MAIN,
+ media::VideoCodec::kH264, media::H264PROFILE_MAIN,
media::VideoDecoderConfig::AlphaMode::kIsOpaque,
media::VideoColorSpace(), media::VideoTransformation(),
gfx::Size(0, 0), /* coded_size */
@@ -365,17 +365,17 @@
media::VideoCodec codec;
media::VideoCodecProfile profile;
if (codec_bits == PowerStatusHelper::Bits::kCodecBitsH264) {
- codec = media::kCodecH264;
+ codec = media::VideoCodec::kH264;
profile = media::H264PROFILE_MAIN;
} else if (codec_bits == PowerStatusHelper::Bits::kCodecBitsVP9Profile0) {
- codec = media::kCodecVP9;
+ codec = media::VideoCodec::kVP9;
profile = media::VP9PROFILE_PROFILE0;
} else if (codec_bits == PowerStatusHelper::Bits::kCodecBitsVP9Profile2) {
- codec = media::kCodecVP9;
+ codec = media::VideoCodec::kVP9;
profile = media::VP9PROFILE_PROFILE2;
} else {
// Some unsupported codec.
- codec = media::kCodecVP8;
+ codec = media::VideoCodec::kVP8;
profile = media::VIDEO_CODEC_PROFILE_UNKNOWN;
expect_bucket = false;
}
diff --git a/third_party/blink/renderer/platform/media/watch_time_reporter_unittest.cc b/third_party/blink/renderer/platform/media/watch_time_reporter_unittest.cc
index fddf4b96..ffa942e 100644
--- a/third_party/blink/renderer/platform/media/watch_time_reporter_unittest.cc
+++ b/third_party/blink/renderer/platform/media/watch_time_reporter_unittest.cc
@@ -1097,8 +1097,8 @@
Initialize(true, true, kSizeJustRight);
auto properties = media::mojom::SecondaryPlaybackProperties::New(
- has_audio_ ? media::kCodecAAC : media::kUnknownAudioCodec,
- has_video_ ? media::kCodecH264 : media::kUnknownVideoCodec,
+ has_audio_ ? media::AudioCodec::kAAC : media::AudioCodec::kUnknown,
+ has_video_ ? media::VideoCodec::kH264 : media::VideoCodec::kUnknown,
has_audio_ ? media::AudioCodecProfile::kXHE_AAC
: media::AudioCodecProfile::kUnknown,
has_video_ ? media::H264PROFILE_MAIN : media::VIDEO_CODEC_PROFILE_UNKNOWN,
@@ -1143,7 +1143,7 @@
.Times((has_audio_ && has_video_) ? 3 : 2);
wtr_->UpdateSecondaryProperties(
media::mojom::SecondaryPlaybackProperties::New(
- media::kUnknownAudioCodec, media::kUnknownVideoCodec,
+ media::AudioCodec::kUnknown, media::VideoCodec::kUnknown,
media::AudioCodecProfile::kUnknown,
media::VIDEO_CODEC_PROFILE_UNKNOWN, media::AudioDecoderType::kUnknown,
media::VideoDecoderType::kUnknown,
@@ -1169,7 +1169,7 @@
.Times((has_audio_ && has_video_) ? 3 : 2);
wtr_->UpdateSecondaryProperties(
media::mojom::SecondaryPlaybackProperties::New(
- media::kUnknownAudioCodec, media::kUnknownVideoCodec,
+ media::AudioCodec::kUnknown, media::VideoCodec::kUnknown,
media::AudioCodecProfile::kUnknown,
media::VIDEO_CODEC_PROFILE_UNKNOWN, media::AudioDecoderType::kUnknown,
media::VideoDecoderType::kUnknown,
diff --git a/third_party/blink/renderer/platform/media/web_media_player_impl.cc b/third_party/blink/renderer/platform/media/web_media_player_impl.cc
index 18b3ef5..4e02831 100644
--- a/third_party/blink/renderer/platform/media/web_media_player_impl.cc
+++ b/third_party/blink/renderer/platform/media/web_media_player_impl.cc
@@ -3863,8 +3863,8 @@
// NOTE: this is a very bad way to do this, since it memorizes the order of
// features in the task. However, it'll do for now.
learning::FeatureVector features;
- features.push_back(
- learning::FeatureValue(pipeline_metadata_.video_decoder_config.codec()));
+ features.push_back(learning::FeatureValue(
+ static_cast<int>(pipeline_metadata_.video_decoder_config.codec())));
features.push_back(learning::FeatureValue(
pipeline_metadata_.video_decoder_config.profile()));
features.push_back(
diff --git a/third_party/blink/renderer/platform/peerconnection/rtc_video_decoder_adapter.cc b/third_party/blink/renderer/platform/peerconnection/rtc_video_decoder_adapter.cc
index a1113c7..b48aa00 100644
--- a/third_party/blink/renderer/platform/peerconnection/rtc_video_decoder_adapter.cc
+++ b/third_party/blink/renderer/platform/peerconnection/rtc_video_decoder_adapter.cc
@@ -137,7 +137,7 @@
return nullptr;
// Bail early for unknown codecs.
- if (WebRtcToMediaVideoCodec(video_codec_type) == media::kUnknownVideoCodec)
+ if (WebRtcToMediaVideoCodec(video_codec_type) == media::VideoCodec::kUnknown)
return nullptr;
// Avoid the thread hop if the decoder is known not to support the config.
diff --git a/third_party/blink/renderer/platform/peerconnection/rtc_video_decoder_factory.cc b/third_party/blink/renderer/platform/peerconnection/rtc_video_decoder_factory.cc
index a589c50..30e9c20 100644
--- a/third_party/blink/renderer/platform/peerconnection/rtc_video_decoder_factory.cc
+++ b/third_party/blink/renderer/platform/peerconnection/rtc_video_decoder_factory.cc
@@ -45,14 +45,14 @@
};
constexpr std::array<CodecConfig, 8> kCodecConfigs = {{
- {media::kCodecVP8, media::VP8PROFILE_ANY},
- {media::kCodecVP9, media::VP9PROFILE_PROFILE0},
- {media::kCodecVP9, media::VP9PROFILE_PROFILE1},
- {media::kCodecVP9, media::VP9PROFILE_PROFILE2},
- {media::kCodecH264, media::H264PROFILE_BASELINE},
- {media::kCodecH264, media::H264PROFILE_MAIN},
- {media::kCodecH264, media::H264PROFILE_HIGH},
- {media::kCodecAV1, media::AV1PROFILE_PROFILE_MAIN},
+ {media::VideoCodec::kVP8, media::VP8PROFILE_ANY},
+ {media::VideoCodec::kVP9, media::VP9PROFILE_PROFILE0},
+ {media::VideoCodec::kVP9, media::VP9PROFILE_PROFILE1},
+ {media::VideoCodec::kVP9, media::VP9PROFILE_PROFILE2},
+ {media::VideoCodec::kH264, media::H264PROFILE_BASELINE},
+ {media::VideoCodec::kH264, media::H264PROFILE_MAIN},
+ {media::VideoCodec::kH264, media::H264PROFILE_HIGH},
+ {media::VideoCodec::kAV1, media::AV1PROFILE_PROFILE_MAIN},
}};
// Translate from media::VideoDecoderConfig to webrtc::SdpVideoFormat, or return
@@ -60,11 +60,11 @@
absl::optional<webrtc::SdpVideoFormat> VdcToWebRtcFormat(
const media::VideoDecoderConfig& config) {
switch (config.codec()) {
- case media::VideoCodec::kCodecAV1:
+ case media::VideoCodec::kAV1:
return webrtc::SdpVideoFormat("AV1X");
- case media::VideoCodec::kCodecVP8:
+ case media::VideoCodec::kVP8:
return webrtc::SdpVideoFormat("VP8");
- case media::VideoCodec::kCodecVP9: {
+ case media::VideoCodec::kVP9: {
webrtc::VP9Profile vp9_profile;
switch (config.profile()) {
case media::VP9PROFILE_PROFILE0:
@@ -84,7 +84,7 @@
"VP9", {{webrtc::kVP9FmtpProfileId,
webrtc::VP9ProfileToString(vp9_profile)}});
}
- case media::VideoCodec::kCodecH264: {
+ case media::VideoCodec::kH264: {
webrtc::H264::Profile h264_profile;
switch (config.profile()) {
case media::H264PROFILE_BASELINE:
@@ -275,9 +275,9 @@
// configuration is valid (e.g., H264 doesn't support SVC at all and VP8
// doesn't support spatial layers).
if (!spatial_layers ||
- (codec != media::kCodecVP8 && codec != media::kCodecVP9 &&
- codec != media::kCodecAV1) ||
- (codec == media::kCodecVP8 && *spatial_layers > 1)) {
+ (codec != media::VideoCodec::kVP8 && codec != media::VideoCodec::kVP9 &&
+ codec != media::VideoCodec::kAV1) ||
+ (codec == media::VideoCodec::kVP8 && *spatial_layers > 1)) {
// Invalid scalability_mode, return unsupported.
return {false, false};
}
@@ -285,7 +285,7 @@
// Most HW decoders cannot handle spatial layers, so return false if the
// configuration contains spatial layers unless we explicitly know that the
// HW decoder can handle spatial layers.
- if (codec == media::kCodecVP9 && *spatial_layers > 1 &&
+ if (codec == media::VideoCodec::kVP9 && *spatial_layers > 1 &&
!RTCVideoDecoderAdapter::Vp9HwSupportForSpatialLayers()) {
return {false, false};
}
diff --git a/third_party/blink/renderer/platform/peerconnection/rtc_video_decoder_fallback_recorder.cc b/third_party/blink/renderer/platform/peerconnection/rtc_video_decoder_fallback_recorder.cc
index b088702..2903767 100644
--- a/third_party/blink/renderer/platform/peerconnection/rtc_video_decoder_fallback_recorder.cc
+++ b/third_party/blink/renderer/platform/peerconnection/rtc_video_decoder_fallback_recorder.cc
@@ -12,15 +12,15 @@
media::VideoCodec codec,
RTCVideoDecoderFallbackReason fallback_reason) {
switch (codec) {
- case media::VideoCodec::kCodecH264:
+ case media::VideoCodec::kH264:
base::UmaHistogramEnumeration("Media.RTCVideoDecoderFallbackReason.H264",
fallback_reason);
break;
- case media::VideoCodec::kCodecVP8:
+ case media::VideoCodec::kVP8:
base::UmaHistogramEnumeration("Media.RTCVideoDecoderFallbackReason.Vp8",
fallback_reason);
break;
- case media::VideoCodec::kCodecVP9:
+ case media::VideoCodec::kVP9:
base::UmaHistogramEnumeration("Media.RTCVideoDecoderFallbackReason.Vp9",
fallback_reason);
break;
diff --git a/third_party/blink/renderer/platform/peerconnection/rtc_video_decoder_stream_adapter.cc b/third_party/blink/renderer/platform/peerconnection/rtc_video_decoder_stream_adapter.cc
index e1bd4a9..3b410da4 100644
--- a/third_party/blink/renderer/platform/peerconnection/rtc_video_decoder_stream_adapter.cc
+++ b/third_party/blink/renderer/platform/peerconnection/rtc_video_decoder_stream_adapter.cc
@@ -219,7 +219,7 @@
return nullptr;
// Bail early for unknown codecs.
- if (WebRtcToMediaVideoCodec(video_codec_type) == media::kUnknownVideoCodec)
+ if (WebRtcToMediaVideoCodec(video_codec_type) == media::VideoCodec::kUnknown)
return nullptr;
// Avoid the thread hop if the decoder is known not to support the config.
diff --git a/third_party/blink/renderer/platform/peerconnection/rtc_video_encoder_factory.cc b/third_party/blink/renderer/platform/peerconnection/rtc_video_encoder_factory.cc
index bd9dd77..28850ec 100644
--- a/third_party/blink/renderer/platform/peerconnection/rtc_video_encoder_factory.cc
+++ b/third_party/blink/renderer/platform/peerconnection/rtc_video_encoder_factory.cc
@@ -249,9 +249,9 @@
// configuration is valid (e.g., H264 doesn't support SVC at all and VP8
// doesn't support spatial layers).
if (!spatial_layers ||
- (codec != media::kCodecVP8 && codec != media::kCodecVP9 &&
- codec != media::kCodecAV1) ||
- (codec == media::kCodecVP8 && *spatial_layers > 1)) {
+ (codec != media::VideoCodec::kVP8 && codec != media::VideoCodec::kVP9 &&
+ codec != media::VideoCodec::kAV1) ||
+ (codec == media::VideoCodec::kVP8 && *spatial_layers > 1)) {
// Invalid scalability_mode, return unsupported.
return {false, false};
}
@@ -259,7 +259,7 @@
// Most HW encoders cannot handle spatial layers, so return false if the
// configuration contains spatial layers and spatial layers are not
// supported.
- if (codec == media::kCodecVP9 && *spatial_layers > 1 &&
+ if (codec == media::VideoCodec::kVP9 && *spatial_layers > 1 &&
!RTCVideoEncoder::Vp9HwSupportForSpatialLayers()) {
return {false, false};
}
diff --git a/third_party/blink/renderer/platform/webrtc/webrtc_video_utils.cc b/third_party/blink/renderer/platform/webrtc/webrtc_video_utils.cc
index 7f291c7..f4897e7 100644
--- a/third_party/blink/renderer/platform/webrtc/webrtc_video_utils.cc
+++ b/third_party/blink/renderer/platform/webrtc/webrtc_video_utils.cc
@@ -48,15 +48,15 @@
media::VideoCodec WebRtcToMediaVideoCodec(webrtc::VideoCodecType codec) {
switch (codec) {
case webrtc::kVideoCodecAV1:
- return media::kCodecAV1;
+ return media::VideoCodec::kAV1;
case webrtc::kVideoCodecVP8:
- return media::kCodecVP8;
+ return media::VideoCodec::kVP8;
case webrtc::kVideoCodecVP9:
- return media::kCodecVP9;
+ return media::VideoCodec::kVP9;
case webrtc::kVideoCodecH264:
- return media::kCodecH264;
+ return media::VideoCodec::kH264;
default:
- return media::kUnknownVideoCodec;
+ return media::VideoCodec::kUnknown;
}
}