diff --git a/media/capture/mojom/image_capture.mojom b/media/capture/mojom/image_capture.mojom
index 3c90538215a0c..62e33e8e5d7d4 100644
--- a/media/capture/mojom/image_capture.mojom
+++ b/media/capture/mojom/image_capture.mojom
@@ -201,6 +201,12 @@ struct PhotoState {
   // non-empty.
   [MinVersion=4] bool current_background_segmentation_mask_state;
+
+  [MinVersion=5] bool supported_video_stabilization;
+
+  [MinVersion=5] bool video_stabilization;
 };
 
 // Equivalent to idl Point2D.
@@ -281,6 +287,8 @@ struct PhotoSettings {
   EyeGazeCorrectionMode? eye_gaze_correction_mode;
   [MinVersion=4]
   bool? background_segmentation_mask_state;
+  [MinVersion=5]
+  bool? video_stabilization;
 };
 
 // This is a mojo move-only equivalent of a Blob, i.e. MIME type and Data.
diff --git a/media/capture/mojom/image_capture_types.cc b/media/capture/mojom/image_capture_types.cc
index 2fe03af371935..43b82f498c166 100644
--- a/media/capture/mojom/image_capture_types.cc
+++ b/media/capture/mojom/image_capture_types.cc
@@ -29,6 +29,8 @@ media::mojom::PhotoStatePtr CreateEmptyPhotoState() {
   photo_capabilities->supported_eye_gaze_correction_modes = {};
   photo_capabilities->supported_face_framing_modes = {};
   photo_capabilities->supported_background_segmentation_mask_states = {};
+  photo_capabilities->supported_video_stabilization = false;
+  photo_capabilities->video_stabilization = false;
   return photo_capabilities;
 }
 
diff --git a/media/capture/video/android/java/src/org/chromium/media/VideoCapture.java b/media/capture/video/android/java/src/org/chromium/media/VideoCapture.java
index ab6185bfcb076..5cfde931a9020 100644
--- a/media/capture/video/android/java/src/org/chromium/media/VideoCapture.java
+++ b/media/capture/video/android/java/src/org/chromium/media/VideoCapture.java
@@ -129,7 +129,10 @@ public abstract class VideoCapture {
             int fillLightMode,
             boolean hasTorch,
             boolean torch,
-            double colorTemperature);
+            double colorTemperature,
+            boolean hasVideoStabilization,
+            boolean videoStabilization);
 
     // Replies by calling VideoCaptureJni.get().onPhotoTaken().
    @CalledByNative
diff --git a/media/capture/video/android/java/src/org/chromium/media/VideoCaptureCamera.java b/media/capture/video/android/java/src/org/chromium/media/VideoCaptureCamera.java
index a636f30cd87d2..5a301509ffbb4 100644
--- a/media/capture/video/android/java/src/org/chromium/media/VideoCaptureCamera.java
+++ b/media/capture/video/android/java/src/org/chromium/media/VideoCaptureCamera.java
@@ -743,7 +743,10 @@ public class VideoCaptureCamera extends VideoCapture
             int fillLightMode,
             boolean hasTorch,
             boolean torch,
-            double colorTemperature) {
+            double colorTemperature,
+            boolean hasVideoStabilization,
+            boolean videoStabilization) {
        android.hardware.Camera.Parameters parameters = getCameraParameters(mCamera);
        if (parameters == null) {
            mCamera = null;
diff --git a/media/capture/video/android/java/src/org/chromium/media/VideoCaptureCamera2.java b/media/capture/video/android/java/src/org/chromium/media/VideoCaptureCamera2.java
index d4f7c3152ce24..e5c66a064893c 100644
--- a/media/capture/video/android/java/src/org/chromium/media/VideoCaptureCamera2.java
+++ b/media/capture/video/android/java/src/org/chromium/media/VideoCaptureCamera2.java
@@ -57,6 +57,16 @@ import java.util.List;
 @JNINamespace("media")
 @NullMarked
 public class VideoCaptureCamera2 extends VideoCapture {
+
+    // Returns true if |array| contains |element|. Used for the int[] mode
+    // lists returned by CameraCharacteristics.
+    private static boolean arrayContains(int[] array, int element) {
+        for (int e : array) {
+            if (e == element) {
+                return true;
+            }
+        }
+        return false;
+    }
+
     // Inner class to extend a CameraDevice state change listener.
     private class CrStateListener extends CameraDevice.StateCallback {
         @Override
@@ -745,6 +755,16 @@ public class VideoCaptureCamera2 extends VideoCapture {
             builder.setFillLightModeArray(integerArrayListToArray(modes));
         }
 
+        final int[] stabilizationModes =
+                cameraCharacteristics.get(
+                        CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES);
+        if (stabilizationModes != null
+                && arrayContains(
+                        stabilizationModes, CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON)) {
+            final Integer currentMode =
+                    mPreviewRequest.get(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE);
+            final boolean videoStabilizationOn =
+                    currentMode != null
+                            && currentMode == CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON;
+            builder.setBool(PhotoCapabilityBool.SUPPORTS_VIDEO_STABILIZATION, true);
+            builder.setBool(PhotoCapabilityBool.VIDEO_STABILIZATION, videoStabilizationOn);
+        }
         onGetPhotoCapabilitiesReply(VideoCaptureCamera2.this, mCallbackId, builder.build());
     }
 }
@@ -768,6 +788,8 @@ public class VideoCaptureCamera2 extends VideoCapture {
         public final boolean hasTorch;
         public final boolean torch;
         public final double colorTemperature;
+        public final boolean hasVideoStabilization;
+        public final boolean videoStabilization;
 
         public PhotoOptions(
                 double zoom,
@@ -787,7 +809,10 @@ public class VideoCaptureCamera2 extends VideoCapture {
                 int fillLightMode,
                 boolean hasTorch,
                 boolean torch,
-                double colorTemperature) {
+                double colorTemperature,
+                boolean hasVideoStabilization,
+                boolean videoStabilization) {
             this.zoom = zoom;
             this.focusMode = focusMode;
             this.currentFocusDistance = currentFocusDistance;
@@ -806,6 +831,8 @@ public class VideoCaptureCamera2 extends VideoCapture {
             this.hasTorch = hasTorch;
             this.torch = torch;
             this.colorTemperature = colorTemperature;
+            this.hasVideoStabilization = hasVideoStabilization;
+            this.videoStabilization = videoStabilization;
         }
     }
@@ -936,6 +963,8 @@ public class VideoCaptureCamera2 extends VideoCapture {
             }
             if (mOptions.hasTorch) mTorch = mOptions.torch;
 
+            if (mOptions.hasVideoStabilization) mVideoStabilization = mOptions.videoStabilization;
+
             if (mPreviewSession != null) {
                 assert mPreviewRequestBuilder != null : "preview request builder";
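For reference, here is a minimal standalone sketch (not part of this change) of the two Camera2 reads the hunks above rely on: CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES to learn whether stabilization can be turned on at all, and CONTROL_VIDEO_STABILIZATION_MODE on an existing request to learn whether it currently is. The helper class and method names below are illustrative only.

    import android.hardware.camera2.CameraCharacteristics;
    import android.hardware.camera2.CameraMetadata;
    import android.hardware.camera2.CaptureRequest;

    final class VideoStabilizationInfo {
        // True if the camera advertises CONTROL_VIDEO_STABILIZATION_MODE_ON.
        static boolean isSupported(CameraCharacteristics characteristics) {
            int[] modes =
                    characteristics.get(
                            CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES);
            if (modes == null) return false;
            for (int mode : modes) {
                if (mode == CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON) return true;
            }
            return false;
        }

        // True if the given request currently has stabilization turned on. The
        // key may be absent if it was never set on the request, hence the null
        // check before unboxing.
        static boolean isEnabled(CaptureRequest request) {
            Integer mode = request.get(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE);
            return mode != null && mode == CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON;
        }

        private VideoStabilizationInfo() {}
    }

Both keys are plain Camera2 metadata that has been part of the API since level 21, so no new permissions or minimum SDK requirements appear to be involved.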
@@ -1139,6 +1168,8 @@ public class VideoCaptureCamera2 extends VideoCapture {
     private boolean mTorch;
     private boolean mEnableFaceDetection;
+    // Enabled by default to preserve the previous always-on stabilization behavior.
+    private boolean mVideoStabilization = true;
 
     // Service function to grab CameraCharacteristics and handle exceptions.
     private static @Nullable CameraCharacteristics getCameraCharacteristics(int id) {
         final CameraManager manager =
@@ -1218,17 +1249,7 @@ public class VideoCaptureCamera2 extends VideoCapture {
         // https://developer.android.com/reference/android/hardware/camera2/CaptureRequest.html#CONTROL_VIDEO_STABILIZATION_MODE
         final CameraCharacteristics cameraCharacteristics = getCameraCharacteristics(mId);
         if (cameraCharacteristics == null) return false;
-        final int[] stabilizationModes =
-                cameraCharacteristics.get(
-                        CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES);
-        for (int mode : stabilizationModes) {
-            if (mode == CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON) {
-                mPreviewRequestBuilder.set(
-                        CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
-                        CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON);
-                break;
-            }
-        }
+        configureCommonCaptureSettings(mPreviewRequestBuilder);
@@ -1266,6 +1287,19 @@ public class VideoCaptureCamera2 extends VideoCapture {
                 TraceEvent.scoped("VideoCaptureCamera2.configureCommonCaptureSettings")) {
             final CameraCharacteristics cameraCharacteristics = getCameraCharacteristics(mId);
 
+            final int[] stabilizationModes =
+                    cameraCharacteristics.get(
+                            CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES);
+            if (stabilizationModes != null
+                    && arrayContains(
+                            stabilizationModes,
+                            CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON)) {
+                requestBuilder.set(
+                        CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
+                        mVideoStabilization
+                                ? CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON
+                                : CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_OFF);
+            }
+
             // |mFocusMode| indicates if we're in auto/continuous, single-shot or manual mode.
             // AndroidMeteringMode.SINGLE_SHOT is dealt with independently since it needs to be
             // triggered by a capture.
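As a companion to the hunk above, a hedged sketch (again illustrative, not part of the patch) of how a requested state can be applied to a CaptureRequest.Builder, mirroring what configureCommonCaptureSettings() now does: set CONTROL_VIDEO_STABILIZATION_MODE to ON or OFF only when the ON mode is advertised, and otherwise leave the builder untouched so the HAL default applies.

    import android.hardware.camera2.CameraCharacteristics;
    import android.hardware.camera2.CameraMetadata;
    import android.hardware.camera2.CaptureRequest;

    final class VideoStabilizationConfigurator {
        // Writes CONTROL_VIDEO_STABILIZATION_MODE into |builder| when the
        // camera advertises the ON mode; no-op otherwise.
        static void apply(
                CameraCharacteristics characteristics,
                CaptureRequest.Builder builder,
                boolean enableStabilization) {
            int[] modes =
                    characteristics.get(
                            CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES);
            if (modes == null) return;
            for (int mode : modes) {
                if (mode != CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON) continue;
                builder.set(
                        CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
                        enableStabilization
                                ? CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON
                                : CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_OFF);
                return;
            }
        }

        private VideoStabilizationConfigurator() {}
    }

Keeping the new mVideoStabilization field defaulted to true means a session that never receives the new constraint behaves exactly as it did before this change, which always switched stabilization on when the camera advertised it.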
@@ -1892,7 +1926,10 @@ public class VideoCaptureCamera2 extends VideoCapture {
             int fillLightMode,
             boolean hasTorch,
             boolean torch,
-            double colorTemperature) {
+            double colorTemperature,
+            boolean hasVideoStabilization,
+            boolean videoStabilization) {
         dCheckCurrentlyOnIncomingTaskRunner(VideoCaptureCamera2.this);
         mCameraThreadHandler.post(
                 new SetPhotoOptionsTask(
@@ -1914,7 +1951,10 @@ public class VideoCaptureCamera2 extends VideoCapture {
                                 fillLightMode,
                                 hasTorch,
                                 torch,
-                                colorTemperature)));
+                                colorTemperature,
+                                hasVideoStabilization,
+                                videoStabilization)));
     }
 
     @Override
diff --git a/media/capture/video/android/photo_capabilities.h b/media/capture/video/android/photo_capabilities.h
index 47fb92763467e..4cb78bc6b1d05 100644
--- a/media/capture/video/android/photo_capabilities.h
+++ b/media/capture/video/android/photo_capabilities.h
@@ -58,6 +58,9 @@ class PhotoCapabilities {
     TORCH,
     RED_EYE_REDUCTION,
 
+    SUPPORTS_VIDEO_STABILIZATION,
+    VIDEO_STABILIZATION,
+
     NUM_ENTRIES
   };
 
diff --git a/media/capture/video/android/video_capture_device_android.cc b/media/capture/video/android/video_capture_device_android.cc
index d50c3f2e642f7..0ecbb22cd123d 100644
--- a/media/capture/video/android/video_capture_device_android.cc
+++ b/media/capture/video/android/video_capture_device_android.cc
@@ -788,7 +788,8 @@ void VideoCaptureDeviceAndroid::DoSetPhotoOptions(
       static_cast<int>(white_balance_mode), iso, settings->has_red_eye_reduction,
       settings->red_eye_reduction, static_cast<int>(fill_light_mode),
       settings->has_torch, settings->torch,
-      color_temperature);
+      color_temperature, settings->video_stabilization.has_value(),
+      settings->video_stabilization.value_or(false));
   std::move(callback).Run(true);
 }
 
diff --git a/third_party/blink/public/mojom/use_counter/metrics/web_feature.mojom b/third_party/blink/public/mojom/use_counter/metrics/web_feature.mojom
index edd93154a1ab3..106ad1d3b6140 100644
--- a/third_party/blink/public/mojom/use_counter/metrics/web_feature.mojom
+++ b/third_party/blink/public/mojom/use_counter/metrics/web_feature.mojom
@@ -4633,6 +4633,7 @@ enum WebFeature {
   kMixedFrameEmbeddedByLocalhost = 5247,
   kV8AILanguageModelCapabilities_MaxTemperature_AttributeGetter = 5248,
   kAboutSrcdocToBeControlledByServiceWorker = 5249,
+  kImageCaptureVideoStabilization = 5250,
 
   // Add new features immediately above this line. Don't change assigned
   // numbers of any item, and don't reuse removed slots. Also don't add extra
diff --git a/third_party/blink/renderer/modules/imagecapture/image_capture.cc b/third_party/blink/renderer/modules/imagecapture/image_capture.cc
index 33b2658dbf06c..b17fdac6412e6 100644
--- a/third_party/blink/renderer/modules/imagecapture/image_capture.cc
+++ b/third_party/blink/renderer/modules/imagecapture/image_capture.cc
@@ -209,6 +209,10 @@ void CopyCommonMembers(const T* source,
   if (source->hasFaceFraming()) {
     destination->setFaceFraming(source->faceFraming());
   }
+
+  if (source->hasVideoStabilization()) {
+    destination->setVideoStabilization(source->videoStabilization());
+  }
 }
 
 void CopyCapabilities(const MediaTrackCapabilities* source,
@@ -1816,7 +1820,6 @@ void ImageCapture::SetMediaTrackConstraints(
     ScriptPromiseResolverBase* resolver,
     const MediaTrackConstraints* constraints) {
   DCHECK(constraints);
-
   ExecutionContext* context = GetExecutionContext();
 
   for (const MediaTrackConstraintSet* constraint_set :
        AllConstraintSets(constraints)) {
@@ -1875,6 +1878,9 @@ void ImageCapture::SetMediaTrackConstraints(
         constraint_set->hasBackgroundBlur()) {
       UseCounter::Count(context, WebFeature::kImageCaptureBackgroundBlur);
     }
+    if (constraint_set->hasVideoStabilization()) {
+      UseCounter::Count(context, WebFeature::kImageCaptureVideoStabilization);
+    }
   }
 
   if (!service_.is_bound()) {
@@ -1965,7 +1971,9 @@ void ImageCapture::SetVideoTrackDeviceSettingsFromTrack(
   if (device_settings->background_blur.has_value()) {
     UseCounter::Count(context, WebFeature::kImageCaptureBackgroundBlur);
   }
-
+  if (device_settings->video_stabilization.has_value()) {
+    UseCounter::Count(context, WebFeature::kImageCaptureVideoStabilization);
+  }
   auto settings = media::mojom::blink::PhotoSettings::New();
 
   if (device_settings->exposure_compensation.has_value() &&
@@ -2065,7 +2073,12 @@ void ImageCapture::SetVideoTrackDeviceSettingsFromTrack(
         *device_settings->face_framing, capabilities_->faceFraming(),
         settings->has_face_framing_mode, settings->face_framing_mode);
   }
-
+  if (device_settings->video_stabilization.has_value() &&
+      capabilities_->hasVideoStabilization()) {
+    MaybeSetBoolSetting(*device_settings->video_stabilization,
+                        capabilities_->videoStabilization(),
+                        settings->video_stabilization);
+  }
   if (service_.is_bound() &&
       (settings->has_exposure_compensation || settings->has_exposure_time ||
        settings->has_color_temperature || settings->has_iso ||
@@ -2076,7 +2089,9 @@ void ImageCapture::SetVideoTrackDeviceSettingsFromTrack(
        settings->has_background_blur_mode || settings->has_face_framing_mode ||
        settings->eye_gaze_correction_mode.has_value() ||
-       settings->background_segmentation_mask_state.has_value())) {
+       settings->background_segmentation_mask_state.has_value() ||
+       settings->video_stabilization.has_value())) {
     service_->SetPhotoOptions(
         SourceId(), std::move(settings),
         WTF::BindOnce(&ImageCapture::OnSetVideoTrackDeviceSettingsFromTrack,
@@ -2358,6 +2373,20 @@ void ImageCapture::ApplyMediaTrackConstraintSetToSettings(
       settings->face_framing_mode = ParseFaceFraming(setting);
     }
   }
+
+  if (constraint_set->hasVideoStabilization() &&
+      effective_capabilities->hasVideoStabilization()) {
+    bool has_setting = false;
+    bool setting;
+    effective_capabilities->setVideoStabilization(ApplyValueConstraint(
+        &has_setting, &setting, effective_capabilities->videoStabilization(),
+        constraint_set->videoStabilization(), constraint_set_type));
+    if (has_setting) {
+      settings->video_stabilization = setting;
+    } else {
+      settings->video_stabilization.reset();
+    }
+  }
 }
 
 // TODO(crbug.com/708723): Integrate image capture constraints processing with
@@ -2559,7 +2588,15 @@ bool ImageCapture::CheckMediaTrackConstraintSet(
         resolver, "faceFraming", "faceFraming setting value not supported");
     return false;
   }
-
+  if (constraint_set->hasVideoStabilization() &&
+      effective_capabilities->hasVideoStabilization() &&
+      !CheckValueConstraint(effective_capabilities->videoStabilization(),
+                            constraint_set->videoStabilization(),
+                            constraint_set_type)) {
+    MaybeRejectWithOverconstrainedError(
+        resolver, "videoStabilization",
+        "videoStabilization setting value not supported");
+    return false;
+  }
   return true;
 }
 
@@ -2874,7 +2911,11 @@ void ImageCapture::UpdateMediaTrackSettingsAndCapabilities(
                                     MeteringMode::NONE);
     }
   }
-
+  if (photo_state->supported_video_stabilization) {
+    capabilities_->setVideoStabilization({false, true});
+  } else {
+    capabilities_->setVideoStabilization({false});
+  }
   std::move(initialized_callback).Run();
 }
 
@@ -3070,6 +3111,12 @@ ImageCapture::GetConstraintWithCapabilityExistenceMismatch(
           constraint_set_type)) {
     return "faceFraming";
   }
+  if (constraint_set->hasVideoStabilization() &&
+      !CheckIfCapabilityExistenceSatisfiesConstraint(
+          constraint_set->videoStabilization(),
+          CapabilityExists(capabilities_->hasVideoStabilization()),
+          constraint_set_type)) {
+    return "videoStabilization";
+  }
   return std::nullopt;
 }
 
diff --git a/third_party/blink/renderer/modules/mediastream/image_capture_device_settings.h b/third_party/blink/renderer/modules/mediastream/image_capture_device_settings.h
index ce64170e1080c..2e388170959d5 100644
--- a/third_party/blink/renderer/modules/mediastream/image_capture_device_settings.h
+++ b/third_party/blink/renderer/modules/mediastream/image_capture_device_settings.h
@@ -29,6 +29,7 @@ struct MODULES_EXPORT ImageCaptureDeviceSettings {
   std::optional<bool> background_segmentation_mask;
   std::optional<bool> eye_gaze_correction;
   std::optional<bool> face_framing;
+  std::optional<bool> video_stabilization;
 };
 
 }  // namespace blink
diff --git a/third_party/blink/renderer/modules/mediastream/media_constraints.cc b/third_party/blink/renderer/modules/mediastream/media_constraints.cc
index d88c0f396b80d..6da2146aea152 100644
--- a/third_party/blink/renderer/modules/mediastream/media_constraints.cc
+++ b/third_party/blink/renderer/modules/mediastream/media_constraints.cc
@@ -403,7 +403,8 @@ MediaTrackConstraintSetPlatform::MediaTrackConstraintSetPlatform()
       face_framing("faceFraming"),
       media_stream_source("mediaStreamSource"),
       render_to_associated_sink("chromeRenderToAssociatedSink"),
-      goog_noise_reduction("googNoiseReduction") {}
+      goog_noise_reduction("googNoiseReduction"),
+      video_stabilization("videoStabilization") {}
 
 Vector<const BaseConstraint*> MediaTrackConstraintSetPlatform::AllConstraints()
     const {
@@ -446,7 +447,9 @@ Vector<const BaseConstraint*> MediaTrackConstraintSetPlatform::AllConstraints()
           &eye_gaze_correction,
           &face_framing,
           &render_to_associated_sink,
-          &goog_noise_reduction};
+          &goog_noise_reduction,
+          &video_stabilization};
 }
 
 bool MediaTrackConstraintSetPlatform::IsUnconstrained() const {
diff --git a/third_party/blink/renderer/modules/mediastream/media_constraints.h b/third_party/blink/renderer/modules/mediastream/media_constraints.h
index 9e12663479ecc..f0c625a68155f 100644
--- a/third_party/blink/renderer/modules/mediastream/media_constraints.h
+++ b/third_party/blink/renderer/modules/mediastream/media_constraints.h
@@ -276,6 +276,8 @@ struct MediaTrackConstraintSetPlatform {
   StringConstraint media_stream_source;  // tab, screen, desktop, system
   BooleanConstraint render_to_associated_sink;
   BooleanConstraint goog_noise_reduction;
+
+  BooleanConstraint video_stabilization;
 
   MODULES_EXPORT bool IsUnconstrained() const;
   MODULES_EXPORT bool HasMandatory() const;
diff --git a/third_party/blink/renderer/modules/mediastream/media_constraints_impl.cc b/third_party/blink/renderer/modules/mediastream/media_constraints_impl.cc
index 46b4bfeb0706f..181c92a798e5f 100644
--- a/third_party/blink/renderer/modules/mediastream/media_constraints_impl.cc
+++ b/third_party/blink/renderer/modules/mediastream/media_constraints_impl.cc
@@ -667,6 +667,11 @@ bool ValidateAndCopyConstraintSet(
                           naked_treatment,
                           constraint_buffer.suppress_local_audio_playback);
   }
+
+  if (constraints_in->hasVideoStabilization()) {
+    CopyBooleanConstraint(constraints_in->videoStabilization(), naked_treatment,
+                          constraint_buffer.video_stabilization);
+  }
   return true;
 }
 
@@ -923,6 +928,10 @@ void ConvertConstraintSet(const MediaTrackConstraintSetPlatform& input,
     output->setSuppressLocalAudioPlayback(
         ConvertBoolean(input.suppress_local_audio_playback, naked_treatment));
   }
+  if (!input.video_stabilization.IsUnconstrained()) {
+    output->setVideoStabilization(
+        ConvertBoolean(input.video_stabilization, naked_treatment));
+  }
   // TODO(hta): Decide the future of the nonstandard constraints.
   // If they go forward, they need to be added here.
   // https://crbug.com/605673
diff --git a/third_party/blink/renderer/modules/mediastream/media_stream_track_impl.cc b/third_party/blink/renderer/modules/mediastream/media_stream_track_impl.cc
index 75ab4aa769306..e059c27f1bed9 100644
--- a/third_party/blink/renderer/modules/mediastream/media_stream_track_impl.cc
+++ b/third_party/blink/renderer/modules/mediastream/media_stream_track_impl.cc
@@ -96,7 +96,8 @@ bool ConstraintSetHasImageCapture(
          constraint_set->hasTorch() || constraint_set->hasBackgroundBlur() ||
          constraint_set->hasBackgroundSegmentationMask() ||
          constraint_set->hasEyeGazeCorrection() ||
-         constraint_set->hasFaceFraming();
+         constraint_set->hasFaceFraming() ||
+         constraint_set->hasVideoStabilization();
 }
 
 bool ConstraintSetHasNonImageCapture(
diff --git a/third_party/blink/renderer/modules/mediastream/media_track_capabilities.idl b/third_party/blink/renderer/modules/mediastream/media_track_capabilities.idl
index 299bd57baf955..3aa9a852b802a 100644
--- a/third_party/blink/renderer/modules/mediastream/media_track_capabilities.idl
+++ b/third_party/blink/renderer/modules/mediastream/media_track_capabilities.idl
@@ -39,6 +39,8 @@ dictionary MediaTrackCapabilities {
     MediaSettingsRange tilt;
    MediaSettingsRange zoom;
    boolean torch;
+    sequence<boolean> videoStabilization;
+
    // W3C Media Capture Extensions
    [RuntimeEnabled=MediaCaptureBackgroundBlur] sequence<boolean> backgroundBlur;
    [RuntimeEnabled=MediaCaptureCameraControls] sequence<boolean> backgroundSegmentationMask;
diff --git a/third_party/blink/renderer/modules/mediastream/media_track_constraint_set.idl b/third_party/blink/renderer/modules/mediastream/media_track_constraint_set.idl
index f684315b828b7..ab905f934db34 100644
--- a/third_party/blink/renderer/modules/mediastream/media_track_constraint_set.idl
+++ b/third_party/blink/renderer/modules/mediastream/media_track_constraint_set.idl
@@ -48,6 +48,7 @@ dictionary MediaTrackConstraintSet {
    (boolean or ConstrainDouble) tilt;
    (boolean or ConstrainDouble) zoom;
    ConstrainBoolean torch;
+    ConstrainBoolean videoStabilization;
 
    // W3C Media Capture Extensions
    [RuntimeEnabled=MediaCaptureBackgroundBlur] ConstrainBoolean backgroundBlur;
diff --git a/third_party/blink/renderer/modules/mediastream/media_track_settings.idl b/third_party/blink/renderer/modules/mediastream/media_track_settings.idl
index 1104f5ba788a9..3bd7bdebfea79 100644
--- a/third_party/blink/renderer/modules/mediastream/media_track_settings.idl
+++ b/third_party/blink/renderer/modules/mediastream/media_track_settings.idl
@@ -43,6 +43,7 @@ dictionary MediaTrackSettings {
    double tilt;
    double zoom;
    boolean torch;
+    boolean videoStabilization;
 
    // W3C Media Capture Extensions
    [RuntimeEnabled=MediaCaptureBackgroundBlur] boolean backgroundBlur;
diff --git a/third_party/blink/renderer/modules/mediastream/media_track_supported_constraints.idl b/third_party/blink/renderer/modules/mediastream/media_track_supported_constraints.idl
index 52a9cbbc48b2c..c9755798c4d6b 100644
--- a/third_party/blink/renderer/modules/mediastream/media_track_supported_constraints.idl
+++ b/third_party/blink/renderer/modules/mediastream/media_track_supported_constraints.idl
@@ -46,6 +46,7 @@ dictionary MediaTrackSupportedConstraints {
    boolean tilt = true;
    boolean zoom = true;
    boolean torch = true;
+    boolean videoStabilization = true;
 
    // W3C Media Capture Extensions
    [RuntimeEnabled=MediaCaptureBackgroundBlur] boolean backgroundBlur = true;