Skip to content

VideoToolbox iOS xcode26.0 b5

Alex Soto edited this page Aug 5, 2025 · 1 revision

# VideoToolbox.framework

diff -ruN /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor.h /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor.h
--- /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor.h	2025-07-17 00:18:43
+++ /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor.h	2025-07-28 23:34:07
@@ -25,104 +25,104 @@
 #endif // ! TARGET_OS_SIMULATOR
 #import <Metal/Metal.h>
 
-/*!
-	@header VTFrameProcessor.h
-	@abstract
-		This header defines the interface for a VTrameProcessor object, an interface which is used to access a range of different video processing features.
-		
-	@discussion
-		The VTrameProcessor gives access to a set of powerful video procesing implemenation suitable for different use csaes.
-		Each processor implementation is defined by a Configuration object (conforming to the VTFrameProcessorConfiguration protocol), used to pass initialization and configuration parameters for the processor and a Parameter object (conforming to the VTFrameProcessorParameters protocol) which provides the parameters for each individual processing operation.  These Configuration and Parameters objects for each implementation are defined in a processor-specific header file.
-		The following processors are available:
-			- VTFrameProcessor_MotionBlur.h: a high quality motion blur processor suitable for professional video applications.  May not be suitable for realtime scenarios.
-			- VTFrameProcessor_FrameRateconversion.h: a high quality Frame Interpolation processor suitable for professional video applications.  May not be suitable for realtime scenarios.
-			- VTFrameProcessor_OpticalFlow.h: a processor which generates formward and backward OpticalFlow between two source frames.
-*/
 
-
 NS_HEADER_AUDIT_BEGIN(nullability, sendability)
 
-/*!
-	@class		VTFrameProcessor
-	@abstract	Creates a new VTFrameProcessor for the configured video effect
-	@discussion	The VTFrameProcessor class is the main class to perform frame processing. Users can specify a video effect by passing a VTFrameProcessorConfiguration based object to the startSessionWithConfiguration call. Once a session is created, the processWithParameters method is called in a loop to process the frames one by one. Once all the frames are processed, endSession needs to called to finish all pending processing.  The caller needs to ensure that all buffers passed to the processWithParameters interface are unmodified (inclduing attachments) until the function returns or the callback is received in the case of asynchronous mode.
- */
-
+/// Provides a unified interface you can use to apply video effects to frames.
+///
+/// The VTFrameProcessor gives access to a set of powerful video processing implementation suitable for different use cases.
+/// A configuration object (conforming to the ``VTFrameProcessorConfiguration`` protocol) passes initialization and
+/// configuration parameters for the processor. A Parameter object (conforming to the ``VTFrameProcessorParameters``
+/// protocol) provides the parameters for each individual processing operation. A Configuration object and a Parameter
+/// object define each processor implementation. These Configuration and Parameters objects for each implementation are
+/// defined in a processor-specific header file.
+///
+/// Use an instance of this class to apply configured video effects either directly to pixel buffers or as a
+/// part of Metal pipeline. The video effect must be specified as a ``VTFrameProcessorConfiguration`` instance at session
+/// startup. Once a session is started, you need to call one of the process methods for each input frame. After all input
+/// frames have been provided, session must be ended for the system to finish all pending processing.
+///
+/// After you call the process function, you must not modify input and output buffers (including attachments) before the
+/// function returns or the system receives the callback, in the case of asynchronous processing.
 API_AVAILABLE(macos(15.4), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos)
 @interface VTFrameProcessor : NSObject
 
+/// Create a new instance of the frame processor.
 - (instancetype) init;
 
-/*!
-	@method		startSessionWithConfiguration:error
-	@abstract	Starts a new session and configures the processor pipeline.
-	@param		configuration
-		A VTFrameProcessorConfiguration based object corresponding to the video effect that will be applied in the subsequent processWithParameters calls.
-	@param		error
-		Will contain error information if any. You may specify NULL for this parameter if you do not want the error information.
- */
-
+/// Starts a new session and configures the processor pipeline for an effect.
+///
+/// - Parameters:
+///   - configuration: The system uses this parameter to create an effect pipeline for processing frames. This object
+///       must conform to the ``VTFrameProcessorConfiguration`` interface.
+///   - error: Contains error information if any. You may specify NULL for this parameter if you do not want the error information.
 - (BOOL) startSessionWithConfiguration:(id<VTFrameProcessorConfiguration>)configuration
                                  error:(NSError * _Nullable * _Nullable)error NS_SWIFT_NAME(startSession(configuration:));
 
-/*!
-	@method		processWithParameters:error
-	@abstract	Synchronously performs the effect specified in startSessionWithConfigurations.
-	@discussion	Frame level settings and frame level input/output parameters are passed by using the respective VTFrameProcessorParameters for the effect that VTFrameProcessor is configured for.
-	@param		parameters
-		A VTFrameProcessorParameters based object to specify additional frame based parameters to be used during processing. it needs to match the configuration type used during start session.
-	@param		error
-		Will contain error information if any. You may specify NULL for this parameter if you do not want the error information.
- */
-
+/// Synchronously performs the processor effects.
+///
+/// Use the respective ``VTFrameProcessorParameters`` to pass frame level settings and frame level input/output parameters
+/// for the effect that you configured this session for by calling ``startSessionWithConfiguration:error``.
+///
+/// - Parameters:
+///   - parameters: A `VTFrameProcessorParameters` based object to specify additional frame based parameters to use
+///     during processing. It needs to match the configuration type used during start session.
+///   - error: Contains error information if any. You may specify NULL for this parameter if you do not want the error information.
 - (BOOL) processWithParameters:(id<VTFrameProcessorParameters>)parameters
                          error:(NSError * _Nullable * _Nullable)error NS_SWIFT_UNAVAILABLE("use the async version processWithParameters:completionHandler instead");
 
-/*!
-	 @method	processWithParameters:completionHandler
-	 @abstract	Asynchronously performs the effect specified in startSessionWithConfigurations.
-	 @param		parameters
-		A VTFrameProcessorParameters based object to specify additional frame based parameters to be used during processing. it needs to match the configuration type used during start session.
-	 @param		completionHandler
-		This completion handler will be called when frame processing in competed.  The completion handler will receive the same parameters object that was provided tot he original call, as well as an NSError which will contain an error code if processing was not successful.
- */
-
+/// Asynchronously performs the processor effects.
+///
+/// - Parameters:
+///   - parameters: A `VTFrameProcessorParameters` based object to specify additional frame based parameters to use
+///     during processing. It needs to match the configuration type used during start session.
+///   - completionHandler: This completion handler is called when frame processing is completed. The completion handler
+///     receives the same parameters object that you provided to the original call, as well as an `NSError` which contains
+///     an error code if processing was not successful.
 - (void) processWithParameters:(id<VTFrameProcessorParameters>)parameters
              completionHandler:(void (^)(id<VTFrameProcessorParameters> , NSError * _Nullable) )completionHandler NS_SWIFT_NAME(process(parameters:completionHandler:));
 
-/*!
-	 @method	processWithParameters:frameOutputHandler
-	 @abstract	Used with VTFrameProcessor configurations which allow multiple output frames from a single processing call, such as frame rate conversion processor cases when the client needs access to output frames as they become available, rather than waiting for all output frames to be complete.
-	 @discussion This interface is suitable for low-latnecy scenarios when a call would generate multiple output frames, but waiting for all frames to be generated before beginning to use the frames is not ideal.  Because the frames that are returned may be used as references for frames still being generated, the output frames are strictly read-only.  If you want to modify the frames, you must create a copy first.
-	 @param		parameters
-		A VTFrameProcessorParameters based object to specify additional frame based parameters to be used during processing. it needs to match the configuration type used during start session.
-	 @param		frameOutputHandler
-		This frame output handler will be called once for each destination frame in the provided parameters if no errors are encountered.  The output handler will receive the same parameters object that was provided to the original call, a flag indicating if this is the final output to be called for this processing request, and the CMTime value associated with the VTFrameProcessorFrame that it is being called for.  An NSError parameter will contain an error code if processing was not successful.
- */
-
+/// Asynchronously performs the processor effects and outputs each frame separately.
+///
+/// Use with frame processor configurations which allow multiple output frames from a single processing call, such
+/// as frame rate conversion processor cases when you need access to output frames as they become available, rather than
+/// waiting for all output frames to be complete.
+///
+/// This interface is suitable for low-latency scenarios when a call would generate multiple output frames, but waiting
+/// for all frames to be generated before beginning to use the frames is not ideal. Because the processor may use the
+/// output frames as references for frames still being generated, the output frames are strictly read-only. If you want
+/// to modify the frames, you must create a copy first.
+///
+/// - Parameters:
+///   - parameters: A `VTFrameProcessorParameters` based object to specify additional frame based parameters to use
+///       during processing. It needs to match the configuration type used during start session.
+///   - frameOutputHandler: This frame output handler is called once for each destination frame in the provided parameters
+///       if no errors are encountered. The output handler receives the same parameters object that you provided to the
+///       original call, a flag indicating if this is the final output to be called for this processing request, and the
+///       presentation timestamp associated with the `VTFrameProcessorFrame` that it is being called for. The `NSError`
+///       parameter contains an error code if processing was not successful.
 - (void) processWithParameters:(id<VTFrameProcessorParameters>)parameters
 			frameOutputHandler:(void (^)(id<VTFrameProcessorParameters> , CMTime, BOOL, NSError * _Nullable) )frameOutputHandler NS_REFINED_FOR_SWIFT;
 
-/*!
-	@method		processWithCommandBuffer:parameters
-	@abstract	This API provides a Metal API friendly version of processWithParameters.
-	@discussion	This function allows clients to add the effect to an existing Metal command buffer. This can be used by clients that have an existing Metal pipeline and want to add this effect to it. Note: this function will wait until all previously inserted tasks in the command buffer finished before running. Tasks inserted after the processWithCommandBuffer will run after the effect is applied.  Processing does not happen until the commandBuffer is executed.
-	@param		commandBuffer
-		An existing Metal command buffer where the frame processing will be inserted.
-	@param		parameters
-		A VTFrameProcessorParameters based object to specify additional frame based parameters to be used during processing. it needs to match the configuration type used during start session.
-*/
-
+/// Performs effects in a Metal command buffer.
+///
+/// This function allows you to add the effect to an existing Metal command buffer. The clients that have an existing
+/// Metal pipeline and want to add this effect to it can use this function.
+///
+/// > Note: this function waits until all previously inserted tasks in the command buffer finish before running. Tasks
+/// inserted after the `processWithCommandBuffer` returns are run by the system after the effect is applied. Processing
+/// does not happen until the commandBuffer is executed.
+///
+/// - Parameters:
+///   - commandBuffer: An existing Metal command buffer where the frame processing is inserted.
+///   - parameters: A `VTFrameProcessorParameters` based object to specify additional frame based parameters to use
+///       during processing. It needs to match the configuration type used during start session.
 - (void) processWithCommandBuffer:(id <MTLCommandBuffer>) commandBuffer
                        parameters:(id<VTFrameProcessorParameters>)parameters;
 
-
-
-/*!
-	@method		endSession
-	@abstract	Performs all necessary tasks to end the session. After this call completes, no new frames can be processed unless startSessionWithConfigurations is called again.
-*/
-
+/// Performs all necessary tasks to end the session.
+///
+/// After this call completes, you can process no new frames unless you call ``startSessionWithConfiguration`` again.
 - (void) endSession;
 
 @end
diff -ruN /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorConfiguration.h /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorConfiguration.h
--- /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorConfiguration.h	2025-07-17 00:27:41
+++ /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorConfiguration.h	2025-07-30 00:08:43
@@ -18,70 +18,45 @@
 
 NS_HEADER_AUDIT_BEGIN(nullability)
 
-/*!
-	@protocol VTFrameProcessorConfiguration
-	@abstract The VTFrameProcessorConfiguration protocol describes the configuration of a processor to be used during a video processing session.
- 
-	@discussion VTFrameProcessorConfiguration protocol conformance is used to start an frame processing session.  These properties can be  queried on an implementation conforming to VTFrameProcessorConfiguration without starting a session.
-*/
 
+/// The protocol that describes the configuration of a processor for a video frame processing session.
+///
+/// Use `VTFrameProcessorConfiguration` protocol conformance to start a frame processing session. You can query these
+/// properties on an implementation conforming to `VTFrameProcessorConfiguration` without starting a session.
 API_AVAILABLE(macos(15.4), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos)
 NS_SWIFT_SENDABLE
 @protocol VTFrameProcessorConfiguration <NSObject>
 
 @required
 
-/*!
-	@property supported
-	@abstract Returns a Boolean indicating whether the processor supported on the current config.
-*/
+/// Returns a Boolean indicating whether the system supports this processor on the current configuration.
 @property (class, nonatomic, readonly, getter=isSupported) BOOL supported;
 
-/*!
-	@property frameSupportedPixelFormats
-	@abstract Returns a list of supported pixel formats for the current configuration
-*/
-
+/// List of supported pixel formats for source frames for the current configuration.
 @property (nonatomic, readonly) NSArray<NSNumber *> * frameSupportedPixelFormats NS_REFINED_FOR_SWIFT;
 
-/*!
-	@property sourcePixelBufferAttributes
-	@abstract Returns a dictionary of CVPixelBuffer attributes which source and reference frames passed to the processor must conform to.
-*/
-
+/// Pixel buffer attributes dictionary that describes requirements for pixel buffers which represent source frames and reference frames.
+///
+/// Use ``CVPixelBufferCreateResolvedAttributesDictionary`` to combine this dictionary with your pixel buffer attributes dictionary.
 @property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE sourcePixelBufferAttributes;
 
-/*!
-	@property destinationPixelBufferAttributes
-	@abstract Returns a dictionary of CVPixelBuffer attributes which output frames passed to the processor must conform to.
-*/
-
+/// Pixel buffer attributes dictionary that describes requirements for pixel buffers which represent destination frames.
+///
+/// Use ``CVPixelBufferCreateResolvedAttributesDictionary`` to combine this dictionary with your pixel buffer attributes dictionary.
 @property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE destinationPixelBufferAttributes;
 
 @optional // WARNING: Optional properties must be refined for swift
 
-/*!
-	@property nextFrameCount
-	@abstract returns the number of "next" frames that this processor requires for processing.
-*/
+/// Returns the number of "next" frames that this processor requires for processing.
 @property (nonatomic, readonly) NSInteger nextFrameCount NS_REFINED_FOR_SWIFT;
 
-/*!
-	@property previousFrameCount
-	@abstract returns the number of "previous" frames that this processor requires for processing.
-*/
+/// Returns the number of "previous" frames that this processor requires for processing.
 @property (nonatomic, readonly) NSInteger previousFrameCount NS_REFINED_FOR_SWIFT;
 
-/*!
-	@property maximumDimensions
-	@abstract returns the maximum dimensions for a sourceFrame for the processor
-*/
+/// Returns the maximum dimensions for a `sourceFrame` for the processor.
 @property (class, nonatomic, readonly) CMVideoDimensions maximumDimensions NS_REFINED_FOR_SWIFT;
 
-/*!
-	@property minimumDimensions
-	@abstract returns the minimum dimensions for a sourceFrame for the processor
-*/
+/// Returns the minimum dimensions for a `sourceFrame` for the processor.
 @property (class, nonatomic, readonly) CMVideoDimensions minimumDimensions NS_REFINED_FOR_SWIFT;
 
 @end
diff -ruN /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorErrors.h /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorErrors.h
--- /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorErrors.h	2025-07-17 00:27:40
+++ /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorErrors.h	2025-07-30 00:08:42
@@ -19,60 +19,42 @@
 
 extern NSErrorDomain _Nonnull const VTFrameProcessorErrorDomain;
 
-/*!
-	@enum       	VTFrameProcessorError
-	@abstract		VTFrameProcessor  error codes.
-	@discussion		These error codes are returned in the NSError object in the event a method fails.
-	@constant		VTFrameProcessorUnknownError
-		Returned if the processor failed for unknown reason.
-	@constant		VTFrameProcessorUnsupportedResolution
-		Returned if the processor failed due to an unsupported resolution.
-	@constant		VTFrameProcessorSessionNotStarted
-		Returned if the session is used to process frames without being started.
-	@constant		VTFrameProcessorSessionAlreadyActive
-		Returned if a startSessionWithConfiguration call is made on a session which has already been started.
-	@constant		VTFrameProcessorFatalError
-		Returned if a fatal error is encoutnered during processing.
-	@constant		VTFrameProcessorSessionLevelError
-		Returned if processing failed and current session should be stopped.
-	@constant		VTFrameProcessorInitializationFailed
-		Returned if the session failed to initialize the processing pipeline.
-	@constant		VTFrameProcessorUnsupportedInput
-		Returned to indicate that one or more frames is in a format which is not supproted by the processor.
-	@constant		VTFrameProcessorMemoryAllocationFailure
-		Returned if the session or processor is unable to allocate required memory.
-	@constant		VTFrameProcessorRevisionNotSupported
-		Returned if the specifed revision is not supported by the configured processor.
-	@constant		VTFrameProcessorProcessingError
-		Returned if the processor encountered an issue preventing it from processing the provided frame.
-	@constant		VTFrameProcessorInvalidParameterError
-		Returned if one of the provided parameters is not valid.
-	@constant		VTFrameProcessorInvalidFrameTiming
-		Returned if one of the provided VTFrameProcessorFrame objects has a PTS which is not supported by the processor, either invalid or out-of-order.
-	@constant		VTFrameProcessorAssetDownloadFailed
-		Returned if download of a required model asset for the processor failed
-*/
 
-
+/// `VTFrameProcessor` error codes.
+///
+/// These error codes are returned in the `NSError` object in the event a method fails.
 typedef NS_ERROR_ENUM(VTFrameProcessorErrorDomain, VTFrameProcessorError)
 {
+	/// Returned if the processor failed for unknown reason.
 	VTFrameProcessorUnknownError				= -19730,
+	/// Returned if the processor failed due to an unsupported resolution.
 	VTFrameProcessorUnsupportedResolution		= -19731,
+	/// Returned if the session is used to process frames without being started.
 	VTFrameProcessorSessionNotStarted			= -19732,
+	/// Returned if a `startSessionWithConfiguration` call is made on a session which has already been started.
 	VTFrameProcessorSessionAlreadyActive		= -19733,
+	/// Returned if a fatal error is encountered during processing.
 	VTFrameProcessorFatalError					= -19734,
+	/// Returned if processing failed and current session should be stopped.
 	VTFrameProcessorSessionLevelError			= -19735,
+	/// Returned if the session failed to initialize the processing pipeline.
 	VTFrameProcessorInitializationFailed		= -19736,
+	/// Returned to indicate that one or more frames is in a format which is not supported by the processor.
 	VTFrameProcessorUnsupportedInput			= -19737,
+	/// Returned if the session or processor is unable to allocate required memory.
 	VTFrameProcessorMemoryAllocationFailure		= -19738,
+	/// Returned if the specified revision is not supported by the configured processor.
 	VTFrameProcessorRevisionNotSupported		= -19739,
+	/// Returned if the processor encountered an issue preventing it from processing the provided frame.
 	VTFrameProcessorProcessingError				= -19740,
+	/// Returned if one of the provided parameters is not valid.
 	VTFrameProcessorInvalidParameterError		= -19741,
+	/// Returned if one of the provided `VTFrameProcessorFrame` objects has a presentation time which is not supported by the processor, either invalid or out-of-order.
 	VTFrameProcessorInvalidFrameTiming			= -19742,
+	/// Returned if download of a required model asset for the processor failed.
 	VTFrameProcessorAssetDownloadFailed			= -19743,
 };
 
 #endif // __OBJC__
 
 #endif // VTFRAMEPROCESSORERRORS_H
-
diff -ruN /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorFrame.h /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorFrame.h
--- /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorFrame.h	2025-07-17 00:27:41
+++ /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorFrame.h	2025-07-30 00:08:43
@@ -18,74 +18,58 @@
 
 NS_HEADER_AUDIT_BEGIN(nullability, sendability)
 
-/*!
-	@class     VTFrameProcessorFrame
-	@abstract  Helper class to wrap video frames that will be sent to the processor, as source frames, reference frames, or output frames.  Instances retain the buffer backing them.
-*/
 
+/// Helper class to wrap pixel buffers as video frames.
+///
+/// You can use the frames as source frames, reference frames, or output frames of a processor. Frame instances retain
+/// the backing pixel buffer.
 API_AVAILABLE(macos(15.4), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos)
 @interface VTFrameProcessorFrame : NSObject
-
-/*!
-	@method    initWithBuffer
-	@abstract  initialize class with a CVPixelBufferRef and a presentation time. Buffer is retained.  Returns nil if no CVPixelBuffer is provided or CVPixelBuffer is not IOSurface backed.
-	@param     buffer The CVPixelBufferRef that this VTFrameProcessorFrame will wrap.  Must not be nil and must be IOSurface backed.
-	@param     presentationTimeStamp   The presentation timestamp of the buffer.
-*/
+/// Creates a new instance of frame with a pixel buffer and presentation timestamp.
+///
+/// The `CVPixelBuffer` is retained in this object.
+/// Returns `nil` if the ``CVPixelBuffer`` you provided is NULL or the ``CVPixelBuffer`` is not backed by ``IOSurface``.
+///
+/// - Parameters:
+///   - buffer: The ``CVPixelBuffer`` that this frame wraps; it must not be `nil` and must be ``IOSurface`` backed.
+///   - presentationTimeStamp: The presentation timestamp of the buffer.
 - (nullable instancetype)initWithBuffer:(CVPixelBufferRef)buffer
          presentationTimeStamp:(CMTime)presentationTimeStamp;
 
 - (instancetype) init NS_UNAVAILABLE;
 + (instancetype) new NS_UNAVAILABLE;
 
-/*!
-	@property buffer
-	@abstract Returns the CVPixelBufferRef  that was provided when the object was initialized with.
-*/
-
+/// Pixel buffer that you provided when you initialized the object.
 @property(nonatomic,readonly) CVPixelBufferRef buffer;
 
-/*!
-	@property presentationTimeStamp
-	@abstract Returns the presentation timestamp that was provided when the object was initialized with
-*/
+/// Presentation timestamp that you provided when you initialized the object.
 @property(nonatomic,readonly) CMTime presentationTimeStamp;
 
-
 @end
 
-
-/*!
-	@class     VTFrameProcessorOpticalFlow
-	@abstract  Helper class to wrap optical flow that will be sent to the processor.   Instances retain the buffers backing them.
-*/
+/// Helper class to wrap optical flow.
+///
+/// Instances retain the backing pixel buffers that you provide.
 API_AVAILABLE(macos(15.4), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos)
 @interface VTFrameProcessorOpticalFlow : NSObject
-
-/*!
-	@method    initWithForwardFlow
-	@abstract  initialize class with forward and backward optical flow CVPixelBufferRefs. Instances retain the buffers backing them. Returns nil if a nil CVPixelBuffer is provided or CVPixelBuffers are not IOSurface backed.
-	@param     forwardFlow CVPixelBufferRef that contains forward optical flow. Must not be nil and must be IOSurface backed.
-	@param     backwardFlow CVPixelBufferRef that contains backward optical flow. Must not be nil and must be IOSurface backed.
-*/
+/// Creates a new instance of forward and backward optical flow with pixel buffers.
+///
+/// Create a new instance with forward and backward optical flow ``CVPixelBuffer``s. Instances retain the pixel buffers
+/// you provide to this method. Returns `nil` if either `CVPixelBuffer` is NULL or the `CVPixelBuffer`s are not `IOSurface` backed.
+///
+/// - Parameters:
+///   - forwardFlow: `CVPixelBuffer` that contains forward optical flow; it must not be `nil` and must be `IOSurface` backed.
+///   - backwardFlow: `CVPixelBuffer` that contains backward optical flow; it must not be `nil` and must be `IOSurface` backed.
 - (nullable instancetype)initWithForwardFlow:(CVPixelBufferRef)forwardFlow
                        backwardFlow:(CVPixelBufferRef)backwardFlow;
 
 - (instancetype) init NS_UNAVAILABLE;
 + (instancetype) new NS_UNAVAILABLE;
 
-/*!
-	@property forwardFlow
-	@abstract Returns the forward optical flow CVPixelBufferRef that was provided when the object was initialized.
-*/
-
+/// Returns the forward optical flow `CVPixelBuffer` that you provided when you initialized the object.
 @property(nonatomic, readonly) CVPixelBufferRef forwardFlow;
 
-/*!
-	@property backwardFlow
-	@abstract Returns the backward optical flow CVPixelBufferRef that was provided when the object was initialized.
-*/
-
+/// Returns the backward optical flow `CVPixelBuffer` that you provided when you initialized the object.
 @property(nonatomic, readonly) CVPixelBufferRef backwardFlow;
 
 @end
diff -ruN /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorParameters.h /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorParameters.h
--- /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorParameters.h	2025-07-17 00:27:40
+++ /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessorParameters.h	2025-07-30 00:08:42
@@ -18,37 +18,24 @@
 
 NS_HEADER_AUDIT_BEGIN(nullability)
 
-/*!
-@protocol VTFrameProcessorParameters
-@abstract VTFrameProcessorParameters is the base protocol for input and output processing parameters for a VTFrameProcessor processing implementation..  An instance of a class corresponding to this protocol is passed to processFrameWithParameters calls, and in async versions of those APIs, the same instance is returned in the completion.
-*/
 
+/// The base protocol for input and output processing parameters for a Video Toolbox frame processor implementation.
+///
+/// Pass an instance of a class corresponding to this protocol to `processFrameWithParameters` calls. In async versions of those APIs, the completion handler returns the same instance.
 API_AVAILABLE(macos(15.4), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos)
 @protocol VTFrameProcessorParameters <NSObject>
 
 @required
 
-/**
- * @property sourceFrame
- * @abstract VTFrameProcessorFrame that contains the current source frame to be used for all processing features. Must be non-null
-*/
-
+/// A `VTFrameProcessorFrame` that contains the current source frame for all processing features; must be non-null.
 @property(nonatomic, readonly) VTFrameProcessorFrame * sourceFrame;
 
 @optional // WARNING: Optional properties must be refined for swift
 
-/**
- * @property destinationFrame
- * @abstract VTFrameProcessorFrame that contains the destination frame for processors which output a single processed frame.
-*/
-
+/// The destination frame for processors which output a single processed frame.
 @property(nonatomic, readonly) VTFrameProcessorFrame * destinationFrame NS_REFINED_FOR_SWIFT;
 
-/**
- * @property destinationFrames
- * @abstract NSArray of VTFrameProcessorFrame that contains the destination frames for processors which may output more than one processed frame.
-*/
-
+/// Array of destination frames for processors which may output more than one processed frame.
 @property(nonatomic, readonly) NSArray<VTFrameProcessorFrame *> * destinationFrames NS_REFINED_FOR_SWIFT;
 
 
@@ -59,4 +46,3 @@
 #endif // __OBJC__
 
 #endif // VTFRAMEPROCESSORPARAMETERS_H
-
diff -ruN /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_FrameRateConversion.h /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_FrameRateConversion.h
--- /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_FrameRateConversion.h	2025-07-17 00:27:40
+++ /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_FrameRateConversion.h	2025-07-29 23:35:25
@@ -19,72 +19,71 @@
 #import <VideoToolbox/VTFrameProcessorParameters.h>
 #import <VideoToolbox/VTFrameProcessorFrame.h>
 
-/*!
-    @brief Interfaces for creating and using a FrameRateConversion processor
-
-    @details The VTFrameRateConversion processor Configuration and Parameters objects are used with the VTFrameProcessor interface defined in VTFrameProcessor.h.
-*/
-
-/*!
- @brief Quality prioritization levels to favor quality or performance.
-*/
+/// Configuration value you set to prioritize quality or performance.
 API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
 typedef NS_ENUM(NSInteger, VTFrameRateConversionConfigurationQualityPrioritization) {
 	VTFrameRateConversionConfigurationQualityPrioritizationNormal = 1,
 	VTFrameRateConversionConfigurationQualityPrioritizationQuality = 2,
 } NS_SWIFT_NAME(VTFrameRateConversionConfiguration.QualityPrioritization);
 
-/*!
- @brief List of existing algorithm revisions with the highest being the latest. Clients can read defaultRevision property to find the default revision.
- */
-
+/// Available algorithm revisions.
+///
+/// A new enum case with higher revision number is added when the processing algorithm is updated.
+/// The ``VTFrameRateConversionConfiguration/defaultRevision`` property provides the default algorithm revision.
 API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
 typedef NS_ENUM(NSInteger, VTFrameRateConversionConfigurationRevision) {
 	VTFrameRateConversionConfigurationRevision1           = 1,    // revision 1
 } NS_SWIFT_NAME(VTFrameRateConversionConfiguration.Revision);
 
-/*!
- @brief Hint to let the processor know whether frames are being submitted in presenatation sequence, allowing performance optimizations based on previous processing requests
- */
+/// Indicates the order of input frames.
+///
+/// When submitting ``VTFrameRateConversionParameters`` to the processor, you need to provide one of these values based on
+/// how the input frames are related to each other.
+///
+/// Use ``VTFrameRateConversionParametersSubmissionModeSequential`` to indicate that the current submission follows
+/// presentation time order without jump or skip, when compared to previous submissions. This value provides better
+/// processor performance than other values.
+///
+/// Use ``VTFrameRateConversionParametersSubmissionModeRandom`` to indicate that the current submission has no relation
+/// to the previous submission. Typically, this indicates a jump or a skip in the frame sequence. The processor clears
+/// internal caches when it receives this value in ``VTFrameProcessor/processWithParameters`` function call.
+///
+/// Use ``VTFrameRateConversionParametersSubmissionModeSequentialReferencesUnchanged`` to indicate that the frames are
+/// in sequential order; however, the reference frames are unchanged.
 API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
 typedef NS_ENUM(NSInteger, VTFrameRateConversionParametersSubmissionMode) {
-	VTFrameRateConversionParametersSubmissionModeRandom							= 1,    // Frames are submitted in non-sequential order
-	VTFrameRateConversionParametersSubmissionModeSequential						= 2,    // Frames are submitted sequentially following presentation time order
-	VTFrameRateConversionParametersSubmissionModeSequentialReferencesUnchanged	= 3,    // Frames are being submitted sequentially.  This processing request uses the same source and next reference frames as the previous submission.
+	/// You are submitting frames in non-sequential order.
+	VTFrameRateConversionParametersSubmissionModeRandom							= 1,
+	/// You are submitting frames sequentially following presentation time order.
+	VTFrameRateConversionParametersSubmissionModeSequential						= 2,
+	/// You are submitting frames sequentially.
+	///
+	/// This processing request uses the same source and next reference frames as the previous submission.
+	VTFrameRateConversionParametersSubmissionModeSequentialReferencesUnchanged	= 3,
 } NS_SWIFT_NAME(VTFrameRateConversionParameters.SubmissionMode);
 
 
 NS_HEADER_AUDIT_BEGIN(nullability, sendability)
 
-
-/*!
- @class VTFrameRateConversionConfiguration
- @abstract Configuration that is used to set up the FrameRateConversion Processor.
-
- @discussion This configuration enables the FrameRateConversion on a VTFrameProcesing session.
-*/
-
+/// Configuration that you use to set up the frame rate conversion processor.
+///
+/// This configuration enables the frame-rate conversion on a `VTFrameProcessor` session.
 API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
 NS_SWIFT_SENDABLE
 @interface VTFrameRateConversionConfiguration : NSObject <VTFrameProcessorConfiguration>
 
 #pragma mark --- init function(s).
-/*!
- @abstract Creates a new VTFrameRateConversionConfiguration with specified flow width and height.
 
- @discussion init will return nil if dimensions are out of range or revision is unsupported.
-
- @param frameWidth    Width of source frame in pixels. Maximum value is 8192 for macOS, and 4096 for iOS.
-
- @param frameHeight   Height of source frame in pixels. Maximum value is 4320 for macOS, and 2160 for iOS.
-
- @param usePrecomputedFlow  Boolean value to indicate that Optical Flow will be provided by the user, if false this configuration will compute the optical flow on the fly.
-
- @param qualityPrioritization Used to control quality and performance levels. See VTFrameRateConversionConfigurationQualityPrioritization for more info.
- 
- @param revision The specific algorithm or configuration revision that is to be used to perform the request.
- 
-*/
+/// Creates a new frame-rate conversion configuration.
+///
+/// Returns `nil` if dimensions are out of range or revision is unsupported.
+///
+/// - Parameters:
+///   - frameWidth: Width of source frame in pixels; the maximum value is 8192 for macOS, and 4096 for iOS.
+///   - frameHeight: Height of source frame in pixels; the maximum value is 4320 for macOS, and 2160 for iOS.
+///   - usePrecomputedFlow: A Boolean value that indicates whether you are providing Optical Flow. If false, optical flow is computed on the fly.
+///   - qualityPrioritization: A level you use to prioritize quality or performance; for more information about supported levels, see ``VTFrameRateConversionConfigurationQualityPrioritization``.
+///   - revision: The specific algorithm or configuration revision you use to perform the request.
 - (nullable instancetype)initWithFrameWidth:(NSInteger)frameWidth
 								frameHeight:(NSInteger)frameHeight
 						 usePrecomputedFlow:(BOOL)usePrecomputedFlow
@@ -94,104 +93,76 @@
 - (instancetype) init NS_UNAVAILABLE;
 + (instancetype) new NS_UNAVAILABLE;
 
-/**
- * @property frameWidth
- * @abstract Width of source frame in pixels.
- */
+/// Width of source frame in pixels.
 @property (nonatomic, readonly) NSInteger frameWidth;
 
-/**
- * @property frameHeight
- * @abstract Height of source frame in pixels.
- */
+/// Height of source frame in pixels.
 @property (nonatomic, readonly) NSInteger frameHeight;
 
-/**
- * @property usePrecomputedFlow
- * @abstract Indicates that caller will provide optical flow.
-*/
+/// Indicates that caller provides optical flow.
 @property (nonatomic, readonly) BOOL usePrecomputedFlow;
 
-/**
- * @property qualityPrioritization
- * @abstract parameter used to control quality and performance levels. See VTFrameRateConversionConfigurationQualityPrioritization for more info.
-*/
+/// A parameter you use to control quality and performance levels.
+///
+/// For more information about supported levels, see ``VTFrameRateConversionConfigurationQualityPrioritization``.
 @property (nonatomic, readonly) VTFrameRateConversionConfigurationQualityPrioritization qualityPrioritization;
 
-/*!
- @property revision
- @abstract The specific algorithm or configuration revision that is to be used to perform the request.
- */
+/// The specific algorithm or configuration revision you use to perform the request.
 @property (nonatomic, readonly) VTFrameRateConversionConfigurationRevision revision;
 
-/*!
- @property supportedRevisions
- @abstract Provides the collection of currently-supported algorithm or configuration revisions for the class of configuration.
- @discussion This property allows clients to introspect at runtime what revisions are available for each configuration.
- */
+/// Provides the collection of currently supported algorithms or configuration revisions for the class of configuration.
+///
+/// A property you use to introspect at runtime which revisions are available for each configuration.
 @property (class, nonatomic, readonly) NSIndexSet* supportedRevisions;
 
-/*!
- @property defaultRevision
- @abstract Provides the default revision of a particular algorithm or configuration.
- */
+/// Provides the default revision of a specific algorithm or configuration.
 @property (class, nonatomic, readonly) VTFrameRateConversionConfigurationRevision defaultRevision;
 
-/**
- * @property frameSupportedPixelFormats
- * @abstract list of source frame supported pixel formats for current configuration
- */
+/// Supported pixel formats available for source frames for current configuration.
 @property (nonatomic, readonly) NSArray<NSNumber *> * frameSupportedPixelFormats NS_REFINED_FOR_SWIFT;
 
-/**
- * @property sourcePixelBufferAttributes
- * @abstract returns a pixelBufferAttributes dictionary describing requirements for pixelBuffers used as source frames and reference frames.
-*/
+/// Pixel buffer attributes dictionary that describes requirements for pixel buffers which represent source frames and reference frames.
+///
+/// Use ``CVPixelBufferCreateResolvedAttributesDictionary`` to combine this dictionary with your pixel buffer attributes dictionary.
 @property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE sourcePixelBufferAttributes;
 
-/**
- * @property destinationPixelBufferAttributes
- * @abstract returns a pixelBufferAttributes dictionary describing requirements for pixelBuffers used as destination frames.
-*/
+/// Pixel buffer attributes dictionary that describes requirements for pixel buffers which represent destination frames.
+///
+/// Use ``CVPixelBufferCreateResolvedAttributesDictionary`` to combine this dictionary with your pixel buffer attributes dictionary.
 @property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE destinationPixelBufferAttributes;
 
-/*!
-	@property supported
-	@abstract reports whether this processor is supported
-*/
+/// Reports whether the system supports this processor.
 @property (class, nonatomic, readonly, getter=isSupported) BOOL supported;
 @property (class, nonatomic, readonly) Boolean processorSupported API_DEPRECATED_WITH_REPLACEMENT("isSupported", macos(15.4, 26.0)) API_UNAVAILABLE(ios) API_UNAVAILABLE(tvos, watchos, visionos);
 
 @end
 
-
-/*!
- @class VTFrameRateConversionParameters
- @abstract VTFrameRateConversionParameters object contains both input and output parameters needed to run the FrameRateConversion processor on a frame. This object is used in the processWithParameters call of VTFrameProcessor class. The output parameter for this class is destinationFrame where the output frame is returned (as VTFrameProcessorMutableFrame) back to the caller function once the processWithParameters completes.
- 
- @discussion VTFrameRateConversionParameters are frame level parameters.
-*/
-
+/// An object that contains both input and output parameters, which the frame-rate conversion processor needs to process a frame.
+///
+/// Use this object as a parameter to the ``VTFrameProcessor/processWithParameters`` method. The output parameter for
+/// this class is ``destinationFrame`` where the processor returns output frame (as mutable ``VTFrameProcessorFrame``)
+/// back to you once the `processWithParameters` completes.
+///
+/// `VTFrameRateConversionParameters` are frame-level parameters.
 API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
 @interface VTFrameRateConversionParameters : NSObject <VTFrameProcessorParameters>
 
-/*!
- @abstract Creates a new VTFrameRateConversionParameters .
- 
- @discussion init will return nil if sourceFrame or nextFrame is nil, if sourceFrame and reference frames don't have the same pixelformat, or if interpolationPhase array count does not match destinationFrames array count.
-
- @param sourceFrame Current source frame. Must be non nil.
-  
- @param nextFrame Next source frame in presentation time order.  Must be non nil.
-  
- @param opticalFlow Optional VTFrameProcessorOpticalFlow object that contains forward and backward optical flow with next frame. Only needed if optical flow is pre-computed. For the first frame this will always be nil.
- 
- @param interpolationPhase Array of float numbers to indicate at what intervals to insert a frame between current and next frame. Array size indicates how many frames to interpolate and needs to match destinationFrames size, one interval for each destination frame. Float number values should be between 0 and 1, e.g to insert one frame in the middle a value of 0.5 can be used.
-
- @param submissionMode Set to VTFrameRateConversionParametersSubmissionModeSequential to indicate that current submission follow presentation time order without jump or skip when compared to previous submission. VTFrameRateConversionParametersSubmissionModeSequential will yield better performance. Set to  VTFrameRateConversionParametersSubmissionModeRandom to indicate a skip or a jump in frame sequence. If VTFrameRateConversionParametersSubmissionModeRandom is set internal cache will be cleared during processWithParameters call.
- 
- @param destinationFrames Caller-allocated NSArray of VTFrameProcessorFrame that contains  pixel buffers that will receive the results.  Must contain the same number of elements as interpolationPhase NSArray.
-*/
+/// Creates new frame rate conversion parameters.
+///
+/// Returns `nil` if `sourceFrame` or `nextFrame` is `nil`, if `sourceFrame` and reference frames don't have the same pixel format, or if `interpolationPhase` array count does not match `destinationFrames` array count.
+///
+/// - Parameters:
+///   - sourceFrame: Current source frame; must be non `nil`.
+///   - nextFrame: Next source frame in presentation time order; must be non `nil`.
+///   - opticalFlow: Optional ``VTFrameProcessorOpticalFlow`` object that contains forward and backward optical flow with
+///   next frame. You only need to use this if the optical flow is pre-computed. For the first frame this is always `nil`.
+///   - interpolationPhase: Array of float numbers that indicate intervals at which the processor inserts a frame between
+///   current and next frame. The array size indicates how many frames to interpolate and this size must match
+///   `destinationFrames` size, with one interval for each destination frame. Use float number values between 0 and 1,
+///   for example, to insert one frame in the middle use a value of 0.5.
+///   - submissionMode: Provides a hint to let the processor know whether you are submitting frames in presentation
+///   sequence. For more information about supported modes see ``VTFrameRateConversionParametersSubmissionMode``.
+///   - destinationFrames: Caller-allocated array of ``VTFrameProcessorFrame`` that contains pixel buffers to receive the results. Must contain the same number of elements as `interpolationPhase`.
 - (nullable instancetype) initWithSourceFrame:(VTFrameProcessorFrame *)sourceFrame
 									nextFrame:(VTFrameProcessorFrame *)nextFrame
 								  opticalFlow:(VTFrameProcessorOpticalFlow * _Nullable)opticalFlow
@@ -202,45 +173,28 @@
 - (instancetype) init NS_UNAVAILABLE;
 + (instancetype) new NS_UNAVAILABLE;
 
-/**
- * @property sourceFrame
- * @abstract sourceFrame Current source frame. Must be non nil
-*/
-
+/// Current source frame, which must be non `nil`.
 @property(nonatomic, readonly) VTFrameProcessorFrame * sourceFrame;
 
-/**
- * @property nextFrame
- * @abstract Next source frame in presentation time order. For the last frame this will be nil.
-*/
-
+/// The next source frame in presentation time order, which is `nil` for the last frame.
 @property(nonatomic, readonly, nullable) VTFrameProcessorFrame * nextFrame;
 
-/**
- * @property opticalFlow
- * @abstract Optional VTFrameProcessorReadOnlyOpticalFlow object that contains forward and backward optical flow with next frame. Only needed if optical flow is pre-computed. For the last frame this will be nil.
-*/
-
+/// An optional object that contains forward and backward optical flow with next frame.
+///
+/// Only needed if optical flow is pre-computed. For the last frame this is `nil`.
 @property(nonatomic, readonly, nullable) VTFrameProcessorOpticalFlow * opticalFlow;
 
-/**
- * @property interpolationPhase
- * @abstract Array of float numbers to indicate at what intervals to insert a frame between current and next frame. Array size indicates how many frames to interpolate and needs to match destinationFrames size, one interval for each destination frame. Float number values should be between 0 and 1, e.g to insert one frame in the middle a value of 0.5 can be used.
-*/
+/// Array of float numbers that indicate intervals at which the processor inserts a frame between the current and next frame.
+///
+/// Array size indicates how many frames to interpolate and must match `destinationFrames` size, one interval for each destination frame. Use float number values between 0 and 1, for example, to insert one frame in the middle use a value of 0.5.
 @property (nonatomic, readonly) NSArray<NSNumber *> * interpolationPhase NS_REFINED_FOR_SWIFT;
 
-/**
- * @property submissionMode
- * @abstract A VTFrameRateConversionParametersSubmissionMode value describing the processing request in this Parameters object .
-*/
+/// Ordering of the input frames in this submission relative to the previous submission.
 @property (nonatomic, readonly) VTFrameRateConversionParametersSubmissionMode submissionMode;
 
-
-/**
- * @property destinationFrames
- * @abstract Caller-allocated NSArray of VTFrameProcessorFrame that contains  pixel buffers that will receive the results.  Must contain the same number of elements as interpolationPhase NSArray.
-*/
-
+/// Caller-allocated array of video frame objects that contain pixel buffers to receive the results.
+///
+/// Must contain the same number of elements as `interpolationPhase` NSArray.
 @property(nonatomic, readonly) NSArray<VTFrameProcessorFrame *> * destinationFrames;
 
 @end
diff -ruN /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_LowLatencyFrameInterpolation.h /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_LowLatencyFrameInterpolation.h
--- /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_LowLatencyFrameInterpolation.h	2025-07-17 00:18:43
+++ /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_LowLatencyFrameInterpolation.h	2025-07-29 23:35:54
@@ -23,45 +23,50 @@
 NS_HEADER_AUDIT_BEGIN(nullability, sendability)
 
 
-/*!
- @class VTLowLatencyFrameInterpolationConfiguration
- @abstract Configuration that is used to program VTFrameProcessor for Low Latency Frame Interpolation.  This can either do purely temporal interpolation (Frame Rate Conversion) or it can do temporal and spatial interpolation (Scaling and Frame Rate Conversion).
-
- @discussion This processor requires a source frame and a previous frame.  It does temporal scaling, interpolating frames between the previous frame and the source frame.  When performing both temporal and spatial interpolation, the processor can only perform 2x upscaling, and a single frame of temporal interpolation.  When performing spatial scaling, the processor will produce upscaled intermediate frames as well as an upscaled sourceFrame but will not upscale the previous reference frame provided. Important: When calling [VTFrameProcessor startSessionWithConfiguration:] to create a VTLowLatencyFrameInterpolation session, ML model loading may take longer than a frame time. Avoid blocking the UI thread or stalling frame rendering pipelines during this call.
-*/
-
+/// Configuration that you use to program Video Toolbox frame processor for low-latency frame interpolation.
+///
+/// This configuration can do either purely temporal interpolation (frame-rate conversion) or temporal and spatial
+/// interpolation (scaling and frame-rate conversion). This processor requires a source frame and a previous frame. It
+/// does temporal scaling, which interpolates frames between the previous frame and the source frame. When performing
+/// both temporal and spatial interpolation, the processor can only perform 2x upscaling, and a single frame of temporal
+/// interpolation. When performing spatial scaling, the processor produces upscaled intermediate frames and an upscaled
+/// `sourceFrame`, but it does not upscale the previous reference frame you provided.
+///
+/// > Important: When calling ``VTFrameProcessor/startSessionWithConfiguration:error:`` to create a `VTLowLatencyFrameInterpolation`
+/// session, ML model loading may take longer than a frame time. Avoid blocking the UI thread or stalling frame rendering
+/// pipelines during this call.
 API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos)
 NS_SWIFT_SENDABLE
 @interface VTLowLatencyFrameInterpolationConfiguration : NSObject <VTFrameProcessorConfiguration>
 
 #pragma mark --- init function(s).
-/*!
- @abstract Creates a new VTLowLatencyFrameInterpolationConfiguration with specified frame width and height, configured for temporal interpolation (Frame Rate Conversion).
 
- @param frameWidth    Width of source frame in pixels.
- 
- @param frameHeight   Height of source frame in pixels.
-
- @param numberOfInterpolatedFrames   The number of uniformly spaced frames that you want to have available for interpolation.
- 
- @discussion The available interpolation points will be be the next value of  (2^x -1) which is greater than or equal to numberOfInterpolatedFrames.  For example, if 1 interpolated frame is requested, 1 interpolation point at 0.5 is available.  If 2 interpolated frames are requested, 3 interpolation points at 0.25, 0.5 and 0.75 are available.  Not all available interpolation points need to be used.  Setting a higher numberOfInterpolatedFrames increases the resolution of interpolation in some cases, but will also increase latency.
-*/
+/// Creates a new low-latency frame interpolation configuration for frame-rate conversion.
+///
+/// The available interpolation points are equal to the value of (2^x - 1), where x is equal to `numberOfInterpolatedFrames`.
+/// For example,
+/// - If you request 1 interpolated frame, 1 interpolation point at 0.5 is available.
+/// - If you request 2 interpolated frames, 3 interpolation points at 0.25, 0.5 and 0.75 are available.
+/// You don't need to use all available interpolation points. Setting a higher `numberOfInterpolatedFrames` increases
+/// the resolution of interpolation in some cases, but also increases latency.
+///
+/// - Parameters:
+///   - frameWidth: Width of source frame in pixels.
+///   - frameHeight: Height of source frame in pixels.
+///   - numberOfInterpolatedFrames: The number of uniformly spaced frames that you want to be used for interpolation.
 - (nullable instancetype)initWithFrameWidth:(NSInteger)frameWidth
 								frameHeight:(NSInteger)frameHeight
 				 numberOfInterpolatedFrames:(NSInteger)numberOfInterpolatedFrames;
 
-
-/*!
- @abstract Creates a new VTLowLatencyFrameInterpolationConfiguration with specified frame width and height, configured for spatial scaling as well as temporal scaling.
-
- @param frameWidth    Width of source frame in pixels.
- 
- @param frameHeight   Height of source frame in pixels.
-
- @param spatialScaleFactor   The requested spatial scale factor as an integer.  Currently, only 2x spatial scaling is supported.
- 
- @discussion When configured for spatial scaling, the VTLowLatencyFrameInterpolation processor only supports 2x spatial upscaling and a single frame of temporal interpolation at a 0.5 interpolation phase.  Setting the numberOfInterpolatedFrames property will be ignored in this case.
-*/
+/// Creates a new low-latency frame interpolation configuration for spatial scaling and temporal scaling.
+///
+/// When you configure the processor for spatial scaling, the low-latency frame interpolation processor only supports 2x
+/// spatial upscaling and a single frame of temporal interpolation at a 0.5 interpolation phase.
+///
+/// - Parameters:
+///   - frameWidth: Width of source frame in pixels.
+///   - frameHeight: Height of source frame in pixels.
+///   - spatialScaleFactor: The requested spatial scale factor as an integer. Currently, the processor supports only 2x spatial scaling.
 - (nullable instancetype)initWithFrameWidth:(NSInteger)frameWidth
 								frameHeight:(NSInteger)frameHeight
 						 spatialScaleFactor:(NSInteger)spatialScaleFactor;
@@ -69,80 +74,55 @@
 - (instancetype) init NS_UNAVAILABLE;
 + (instancetype) new NS_UNAVAILABLE;
 
-/**
- * @property frameWidth
- * @abstract Returns the width of source frames in pixels.
-*/
+/// Width of source frames in pixels.
 @property (nonatomic, readonly) NSInteger frameWidth;
 
-/**
- * @property frameHeight
- * @abstract Returns the height of source frames in pixels.
-*/
+/// Height of source frames in pixels.
 @property (nonatomic, readonly) NSInteger frameHeight;
 
-/**
- * @property spatialScaleFactor
- * @abstract Returns the configured spatial scale factor as an integer.
-*/
+/// Configured spatial scale factor as an integer.
 @property (nonatomic, readonly) NSInteger spatialScaleFactor;
 
-/**
- * @property numberOfInterpolatedFrames
- * @abstract Returns the number of uniformly spaced frames that the processor is configured for..
-*/
+/// Number of uniformly spaced frames for which you configured the processor.
 @property (nonatomic, readonly) NSInteger numberOfInterpolatedFrames;
 
-/**
- * @property frameSupportedPixelFormats
- * @abstract Returns a list of supported pixel formats for current configuration
- */
-
+/// Available supported pixel formats for current configuration.
 @property (nonatomic, readonly) NSArray<NSNumber *> * frameSupportedPixelFormats NS_REFINED_FOR_SWIFT;
 
-/**
- * @property sourcePixelBufferAttributes
- * @abstract Returns a pixelBufferAttributes dictionary describing requirements for pixelBuffers used as source and reference frames
-*/
+/// Pixel buffer attributes dictionary that describes requirements for pixel buffers which represent source frames and reference frames.
+///
+/// Use ``CVPixelBufferCreateResolvedAttributesDictionary`` to combine this dictionary with your pixel buffer attributes dictionary.
 @property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE sourcePixelBufferAttributes;
 
-/**
- * @property destinationPixelBufferAttributes
- * @abstract Returns a pixelBufferAttributes dictionary describing requirements for pixelBuffers used as destination frames
-*/
+/// Pixel buffer attributes dictionary that describes requirements for pixel buffers which represent destination frames.
+///
+/// Use ``CVPixelBufferCreateResolvedAttributesDictionary`` to combine this dictionary with your pixel buffer attributes dictionary.
 @property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE destinationPixelBufferAttributes;
 
-/*!
-	@property supported
-	@abstract reports whether this processor is supported
-*/
+/// Reports whether the system supports this processor.
 @property (class, nonatomic, readonly, getter=isSupported) BOOL supported;
 
 @end
 
-
-/*!
- @class VTLowLatencyFrameInterpolationParameters
- @abstract VTLowLatencyFrameInterpolationParameters object contains both input and output parameters needed for the Temporal Noise Filter Frame Processor. This object is used in the processWithParameters call of VTFrameProcessor class.
- 
- @discussion VTLowLatencyFrameInterpolationParameters are frame level parameters.
-*/
-
+/// An object that contains both input and output parameters that the low-latency frame interpolation processor needs.
+///
+/// Use this object in the `processWithParameters` call of `VTFrameProcessor` class.
+///
+/// `VTLowLatencyFrameInterpolationParameters` are frame-level parameters.
 API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos)
 @interface VTLowLatencyFrameInterpolationParameters : NSObject <VTFrameProcessorParameters>
 
-/*!
- @abstract Creates a new VTLowLatencyFrameInterpolationParameters used to generate interpolated frames between a previous frame and a sourceFrame.
-
- @param sourceFrame Current source frame. Must be non nil.
-  
- @param previousFrame  Previous frame used for interpolation.  Must be non nil.
-
- @param interpolationPhase The list of interpolation phase locations for the frames to be interpolated.  Must be greater than 0 and less than 1.0  0.5 is midway between the previous frame and the source frame.  If spatial scaling has been enabled, the only supported interpolation phase is 0.5.
- 
- @param destinationFrames The list of VTFrameProcessorFrame to receive the interpolated frames.  This must have the same number of elements as the the interpolationPhase.  If spatial scaling is enabled, it must also contain an element to hold the scaled version of sourceFrame.
- 
-*/
+/// Creates a new low-latency frame interpolation parameters object.
+///
+/// - Parameters:
+///   - sourceFrame: Current frame to use for interpolation; must be non `nil`.
+///   - previousFrame: Previous frame used for interpolation; must be non `nil`.
+///   - interpolationPhase: Array of float numbers that indicate interpolation phase locations at which the processor
+///   interpolates the frames. Must be greater than 0 and less than 1.0; for example 0.5 is midway between the previous
+///   frame and the source frame. If you enable spatial scaling, the only supported interpolation phase is 0.5.
+///   - destinationFrames: Caller-allocated array of `VTFrameProcessorFrame` to receive the interpolated frames. This
+///   must have the same number of elements as the `interpolationPhase`. If you enable spatial scaling, it must also
+///   contain an element to hold the scaled version of sourceFrame.
 - (nullable instancetype) initWithSourceFrame:(VTFrameProcessorFrame *)sourceFrame
 					   previousFrame:(VTFrameProcessorFrame *)previousFrame
 				  interpolationPhase:(NSArray<NSNumber *> *) interpolationPhase
@@ -151,28 +131,16 @@
 - (instancetype) init NS_UNAVAILABLE;
 + (instancetype) new NS_UNAVAILABLE;
 
-/**
- * @property sourceFrame
- * @abstract Returns the source frame that was provided when the VTLowLatencyFrameInterpolationParameters object was created.
-*/
+/// Source frame that you provided when creating the low-latency frame interpolation parameters object.
 @property(nonatomic, readonly) VTFrameProcessorFrame * sourceFrame;
 
-/**
- * @property previousFrame
- * @abstract Returns the previous frame that was provided when the VTLowLatencyFrameInterpolationParameters object was created.
-*/
+/// Previous frame that you provided when creating the low-latency frame interpolation parameters object.
 @property(nonatomic, readonly) VTFrameProcessorFrame * previousFrame;
 
-/**
- * @property interpolationPhase
- * @abstract Returns the array of interpolation [phases that were provided when the VTLowLatencyFrameInterpolationParameters object was created.
-*/
+/// Array of interpolation phases that you provided when creating the low-latency frame interpolation parameters object.
 @property (nonatomic, readonly) NSArray<NSNumber *> * interpolationPhase NS_REFINED_FOR_SWIFT;
 
-/**
- * @property destinationFrames
- * @abstract Returns the array of destination frames that were provided when the VTLowLatencyFrameInterpolationParameters object was created.
-*/
+/// Array of destination frames that you provided when creating the low-latency frame interpolation parameters object.
 @property(nonatomic, readonly) NSArray<VTFrameProcessorFrame *> * destinationFrames;
 
 @end
diff -ruN /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_LowLatencySuperResolutionScaler.h /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_LowLatencySuperResolutionScaler.h
--- /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_LowLatencySuperResolutionScaler.h	2025-07-17 00:24:34
+++ /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_LowLatencySuperResolutionScaler.h	2025-07-28 23:34:06
@@ -23,27 +23,26 @@
 NS_HEADER_AUDIT_BEGIN(nullability, sendability)
 
 
-/*!
- @class VTLowLatencySuperResolutionScalerConfiguration
- @abstract Creates an object which is used to configure VTFrameProcessor for Low Latency Super Resolution Scaler processing.
-
- @discussion VTLowLatencySuperResolutionScalerConfiguration is used to configure a VTFrameProcessor.  This interface can also queried for important operating details, like the pixel buffer attributes required for frames submitted to the processor.   Important: When calling [VTFrameProcessor startSessionWithConfiguration:] to create a VTLowLatencySuperResolutionScaler session, ML model loading may take longer than a frame time. Avoid blocking the UI thread or stalling frame rendering pipelines during this call.
-*/
-
+/// An object you use to configure frame processor for low-latency super-resolution scaler processing.
+///
+/// Use this object to configure a ``VTFrameProcessor``. Query this interface also for important operating details, like
+/// the pixel buffer attributes required for frames you submit to the processor.
+///
+/// > Important: When calling ``VTFrameProcessor/startSessionWithConfiguration:error:`` to create a `VTLowLatencySuperResolutionScaler`
+/// session, ML model loading may take longer than a frame time. Avoid blocking the UI thread or stalling frame rendering
+/// pipelines during this call.
 API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos)
 NS_SWIFT_SENDABLE
 @interface VTLowLatencySuperResolutionScalerConfiguration : NSObject <VTFrameProcessorConfiguration>
 
 #pragma mark --- init function(s).
-/*!
- @abstract Creates a new VTLowLatencySuperResolutionScalerConfiguration with specified frame width and height.
 
- @param frameWidth    Width of source frame in pixels.
-
- @param frameHeight   Height of source frame in pixels.
- 
- @param scaleFactor   The scale factor to be applied.  This must be a supported value returned by supportedScaleFactorsForFrameWidth:frameHeight.
-*/
+/// Creates a new low-latency super-resolution scaler configuration with specified frame width and height.
+///
+/// - Parameters:
+///   - frameWidth: Width of source frame in pixels.
+///   - frameHeight: Height of source frame in pixels.
+///   - scaleFactor: The scale factor to apply. This must be a supported value that ``supportedScaleFactorsForFrameWidth:frameHeight:`` returns.
 - (instancetype)initWithFrameWidth:(NSInteger)frameWidth
 					   frameHeight:(NSInteger)frameHeight
 					   scaleFactor:(float)scaleFactor;
@@ -51,107 +50,67 @@
 - (instancetype) init NS_UNAVAILABLE;
 + (instancetype) new NS_UNAVAILABLE;
 
-/**
- * @property frameWidth
- * @abstract Width of source frame in pixels.
-*/
+/// Width of source frame in pixels.
 @property (nonatomic, readonly) NSInteger frameWidth;
 
-/**
- * @property frameHeight
- * @abstract Height of source frame in pixels.
-*/
+/// Height of source frame in pixels.
 @property (nonatomic, readonly) NSInteger frameHeight;
 
-/**
- * @property frameSupportedPixelFormats
- * @abstract list of pixel formats for source frames for the current configuration
- */
-
+/// Available supported pixel formats for source frames for current configuration.
 @property (nonatomic, readonly) NSArray<NSNumber *> * frameSupportedPixelFormats NS_REFINED_FOR_SWIFT;
 
-/**
- * @property sourcePixelBufferAttributes
- * @abstract returns a pixelBufferAttributes dictionary describing requirements for pixelBuffers used as source and reference frames
-*/
+/// Pixel buffer attributes dictionary that describes requirements for pixel buffers which represent source frames and reference frames.
+///
+/// Use ``CVPixelBufferCreateResolvedAttributesDictionary`` to combine this dictionary with your pixel buffer attributes dictionary.
 @property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE sourcePixelBufferAttributes;
 
-/**
- * @property destinationPixelBufferAttributes
- * @abstract returns a pixelBufferAttributes dictionary describing requirements for pixelBuffers used as destination frames
-*/
+/// Pixel buffer attributes dictionary that describes requirements for pixel buffers which represent destination frames.
+///
+/// Use ``CVPixelBufferCreateResolvedAttributesDictionary`` to combine this dictionary with your pixel buffer attributes dictionary.
 @property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE destinationPixelBufferAttributes;
 
-/**
- * @property scaleFactor
- * @abstract Returns the scale factor that the configuration was initialized with.
-*/
+/// Scale factor with which you initialized the configuration.
 @property (nonatomic, readonly) float scaleFactor;
 
-/*!
-	@property maximumDimensions
-	@abstract returns the maximum dimensions for a sourceFrame for the processor
-*/
+/// Maximum dimensions for a source frame for the processor.
 @property (class, nonatomic, readonly) CMVideoDimensions maximumDimensions;
 
-/*!
-	@property minimumDimensions
-	@abstract returns the minimum dimensions for a sourceFrame for the processor
-*/
+/// Minimum dimensions for a source frame for the processor.
 @property (class, nonatomic, readonly) CMVideoDimensions minimumDimensions;
 
-/*!
-	@property supported
-	@abstract reports whether this processor is supported on the current config.
-*/
+/// Reports whether the system supports this processor on the current configuration.
 @property (class, nonatomic, readonly, getter=isSupported) BOOL supported;
 
-/**
- * Returns the supported scale factors for the provided input dimensions.
- *
- * @abstract returns an array of supported scale factors values, or an empty list if the dimensions are unsupported.
-*/
+/// Returns an array of supported scale factor values, or an empty list if the processor doesn't support the dimensions.
 + (NSArray<NSNumber*>*) supportedScaleFactorsForFrameWidth:(NSInteger)frameWidth
 											   frameHeight:(NSInteger)frameHeight NS_REFINED_FOR_SWIFT;
 
 @end
 
-
-/*!
- @class VTLowLatencySuperResolutionScalerParameters
- @abstract VTLowLatencySuperResolutionScalerParameters object contains both input and output parameters needed for the Low Latency Super Resolution Scaler Frame Processor. This object is used in the processWithParameters call of VTFrameProcessor class.
-
- @discussion VTLowLatencySuperResolutionScalerParameters are frame level parameters.
-*/
-
+/// An object that contains both input and output parameters that the low-latency super-resolution scaler frame processor needs.
+///
+/// Use this object in the `processWithParameters` call of `VTFrameProcessor` class.
+///
+/// `VTLowLatencySuperResolutionScalerParameters` are frame-level parameters.
 API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos)
 
 @interface VTLowLatencySuperResolutionScalerParameters : NSObject <VTFrameProcessorParameters>
 
-/*!
- @abstract Creates a new VTLowLatencySuperResolutionScalerParameters object.
-
- @param sourceFrame Current source frame. Must be non nil.
-
- @param destinationFrame User allocated pixel buffer that will receive the results.  Must be non nil.
-*/
+/// Creates a new low-latency, super-resolution scaler parameters object.
+///
+/// - Parameters:
+///   - sourceFrame: Current source frame; must be non `nil`.
+///   - destinationFrame: User-allocated pixel buffer that receives the scaled processor output; must be non `nil`.
 - (instancetype) initWithSourceFrame:(VTFrameProcessorFrame *)sourceFrame
 					destinationFrame:(VTFrameProcessorFrame *)destinationFrame;
 
 - (instancetype) init NS_UNAVAILABLE;
 + (instancetype) new NS_UNAVAILABLE;
 
-/**
- * @property sourceFrame
- * @abstract sourceFrame Current source frame. Must be non nil
-*/
+/// Current source frame, which must be non `nil`.
 @property(nonatomic, readonly) VTFrameProcessorFrame * sourceFrame;
 
-/**
- * @property destinationFrame
- * @abstract VTFrameProcessorFrame that contains user allocated pixel buffer that will receive the results.
-*/
-
+/// Destination frame that contains user-allocated pixel buffer that receives the scaled processor output.
 @property(nonatomic, readonly) VTFrameProcessorFrame * destinationFrame;
 
 @end
diff -ruN /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_MotionBlur.h /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_MotionBlur.h
--- /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_MotionBlur.h	2025-07-17 00:27:22
+++ /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_MotionBlur.h	2025-07-30 00:08:43
@@ -19,68 +19,64 @@
 #import <VideoToolbox/VTFrameProcessorParameters.h>
 #import <VideoToolbox/VTFrameProcessorFrame.h>
 
-/*!
-    @brief Interfaces for creating and using a MotionBlur processor
 
-    @details The VTMotionBlur processor Configuration and Parameters objects are used with the VTFrameProcessor interface defined in VTFrameProcessor.h.
-*/
-
-/*!
- @brief Quality prioritization levels to favor quality or performance.
-*/
+/// Configuration value you set to prioritize quality or performance.
 API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
 typedef NS_ENUM(NSInteger, VTMotionBlurConfigurationQualityPrioritization) {
 	VTMotionBlurConfigurationQualityPrioritizationNormal = 1,
 	VTMotionBlurConfigurationQualityPrioritizationQuality = 2,
 } NS_SWIFT_NAME(VTMotionBlurConfiguration.QualityPrioritization);
 
-/*!
- @brief List of existing algorithm revisions with the highest being the latest. Clients can read defaultRevision property to find the default revision.
- */
+/// Available algorithm revisions.
+///
+/// A new enum case with a higher revision number is added when the processing algorithm is updated.
+/// The ``VTMotionBlurConfiguration/defaultRevision`` property provides the default algorithm revision.
 API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
 typedef NS_ENUM(NSInteger, VTMotionBlurConfigurationRevision) {
 	VTMotionBlurConfigurationRevision1           = 1,    // revision 1
 } NS_SWIFT_NAME(VTMotionBlurConfiguration.Revision);
 
-/*!
- @brief Hint to let the processor know whether frames are being submitted in presenatation sequence, allowing performance optimizations based on previous processing requests
- */
+/// Indicates the order of input frames.
+///
+/// When submitting ``VTMotionBlurParameters`` to the processor, you need to provide one of these values based on how
+/// the input frames are related to each other.
+///
+/// Use ``VTMotionBlurParametersSubmissionModeSequential`` to indicate that the current submission follows presentation
+/// time order without jump or skip, when compared to previous submissions. This value provides better processor
+/// performance than other values.
+///
+/// Use ``VTMotionBlurParametersSubmissionModeRandom`` to indicate that the current submission has no relation to the
+/// previous submission. Typically, this indicates a jump or a skip in the frame sequence. The processor clears internal
+/// caches when it receives this value in ``VTFrameProcessor/processWithParameters`` function call.
 API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
 typedef NS_ENUM(NSInteger, VTMotionBlurParametersSubmissionMode) {
-	VTMotionBlurParametersSubmissionModeRandom             = 1,    // Frames are submitted in non-sequential order
-	VTMotionBlurParametersSubmissionModeSequential         = 2,    // Frames are submitted sequentially following presentation time order
+	VTMotionBlurParametersSubmissionModeRandom             = 1,    // You are submitting frames in non-sequential order.
+	VTMotionBlurParametersSubmissionModeSequential         = 2,    // You are submitting frames sequentially following presentation time order.
 } NS_SWIFT_NAME(VTMotionBlurParameters.SubmissionMode);
 
 NS_HEADER_AUDIT_BEGIN(nullability, sendability)
 
-/*!
- @class VTMotionBlurConfiguration
- @abstract Configuration that is used to set up the MotionBlur Processor.
-
- @discussion This configuration enables the MotionBlur on a VTFrameProcesing session.
-*/
-
+/// Configuration that you use to set up the motion blur processor.
+///
+/// This configuration enables the motion blur on a `VTFrameProcessor` session.
 API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
 NS_SWIFT_SENDABLE
 @interface VTMotionBlurConfiguration : NSObject <VTFrameProcessorConfiguration>
 
 #pragma mark --- init function(s).
-/*!
- @abstract Creates a new VTMotionBlurConfiguration with specified flow width and height.
 
- @discussion init will return nil if dimensions are out of range or revision is unsupported.
-
- @param frameWidth    Width of source frame in pixels. Maximum value is 8192 for macOS, and 4096 for iOS.
-
- @param frameHeight   Height of source frame in pixels. Maximum value is 4320 for macOS, and 2160 for iOS.
-
- @param usePrecomputedFlow  Boolean value to indicate that Optical Flow will be provided by the user, if false this configuration will compute the optical flow on the fly.
-
- @param qualityPrioritization Used to control quality and performance levels. See VTMotionBlurConfigurationQualityPrioritization for more info.
- 
- @param revision The specific algorithm or configuration revision that is to be used to perform the request.
- 
-*/
+/// Creates a new motion blur configuration.
+///
+/// Returns `nil` if dimensions are out of range or revision is unsupported.
+///
+/// - Parameters:
+///   - frameWidth: Width of source frame in pixels; the maximum value is 8192 for macOS, and 4096 for iOS.
+///   - frameHeight: Height of source frame in pixels; the maximum value is 4320 for macOS, and 2160 for iOS.
+///   - usePrecomputedFlow: Boolean value that indicates whether you will provide optical flow; if false, this
+///    configuration computes the optical flow on the fly.
+///   - qualityPrioritization: A level you use to prioritize quality or performance; for more information about supported
+///    levels, see ``VTMotionBlurConfigurationQualityPrioritization``.
+///   - revision: The specific algorithm or configuration revision you use to perform the request.
 - (nullable instancetype)initWithFrameWidth:(NSInteger)frameWidth
 								frameHeight:(NSInteger)frameHeight
 						 usePrecomputedFlow:(BOOL)usePrecomputedFlow
@@ -90,108 +86,75 @@
 - (instancetype) init NS_UNAVAILABLE;
 + (instancetype) new NS_UNAVAILABLE;
 
-/**
- * @property frameWidth
- * @abstract Width of source frame in pixels.
- */
+/// Width of source frame in pixels.
 @property (nonatomic, readonly) NSInteger frameWidth;
 
-/**
- * @property frameHeight
- * @abstract Height of source frame in pixels.
- */
+/// Height of source frame in pixels.
 @property (nonatomic, readonly) NSInteger frameHeight;
 
-/**
- * @property usePrecomputedFlow
- * @abstract Indicates that caller will provide optical flow.
-*/
+/// Indicates that you provide optical flow.
 @property (nonatomic, readonly) BOOL usePrecomputedFlow;
 
-/**
- * @property qualityPrioritization
- * @abstract parameter used to control quality and performance levels. See VTMotionBlurConfigurationQualityPrioritization for more info.
-*/
+/// A parameter you use to control quality and performance levels.
+///
+/// For more information about supported levels, see ``VTMotionBlurConfigurationQualityPrioritization``.
 @property (nonatomic, readonly) VTMotionBlurConfigurationQualityPrioritization qualityPrioritization;
 
-/*!
- @property revision
- @abstract The specific algorithm or configuration revision that is to be used to perform the request.
- */
+/// The specific algorithm or configuration revision you use to perform the request.
 @property (nonatomic, readonly) VTMotionBlurConfigurationRevision revision;
 
-/*!
- @property supportedRevisions
- @abstract Provides the collection of currently-supported algorithm or configuration revisions for the class of configuration.
- @discussion This property allows clients to introspect at runtime what revisions are available for each configuration.
- */
+/// Provides the collection of currently supported algorithms or configuration revisions for the class of configuration.
+///
+/// A property you use to introspect at runtime which revisions are available for each configuration.
 @property (class, nonatomic, readonly) NSIndexSet* supportedRevisions;
 
-/*!
- @property defaultRevision
- @abstract Provides the default revision of a particular algorithm or configuration.
- */
+/// Provides the default revision of a specific algorithm or configuration.
 @property (class, nonatomic, readonly) VTMotionBlurConfigurationRevision defaultRevision;
 
-/**
- * @property frameSupportedPixelFormats
- * @abstract list of source frame supported pixel formats for current configuration
- */
+/// Available supported pixel formats for source frames for current configuration.
 @property (nonatomic, readonly) NSArray<NSNumber *> * frameSupportedPixelFormats NS_REFINED_FOR_SWIFT;
 
-/**
- * @property sourcePixelBufferAttributes
- * @abstract returns a pixelBufferAttributes dictionary describing requirements for pixelBuffers used as source frames and reference frames.
-*/
+/// Pixel buffer attributes dictionary that describes requirements for pixel buffers which represent source frames and reference frames.
+///
+/// Use ``CVPixelBufferCreateResolvedAttributesDictionary`` to combine this dictionary with your pixel buffer attributes dictionary.
 @property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE sourcePixelBufferAttributes;
 
-/**
- * @property destinationPixelBufferAttributes
- * @abstract returns a pixelBufferAttributes dictionary describing requirements for pixelBuffers used as destination frames.
-*/
+/// Pixel buffer attributes dictionary that describes requirements for pixel buffers which represent destination frames.
+///
+/// Use ``CVPixelBufferCreateResolvedAttributesDictionary`` to combine this dictionary with your pixel buffer attributes dictionary.
 @property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE destinationPixelBufferAttributes;
 
-
-/*!
-	@property supported
-	@abstract reports whether this processor is supported
-*/
+/// Reports whether the system supports this processor.
 @property (class, nonatomic, readonly, getter=isSupported) BOOL supported;
 @property (class, nonatomic, readonly) Boolean processorSupported API_DEPRECATED_WITH_REPLACEMENT("isSupported", macos(15.4, 26.0)) API_UNAVAILABLE(ios) API_UNAVAILABLE(tvos, watchos, visionos);
 
 @end
 
-/*!
- @class VTMotionBlurParameters
- @abstract VTMotionBlurParameters object contains both input and output parameters needed to run the MotionBlur processor on a frame. This object is used in the processWithParameters call of VTFrameProcessor class. The output parameter for this class is destinationFrame where the output frame is returned (as VTFrameProcessorFrame) back to the caller function once the processWithParameters completes.
- 
- @discussion VTMotionBlurParameters are frame level parameters.
-*/
-
+/// An object that contains both input and output parameters that the motion blur processor needs to run on a frame.
+///
+/// Use this object in the `processWithParameters` call of `VTFrameProcessor` class. The output parameter for this class is `destinationFrame` where the processor returns the output frame (as `VTFrameProcessorFrame`) back to you once the `processWithParameters` completes.
+///
+/// `VTMotionBlurParameters` are frame-level parameters.
 API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
 @interface VTMotionBlurParameters : NSObject <VTFrameProcessorParameters>
 
-/*!
- @abstract Creates a new VTMotionBlurParameters .
-
- @discussion init will return nil if sourceFrame or destinationFrame is nil, sourceFrame and reference frames  are different pixelFormats, or motionBlurStrength is out of range.
-
- @param sourceFrame Current source frame. Must be non nil.
-  
- @param nextFrame Next source frame in presentation time order. For the last frame this can be set to nil.
-
- @param previousFrame  Previous source frame in presentation time order. For the first frame this can be set to nil.
- 
- @param nextOpticalFlow Optional VTFrameProcessorOpticalFlow object that contains forward and backward optical flow with next frame. Only needed if optical flow is pre-computed. For the last frame this will always be nil.
-  
- @param previousOpticalFlow Optional VTFrameProcessorOpticalFlow object that contains forward and backward optical flow with previous frame. Only needed if optical flow is pre-computed. For the first frame this will always be nil.
- 
- @param motionBlurStrength NSInteger number to indicate the strength of blur to apply. Range is from 1 to 100. Default value is 50.
-
- @param submissionMode Set to VTMotionBlurParametersSubmissionModeSequential to indicate that current submission follow presentation time order without jump or skip when compared to previous submission. VTMotionBlurParametersSubmissionModeSequential will yield better performance. Set to VTMotionBlurParametersSubmissionModeRandom to indicate a skip or a jump in frame sequence. If VTMotionBlurParametersSubmissionModeRandom is set internal cache will be cleared during processWithParameters call.
- 
- @param destinationFrame User allocated pixel buffer that will receive the results.
-*/
+/// Creates a new motion blur parameters object.
+///
+/// Returns `nil` if `sourceFrame` or `destinationFrame` is `nil`, `sourceFrame` and reference frames are different pixel
+/// formats, or `motionBlurStrength` is out of range.
+///
+/// - Parameters:
+///   - sourceFrame: Current source frame; must be non `nil`.
+///   - nextFrame: Next source frame in presentation time order; for the last frame you can set this to `nil`.
+///   - previousFrame: Previous source frame in presentation time order; for the first frame you can set this to `nil`.
+///   - nextOpticalFlow: Optional `VTFrameProcessorOpticalFlow` object that contains forward and backward optical flow
+///   with `nextFrame`. You only need this object if optical flow is pre-computed. For the last frame this is always `nil`.
+///   - previousOpticalFlow: Optional VTFrameProcessorOpticalFlow object that contains forward and backward optical flow
+///   with `previousFrame`. You only need to use this if the optical flow is pre-computed. For the first frame this is always `nil`.
+///   - motionBlurStrength: Number that indicates the strength of blur applied by the processor. Range is from 1 to 100. Default value is 50.
+///   - submissionMode: Provides a hint to let the processor know whether you are submitting frames in presentation
+///   sequence. For more information about supported modes see ``VTMotionBlurParametersSubmissionMode``.
+///   - destinationFrame: User-allocated pixel buffer that receives a frame with motion blur applied by the processor.
 - (nullable instancetype) initWithSourceFrame:(VTFrameProcessorFrame *)sourceFrame
 									nextFrame:(VTFrameProcessorFrame * _Nullable)nextFrame
 								previousFrame:(VTFrameProcessorFrame * _Nullable)previousFrame
@@ -204,58 +167,34 @@
 - (instancetype) init NS_UNAVAILABLE;
 + (instancetype) new NS_UNAVAILABLE;
 
-/**
- * @property sourceFrame
- * @abstract sourceFrame Current source frame. Must be non nil
-*/
-
+/// Current source frame, which must be non `nil`.
 @property(nonatomic, readonly) VTFrameProcessorFrame * sourceFrame;
 
-/**
- * @property nextFrame
- * @abstract Next source frame in presentation time order. For the last frame this will be nil.
-*/
-
+/// The next source frame in presentation time order, which is `nil` for the last frame.
 @property(nonatomic, readonly, nullable) VTFrameProcessorFrame * nextFrame;
 
-/**
- * @property previousFrame
- * @abstract Previous source frame in presentation time order. For the first frame this will be nil.
-*/
-
+/// Previous source frame in presentation time order, which is `nil` for the first frame.
 @property(nonatomic, readonly, nullable) VTFrameProcessorFrame * previousFrame;
 
-/**
- * @property nextOpticalFlow
- * @abstract Optional VTFrameProcessorOpticalFlow object that contains forward and backward optical flow with next frame. Only needed if optical flow is pre-computed. For the last frame this will be nil.
-*/
-
+/// Optional frame processor optical flow object that contains forward and backward optical flow with next frame.
+///
+/// You only need to use this object if the optical flow is pre-computed. For the last frame this is `nil`.
 @property(nonatomic, readonly, nullable) VTFrameProcessorOpticalFlow * nextOpticalFlow;
 
-/**
- * @property previousOpticalFlow
- * @abstract Optional VTFrameProcessorOpticalFlow object  that contains forward and backward optical flow with previous frame. Only needed if optical flow is pre-computed. For the first frame this will be nil.
-*/
-
+/// Optional frame processor optical flow object that contains forward and backward optical flow with previous frame.
+///
+/// You only need to use this object if the optical flow is pre-computed. For the first frame this is `nil`.
 @property(nonatomic, readonly, nullable) VTFrameProcessorOpticalFlow * previousOpticalFlow;
 
-/**
- * @property motionBlurStrength
- * @abstract motionBlurStrength NSInteger number to indicate the strength of blur to apply. Range is from 1 to 100. Default value is 50.
-*/
+/// Number that indicates the strength of motion blur.
+///
+/// The range is from 1 to 100; the default value is 50.
 @property (nonatomic, readonly) NSInteger motionBlurStrength;
 
-/**
- * @property submissionMode
- * @abstract A VTMotionBlurParametersSubmissionMode value describing the processing request in this Parameters object .
-*/
+/// Ordering of the input frames in this submission relative to the previous submission.
 @property (nonatomic, readonly) VTMotionBlurParametersSubmissionMode submissionMode;
 
-/**
- * @property destinationFrame
- * @abstract VTFrameProcessorFrame that contains user allocated pixel buffer that will receive the results.
-*/
-
+/// Destination frame that contains a user-allocated pixel buffer that receives a frame with motion blur applied by the processor.
 @property(nonatomic, readonly) VTFrameProcessorFrame * destinationFrame;
 
 @end
diff -ruN /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_OpticalFlow.h /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_OpticalFlow.h
--- /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_OpticalFlow.h	2025-07-17 00:24:34
+++ /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_OpticalFlow.h	2025-07-30 00:14:07
@@ -19,69 +19,62 @@
 #import <VideoToolbox/VTFrameProcessorParameters.h>
 #import <VideoToolbox/VTFrameProcessorFrame.h>
 
-/*!
-    @brief A few words about the OpticalFlow
 
-    @details
-*/
-
-/*!
- @brief Quality prioritization levels to favor quality or performance.
-*/
+/// Configuration value you set to prioritize quality or performance.
 API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
 typedef NS_ENUM(NSInteger, VTOpticalFlowConfigurationQualityPrioritization) {
 	VTOpticalFlowConfigurationQualityPrioritizationNormal = 1,
 	VTOpticalFlowConfigurationQualityPrioritizationQuality = 2,
 } NS_SWIFT_NAME(VTOpticalFlowConfiguration.QualityPrioritization);
 
-/*!
- @brief List of existing algorithm revisions with the highest being the latest. Clients can read defaultRevision property to find the default revision.
- */
+/// Available algorithm revisions.
+///
+/// A new enum case with a higher revision number is added when the processing algorithm is updated.
+/// The ``VTOpticalFlowConfiguration/defaultRevision`` property provides the default algorithm revision.
 API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
 typedef NS_ENUM(NSInteger, VTOpticalFlowConfigurationRevision) {
 	VTOpticalFlowConfigurationRevision1           = 1,    // revision 1
 } NS_SWIFT_NAME(VTOpticalFlowConfiguration.Revision);
 
-/*!
- @brief Hint to let the processor know whether frames are being submitted in presenatation sequence, allowing performance optimizations based on previous processing requests
- */
+/// Indicates the order of input frames.
+///
+/// When submitting ``VTOpticalFlowParameters`` to the processor, you need to provide one of these values based on how
+/// the input frames are related to each other.
+///
+/// Use ``VTOpticalFlowParametersSubmissionModeSequential`` to indicate that the current submission follows presentation
+/// time order without jump or skip, when compared to previous submissions. This value provides better processor
+/// performance than other values.
+///
+/// Use ``VTOpticalFlowParametersSubmissionModeRandom`` to indicate that the current submission has no relation to the
+/// previous submission. Typically, this indicates a jump or a skip in the frame sequence. The processor clears internal
+/// caches when it receives this value in the ``VTFrameProcessor/processWithParameters`` function call.
 API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
 typedef NS_ENUM(NSInteger, VTOpticalFlowParametersSubmissionMode) {
-	VTOpticalFlowParametersSubmissionModeRandom             = 1,    // Frames are submitted in non-sequential order
-	VTOpticalFlowParametersSubmissionModeSequential         = 2,    // Frames are submitted sequentially following presentation time order
+	VTOpticalFlowParametersSubmissionModeRandom             = 1,    // You are submitting frames in non-sequential order.
+	VTOpticalFlowParametersSubmissionModeSequential         = 2,    // You are submitting frames sequentially following presentation time order.
 } NS_SWIFT_NAME(VTOpticalFlowParameters.SubmissionMode);
 
 NS_HEADER_AUDIT_BEGIN(nullability, sendability)
 
-
-/*!
- @class VTOpticalFlowConfiguration
- @abstract Configuration that is used to set up an OpticalFlow processor
-
- @discussion This configuration enables the OpticalFlow on a VTFrameProcessing session.
-*/
-
+/// Configuration that you use to set up an optical flow processor.
+///
+/// This configuration enables the optical flow on a `VTFrameProcessor` session.
 API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
 NS_SWIFT_SENDABLE
 @interface VTOpticalFlowConfiguration : NSObject <VTFrameProcessorConfiguration>
 
 #pragma mark --- init function(s).
-/*!
- @abstract Creates a new VTOpticalFlowConfiguration with specified flow width and height.
- 
- @discussion init will return nil if dimensions are out of range or revision is unsupported.
 
- @param frameWidth    Width of source frame in pixels. Maximum value is 8192 for macOS, and 4096 for iOS.
-
- @param frameHeight   Height of source frame in pixels. Maximum value is 4320 for macOS, and 2160 for iOS.
-
- @param usePrecomputedFlow  Boolean value to indicate that Optical Flow will be provided by the user, if false this configuration will compute the optical flow on the fly.
-
- @param qualityPrioritization Used to control quality and performance levels. See VTOpticalFlowConfigurationQualityPrioritization for more info.
- 
- @param revision The specific algorithm or configuration revision that is to be used to perform the request.
- 
-*/
+/// Creates a new optical flow configuration.
+///
+/// Returns ``nil`` if dimensions are out of range or revision is unsupported.
+///
+/// - Parameters:
+///   - frameWidth: Width of source frame in pixels; the maximum value is 8192 for macOS, and 4096 for iOS.
+///   - frameHeight: Height of source frame in pixels; the maximum value is 4320 for macOS, and 2160 for iOS.
+///   - qualityPrioritization: A level you use to prioritize quality or performance; for more information about supported
+///   levels, see ``VTOpticalFlowConfigurationQualityPrioritization``.
+///   - revision: The specific algorithm or configuration revision you use to perform the request.
 - (nullable instancetype)initWithFrameWidth:(NSInteger)frameWidth
 								frameHeight:(NSInteger)frameHeight
 					  qualityPrioritization:(VTOpticalFlowConfigurationQualityPrioritization)qualityPrioritization
@@ -90,95 +83,65 @@
 - (instancetype) init NS_UNAVAILABLE;
 + (instancetype) new NS_UNAVAILABLE;
 
-/**
- * @property frameWidth
- * @abstract Width of source frame in pixels.
- */
+/// Width of source frame in pixels.
 @property (nonatomic, readonly) NSInteger frameWidth;
 
-/**
- * @property frameHeight
- * @abstract Height of source frame in pixels.
- */
+/// Height of source frame in pixels.
 @property (nonatomic, readonly) NSInteger frameHeight;
 
-/**
- * @property qualityPrioritization
- * @abstract parameter used to control quality and performance levels. See VTOpticalFlowConfigurationQualityPrioritization for more info.
-*/
+/// A parameter you use to control quality and performance levels.
+///
+/// For more information about supported levels, see ``VTOpticalFlowConfigurationQualityPrioritization``.
 @property (nonatomic, readonly) VTOpticalFlowConfigurationQualityPrioritization qualityPrioritization;
 
-/*!
- @property revision
- @abstract The specific algorithm or configuration revision that is to be used to perform the request.
- */
+/// The specific algorithm or configuration revision you use to perform the request.
 @property (nonatomic, readonly) VTOpticalFlowConfigurationRevision revision;
 
-/*!
- @property supportedRevisions
- @abstract Provides the collection of currently-supported algorithm or configuration revisions for the class of configuration.
- @discussion This property allows clients to introspect at runtime what revisions are available for each configuration.
- */
+/// Provides the collection of currently supported algorithms or configuration revisions for the class of configuration.
+///
+/// A property you use to introspect at runtime which revisions are available for each configuration.
 @property (class, nonatomic, readonly) NSIndexSet* supportedRevisions;
 
-/*!
- @property defaultRevision
- @abstract Provides the default revision of a particular algorithm or configuration.
- */
+/// Provides the default revision of a specific algorithm or configuration.
 @property (class, nonatomic, readonly) VTOpticalFlowConfigurationRevision defaultRevision;
 
-/**
- * @property frameSupportedPixelFormats
- * @abstract list of source frame supported pixel formats for current configuration
- */
+/// Supported pixel formats for source frames for current configuration.
 @property (nonatomic, readonly) NSArray<NSNumber *> * frameSupportedPixelFormats NS_REFINED_FOR_SWIFT;
 
-/**
- * @property sourcePixelBufferAttributes
- * @abstract returns a pixelBufferAttributes dictionary describing requirements for pixelBuffers used as source frames and reference frames.
-*/
+/// Pixel buffer attributes dictionary that describes requirements for pixel buffers which represent source frames and reference frames.
+///
+/// Use ``CVPixelBufferCreateResolvedAttributesDictionary`` to combine this dictionary with your pixel buffer attributes dictionary.
 @property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE sourcePixelBufferAttributes;
 
-/**
- * @property destinationPixelBufferAttributes
- * @abstract returns a pixelBufferAttributes dictionary describing requirements for pixelBuffers used as OpticalFlow buffers
-*/
+/// Pixel buffer attributes dictionary that describes requirements for pixel buffers which represent destination frames.
+///
+/// Use ``CVPixelBufferCreateResolvedAttributesDictionary`` to combine this dictionary with your pixel buffer attributes dictionary.
 @property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE destinationPixelBufferAttributes;
 
-
-/*!
-	@property supported
-	@abstract reports whether this processor is supported
-*/
+/// Reports whether the system supports this processor.
 @property (class, nonatomic, readonly, getter=isSupported) BOOL supported;
 @property (class, nonatomic, readonly) Boolean processorSupported API_DEPRECATED_WITH_REPLACEMENT("isSupported", macos(15.4, 26.0)) API_UNAVAILABLE(ios) API_UNAVAILABLE(tvos, watchos, visionos);
 
 @end
 
-
-/*!
- @class VTOpticalFlowParameters
- @abstract VTOpticalFlowParameters object contains both input and output parameters needed to generate optical flow between two frames. This object is used in the processWithParameters call of VTFrameProcessor class. The output parameter for this class is destinationOpticalFlow where the output flow is returned (as VTFrameProcessorMutableOpticalFlow) back to the caller function once the processWithParameters completes.
- 
- @discussion VTOpticalFlowParameters are frame level parameters.
-*/
-
+/// An object that contains both input and output parameters the frame processor needs to generate optical flow between two frames.
+///
+/// Use this object in the `processWithParameters` call of `VTFrameProcessor` class. The output parameter for this class is `destinationOpticalFlow` where the processor returns the output flow (as mutable `VTFrameProcessorOpticalFlow`) back to you once the `processWithParameters` completes.
+///
+/// `VTOpticalFlowParameters` are frame-level parameters.
 API_AVAILABLE(macos(15.4), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
 @interface VTOpticalFlowParameters : NSObject <VTFrameProcessorParameters>
 
-/*!
- @abstract Creates a new VTOpticalFlowParameters .
-
- @discussion init will return nil if sourceFrame or nextFrame is nil, or sourceFrame and nextFrame are different pixelFormats..
-
- @param sourceFrame Current source frame. Must be non nil.
-  
- @param nextFrame Next source frame in presentation time order.
-  
- @param submissionMode Set to VTOpticalFlowParametersSubmissionModeSequential to indicate that current submission follow presentation time order without jump or skip when compared to previous submission. VTOpticalFlowParametersSubmissionModeSequential will yield better performance. Set to  VTOpticalFlowParametersSubmissionModeRandom to indicate a skip or a jump in frame sequence. If VTOpticalFlowParametersSubmissionModeRandom is set internal cache will be cleared during processWithParameters call.
- 
- @param destinationOpticalFlow User allocated VTFrameProcessorMutableOpticalFlow that will receive the results.
-*/
+/// Creates a new optical flow parameters object.
+///
+/// Returns `nil` if `sourceFrame` or `nextFrame` is `nil`, or if `sourceFrame` and `nextFrame` have different pixel formats.
+///
+/// - Parameters:
+///   - sourceFrame: Current source frame; must be non `nil`.
+///   - nextFrame: Next source frame in presentation time order.
+///   - submissionMode: Provides a hint to let the processor know whether you are submitting frames in presentation
+///   sequence. For more information about supported modes see ``VTOpticalFlowParametersSubmissionMode``.
+///   - destinationOpticalFlow: User-allocated `VTFrameProcessorOpticalFlow` that receives the results.
 - (nullable instancetype) initWithSourceFrame:(VTFrameProcessorFrame *)sourceFrame
 									nextFrame:(VTFrameProcessorFrame *)nextFrame
 							   submissionMode:(VTOpticalFlowParametersSubmissionMode)submissionMode
@@ -187,31 +150,16 @@
 - (instancetype) init NS_UNAVAILABLE;
 + (instancetype) new NS_UNAVAILABLE;
 
-/**
- * @property sourceFrame
- * @abstract sourceFrame Current source frame. Must be non nil
-*/
-
+/// Current source frame, which must be non `nil`.
 @property(nonatomic, readonly) VTFrameProcessorFrame * sourceFrame;
 
-/**
- * @property nextFrame
- * @abstract Next source frame in presentation time order.
-*/
-
+/// The next source frame in presentation time order.
 @property(nonatomic, readonly) VTFrameProcessorFrame * nextFrame;
 
-/**
- * @property submissionMode
- * @abstract A VTOpticalFlowParametersSubmissionMode value describing the processing request in this Parameters object .
-*/
+/// Ordering of the input frames in this submission relative to the previous submission.
 @property (nonatomic, readonly) VTOpticalFlowParametersSubmissionMode submissionMode;
 
-/**
- * @property destinationOpticalFlow
- * @abstract VTFrameProcessorMutableOpticalFlow that contains user allocated flow pixel buffers that will receive the results.
-*/
-
+/// Output optical flow calculated by the processor.
 @property(nonatomic, readonly) VTFrameProcessorOpticalFlow * destinationOpticalFlow;
 
 @end
diff -ruN /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_SuperResolutionScaler.h /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_SuperResolutionScaler.h
--- /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_SuperResolutionScaler.h	2025-07-17 00:27:40
+++ /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_SuperResolutionScaler.h	2025-07-30 00:06:36
@@ -19,82 +19,93 @@
 #import <VideoToolbox/VTFrameProcessorParameters.h>
 #import <VideoToolbox/VTFrameProcessorFrame.h>
 
-/*!
-    @brief Interfaces for creating and using a SuperResolution processor
 
-    @details The VTSuperResolutionScaler processor Configuration and Parameters objects are used with the VTFrameProcessor interface defined in VTFrameProcessor.h.
- */
-
-/*!
- @brief Quality prioritization levels to favor quality or performance.
-*/
+/// Configuration value you set to prioritize quality or performance.
 API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
 typedef NS_ENUM(NSInteger, VTSuperResolutionScalerConfigurationQualityPrioritization) {
 	VTSuperResolutionScalerConfigurationQualityPrioritizationNormal = 1,
 } NS_SWIFT_NAME(VTSuperResolutionScalerConfiguration.QualityPrioritization);
 
-/*!
- @brief List of existing algorithm revisions with the highest being the latest. Clients can read defaultRevision property to find the default revision.
- */
+/// Available algorithm revisions.
+///
+/// A new enum case with a higher revision number is added when the processing algorithm is updated.
+/// The ``VTSuperResolutionScalerConfiguration/defaultRevision`` property provides the default algorithm revision.
 API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
 typedef NS_ENUM(NSInteger, VTSuperResolutionScalerConfigurationRevision) {
 	VTSuperResolutionScalerConfigurationRevision1           = 1,    // revision 1
 } NS_SWIFT_NAME(VTSuperResolutionScalerConfiguration.Revision);
 
-/*!
-@brief List of SuperResolution  input types.
-*/
+/// Available super-resolution processor input types.
 API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
 typedef NS_ENUM(NSInteger, VTSuperResolutionScalerConfigurationInputType) {
 	VTSuperResolutionScalerConfigurationInputTypeVideo		= 1,
 	VTSuperResolutionScalerConfigurationInputTypeImage		= 2,
 } NS_SWIFT_NAME(VTSuperResolutionScalerConfiguration.InputType);
 
-/*!
-@brief List of SuperResolution  input types.
-*/
+/// Available super-resolution processor model status types.
 API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
 typedef NS_ENUM(NSInteger, VTSuperResolutionScalerConfigurationModelStatus) {
 	VTSuperResolutionScalerConfigurationModelStatusDownloadRequired	= 0,
 	VTSuperResolutionScalerConfigurationModelStatusDownloading		= 1,
-	VTSuperResolutionScalerConfigurationModelStatusReady				= 2,
+	VTSuperResolutionScalerConfigurationModelStatusReady			= 2,
 } NS_SWIFT_NAME(VTSuperResolutionScalerConfiguration.ModelStatus);
 
-/*!
- @brief Hint to let the processor know whether frames are being submitted in presenatation sequence, allowing performance optimizations based on previous processing requests
- */
+/// Indicates the order of input frames.
+///
+/// When submitting ``VTSuperResolutionScalerParameters`` to the processor, you need to provide one of these values based on
+/// how the input frames are related to each other.
+///
+/// Use ``VTSuperResolutionScalerParametersSubmissionModeSequential`` to indicate that the current submission follows
+/// presentation time order without jumps or skips, when compared to previous submissions. This value provides better
+/// processor performance than other values.
+///
+/// Use ``VTSuperResolutionScalerParametersSubmissionModeRandom`` to indicate that the current submission has no relation
+/// to the previous submission. Typically, this indicates a jump or skip in the frame sequence. The processor clears
+/// internal caches when it receives this value in the ``VTFrameProcessor/processWithParameters`` function call.
 API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
 typedef NS_ENUM(NSInteger, VTSuperResolutionScalerParametersSubmissionMode) {
-	VTSuperResolutionScalerParametersSubmissionModeRandom             = 1,    // Frames are submitted in non-sequential order
-	VTSuperResolutionScalerParametersSubmissionModeSequential         = 2,    // Frames are submitted sequentially following presentation time order
+	VTSuperResolutionScalerParametersSubmissionModeRandom             = 1,    // You are submitting frames in non-sequential order.
+	VTSuperResolutionScalerParametersSubmissionModeSequential         = 2,    // You are submitting frames sequentially following presentation time order.
 } NS_SWIFT_NAME(VTSuperResolutionScalerParameters.SubmissionMode);
 
 
 NS_HEADER_AUDIT_BEGIN(nullability, sendability)
 
-/*!
- @class VTSuperResolutionScalerConfiguration
- @abstract Configuration that is used to set up the SuperResolution Processor.
-
- @discussion This configuration enables the SuperResolution on a VTFrameProcessing session.  IMPORTANT: The VTSuperResolutionScaler processor may require ML models which need to be downloaded by the framework in order to operate.  Before using calling startSessionWithConfiguration with a VTSuperResolutionScalerConfiguration, it is important that you verify that the necessary models are present by checking the configurationModelStatus on the configuration object.  If models are not available, model download can be triggered using the downloadConfigurationModelWithCompletionHandler method on the configuration object.  Best practice is to confirm availability of models and drive download with user awareness and interaction before engaging workflows where the processor is needed.
-*/
-
+/// Configuration that you use to set up the super-resolution processor.
+///
+/// This configuration enables the super-resolution processor on a `VTFrameProcessor` session.
+///
+/// > Important: The super-resolution processor may require ML models which the framework needs to download in order to
+/// operate. Before calling ``VTFrameProcessor/startSessionWithConfiguration:error:`` with an instance of this class,
+/// it is important that you verify that the necessary models are present by checking ``configurationModelStatus``.
+/// If models are not available, you can trigger model download using the ``downloadConfigurationModelWithCompletionHandler:``
+/// method. Best practice is to confirm availability of models and drive download with user awareness and interaction
+/// before engaging workflows that need this processor.
 API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
 NS_SWIFT_SENDABLE
 @interface VTSuperResolutionScalerConfiguration : NSObject <VTFrameProcessorConfiguration>
 
 #pragma mark --- init function(s).
-/*!
- @abstract Creates a new VTSuperResolutionScalerConfiguration with specified flow width and height.
- @discussion init will return nil if dimensions are out of range or revision is unsupported.
- @param frameWidth    Width of source frame in pixels. With VTSuperResolutionScalerConfigurationInputTypeVideo, maximum width is 1920 on macOS and 1440 on iOS. With VTSuperResolutionScalerConfigurationInputTypeImage, maximum width is 1920.
- @param frameHeight   Height of source frame in pixels. With VTSuperResolutionScalerConfigurationInputTypeVideo, maximum height is 1080. With VTSuperResolutionScalerConfigurationInputTypeImage, maximum height is 1920.
- @param scaleFactor Indicates the scale factor between input and output.
- @param inputType  Indicates the type of input (video / image ).
- @param usePrecomputedFlow  Boolean value to indicate that Optical Flow will be provided by the user, if false this configuration will compute the optical flow on the fly.
- @param qualityPrioritization Used to control quality and performance levels. See VTSuperResolutionScalerConfigurationQualityPrioritization for more info.
- @param revision The specific algorithm or configuration revision that is to be used to perform the request.
-*/
+
+/// Creates a new super-resolution scaler processor configuration.
+///
+/// This processor increases resolution of an image or video.
+/// Returns `nil` if dimensions are out of range or revision is unsupported.
+///
+/// - Parameters:
+///   - frameWidth: Width of source frame in pixels. With ``VTSuperResolutionScalerConfigurationInputTypeVideo``,
+///   maximum width is 1920 on macOS and 1440 on iOS. With ``VTSuperResolutionScalerConfigurationInputTypeImage``,
+///   maximum width is 1920.
+///   - frameHeight: Height of source frame in pixels. With ``VTSuperResolutionScalerConfigurationInputTypeVideo``,
+///   maximum height is 1080. With ``VTSuperResolutionScalerConfigurationInputTypeImage``, maximum height is 1920 on
+///   macOS and 1080 on iOS.
+///   - scaleFactor: Indicates the scale factor between input and output.
+///   - inputType: Indicates the type of input, either video or image.
+///   - usePrecomputedFlow: Boolean value to indicate that you provide optical flow; if false, this configuration
+///   computes the optical flow on the fly.
+///   - qualityPrioritization: A level you use to prioritize quality or performance; for more information about
+///   supported levels, see ``VTSuperResolutionScalerConfigurationQualityPrioritization``.
+///   - revision: The specific algorithm or configuration revision you use to perform the request.
 - (nullable instancetype)initWithFrameWidth:(NSInteger)frameWidth
 								frameHeight:(NSInteger)frameHeight
 								scaleFactor:(NSInteger)scaleFactor
@@ -106,135 +117,94 @@
 - (instancetype) init NS_UNAVAILABLE;
 + (instancetype) new NS_UNAVAILABLE;
 
-/**
- * @property frameWidth
- * @abstract Width of source frame in pixels.
- */
+/// Width of source frame in pixels.
 @property (nonatomic, readonly) NSInteger frameWidth;
 
-/**
- * @property frameHeight
- * @abstract Height of source frame in pixels.
- */
+/// Height of source frame in pixels.
 @property (nonatomic, readonly) NSInteger frameHeight;
 
-/**
- * @property inputType
- * @abstract Indicates the type of input.
-*/
+/// Indicates the type of input.
 @property (nonatomic, readonly) VTSuperResolutionScalerConfigurationInputType inputType;
 
-/**
- * @property precomputedFlow
- * @abstract Indicates that caller will provide optical flow.
-*/
+/// Indicates that you provide optical flow.
 @property (nonatomic, readonly, getter=usesPrecomputedFlow) BOOL precomputedFlow;
 
-/**
- * @property scaleFactor
- * @abstract Indicates the scale factor between input and output.
-*/
+/// Indicates the scale factor between input and output.
 @property (nonatomic, readonly) NSInteger scaleFactor;
 
-/**
- * @property qualityPrioritization
- * @abstract parameter used to control quality and performance levels. See VTSuperResolutionScalerConfigurationQualityPrioritization for more info.
-*/
+/// A parameter to control quality and performance levels.
+///
+/// For more information about supported levels, see ``VTSuperResolutionScalerConfigurationQualityPrioritization``.
 @property (nonatomic, readonly) VTSuperResolutionScalerConfigurationQualityPrioritization qualityPrioritization;
 
-/*!
- @property revision
- @abstract The specific algorithm or configuration revision that is to be used to perform the request.
- */
+/// The specific algorithm or configuration revision you use to perform the request.
 @property (nonatomic, readonly) VTSuperResolutionScalerConfigurationRevision revision;
 
-/*!
- @property supportedRevisions
- @abstract Provides the collection of currently-supported algorithm or configuration revisions for the class of configuration.
- @discussion This property allows clients to introspect at runtime what revisions are available for each configuration.
- */
+/// Provides the collection of currently supported algorithms or configuration revisions for the class of configuration.
+///
+/// A property you use to introspect at runtime which revisions are available for each configuration.
 @property (class, nonatomic, readonly) NSIndexSet* supportedRevisions;
 
-/*!
- @property defaultRevision
- @abstract Provides the default revision of a particular algorithm or configuration.
- */
+/// Provides the default revision of a specific algorithm or configuration.
 @property (class, nonatomic, readonly) VTSuperResolutionScalerConfigurationRevision defaultRevision;
 
-/**
- * @property frameSupportedPixelFormats
- * @abstract list of source frame supported pixel formats for current configuration
- */
+/// Available supported pixel formats for source frames for current configuration.
 @property (nonatomic, readonly) NSArray<NSNumber *> * frameSupportedPixelFormats NS_REFINED_FOR_SWIFT;
 
-/**
- * @property sourcePixelBufferAttributes
- * @abstract returns a pixelBufferAttributes dictionary describing requirements for pixelBuffers used as source frames and reference frames.
-*/
+/// Pixel buffer attributes dictionary that describes requirements for pixel buffers which represent source frames and reference frames.
+///
+/// Use ``CVPixelBufferCreateResolvedAttributesDictionary`` to combine this dictionary with your pixel buffer attributes dictionary.
 @property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE sourcePixelBufferAttributes;
 
-/**
- * @property destinationPixelBufferAttributes
- * @abstract returns a pixelBufferAttributes dictionary describing requirements for pixelBuffers used as destination frames.
-*/
+/// Pixel buffer attributes dictionary that describes requirements for pixel buffers which represent destination frames.
+///
+/// Use ``CVPixelBufferCreateResolvedAttributesDictionary`` to combine this dictionary with your pixel buffer attributes dictionary.
 @property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE destinationPixelBufferAttributes;
 
-/*!
-	@property configurationModelStatus
-	@abstract reports the download status of models required to use VTSuperResolutionScaler for the current configuration.
-*/
+/// Reports the download status of models that the system needs for the current configuration.
 @property (nonatomic, readonly) VTSuperResolutionScalerConfigurationModelStatus configurationModelStatus;
 
-/*!
- @abstract This interface requests that models associated with the VTSuperResolutionScalerConfiguration be downloaded.
+/// Downloads models that the system needs for the current configuration.
+///
+/// This method downloads model assets required for the current configuration in background. You should call this method
+/// if ``configurationModelStatus`` is ``VTSuperResolutionScalerConfigurationModelStatusDownloadRequired``. After this
+/// method is called, you can query ``configurationModelPercentageAvailable`` to determine progress of model asset
+/// download process. If the download fails, the completion handler is invoked with an `NSError`, and the
+/// ``configurationModelStatus`` goes back to ``VTSuperResolutionScalerConfigurationModelStatusDownloadRequired``. If
+/// the download succeeds, the completion handler is invoked with `nil` NSError.
+- (void)downloadConfigurationModelWithCompletionHandler:(void (^)(NSError * _Nullable error))completionHandler;
 
- @discussion This interface can be used to download model assets required for the current VTSuperResolutionScalerConfiguration if the state is currently VTSuperResolutionScalerConfigurationModelStatusDownloadRequired.  The processorModelStatus class property can be queried to see if models are all already present.  If a download has been initiated, processorModelPercentageAvailable can be queried to determine what percentage of the model models are avialable.
- If the download fails, the completion handler will return an NSError, and the status will go back to VTSuperResolutionScalerConfigurationModelStatusDownloadRequired.  If the download succeeds, the NSError return value will be nil.
-*/
-- (void)downloadConfigurationModelWithCompletionHandler:(void (^)( NSError * _Nullable error))completionHandler;
-
-/*!
-	@property configurationModelPercentageAvailable
-	@abstract Returns a floating point value between 0.0 and 1.0 indicating the percentage of required model assets that have been downloaded.
-*/
+/// Returns a floating point value between 0.0 and 1.0 indicating the percentage of required model assets that have been downloaded.
 @property (nonatomic, readonly) float configurationModelPercentageAvailable;
 
-
-/*!
-	@property supported
-	@abstract reports whether this processor is supported
-*/
+/// Reports whether the system supports this processor.
 @property (class, nonatomic, readonly, getter=isSupported) BOOL supported;
 
-/*!
-	@property supportedScaleFactors
-	@abstract reports the set of supported scale factors that can be used when initializing a VTSuperResolutionScalerConfiguration.
-*/
+/// Reports the set of supported scale factors to use when initializing a super-resolution scaler configuration.
 @property (class, nonatomic, readonly) NSArray<NSNumber*> * supportedScaleFactors NS_REFINED_FOR_SWIFT;
 
 @end
 
-
-/*!
- @class VTSuperResolutionScalerParameters
- @abstract VTSuperResolutionScalerParameters object contains both input and output parameters needed to run the SuperResolution processor on a frame. This object is used in the processWithParameters call of VTFrameProcessor class. The output parameter for this class is destinationFrame where the output frame is returned (as VTFrameProcessorFrame) back to the caller function once the processWithParameters completes.
- 
- @discussion VTSuperResolutionScalerParameters are frame level parameters.
-*/
-
+/// An object that contains both input and output parameters that the super-resolution processor needs to run on a frame.
+///
+/// Use this object in the `processWithParameters` call of the `VTFrameProcessor` class. The output parameter for this class is `destinationFrame`, where the processor returns the output frame (as `VTFrameProcessorFrame`) back to you once `processWithParameters` completes.
+///
+/// `VTSuperResolutionScalerParameters` are frame-level parameters.
 API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(tvos, visionos, watchos)
 @interface VTSuperResolutionScalerParameters : NSObject <VTFrameProcessorParameters>
 
-/*!
- @abstract Creates a new VTSuperResolutionScalerParameters .
- @discussion init will return nil if sourceFrame or destinationFrame is nil, sourceFrame and reference frames  are different pixelFormats.
- @param sourceFrame Current source frame. Must be non nil.
- @param previousFrame  The Previous source frame in presentation time order. For the first frame this can be set to nil.
- @param previousOutputFrame  The Previous output frame in presentation time order. For the first frame this can be set to nil.
- @param opticalFlow Optional VTFrameProcessorOpticalFlow object that contains forward and backward optical flow between sourceFrame and previousFrame frame. Only needed if optical flow is pre-computed.
- @param submissionMode Set to VTSuperResolutionScalerParametersSubmissionModeSequential to indicate that current submission follow presentation time order without jump or skip when compared to previous submission. VTSuperResolutionScalerParametersSubmissionModeSequential will yield better performance. Set to VTSuperResolutionScalerParametersSubmissionModeRandom to indicate a skip or a jump in frame sequence.
-@param destinationFrame User allocated pixel buffer that will receive the results.
-*/
+/// Creates a new super-resolution scaler parameters instance.
+///
+/// Returns `nil` if `sourceFrame` or `destinationFrame` is `nil`, or if `sourceFrame` and reference frames have different pixel formats.
+///
+/// - Parameters:
+///   - sourceFrame: Current source frame; must be non `nil`.
+///   - previousFrame: The previous source frame in presentation time order. For the first frame you can set this to `nil`.
+///   - previousOutputFrame: The previous output frame in presentation time order. For the first frame you can set this to `nil`.
+///   - opticalFlow: Optional `VTFrameProcessorOpticalFlow` object that contains forward and backward optical flow between the `sourceFrame` and `previousFrame`. You only need this if optical flow is pre-computed.
+///   - submissionMode: Provides a hint to let the processor know whether you are submitting frames in presentation
+///   sequence. For more information about supported modes see ``VTSuperResolutionScalerParametersSubmissionMode``.
+///   - destinationFrame: User-allocated pixel buffer that receives the results.
 - (nullable instancetype) initWithSourceFrame:(VTFrameProcessorFrame *)sourceFrame
 								previousFrame:(VTFrameProcessorFrame * _Nullable)previousFrame
 						  previousOutputFrame:(VTFrameProcessorFrame * _Nullable)previousOutputFrame
@@ -245,45 +215,24 @@
 - (instancetype) init NS_UNAVAILABLE;
 + (instancetype) new NS_UNAVAILABLE;
 
-/**
- * @property sourceFrame
- * @abstract sourceFrame Current source frame. Must be non nil
-*/
-
+/// Current source frame, which must be non `nil`.
 @property(nonatomic, readonly) VTFrameProcessorFrame * sourceFrame;
 
-/**
- * @property previousFrame
- * @abstract Previous source frame in presentation time order. For the first frame this will be nil.
-*/
-
+/// Previous source frame in presentation time order, which is `nil` for the first frame.
 @property(nonatomic, readonly, nullable) VTFrameProcessorFrame * previousFrame;
 
-/**
- * @property previousOutputFrame
- * @abstract Previous output frame in presentation time order. For the first frame this will be nil.
-*/
-
+/// Previous output frame in presentation time order, which is `nil` for the first frame.
 @property(nonatomic, readonly, nullable) VTFrameProcessorFrame * previousOutputFrame;
 
-/**
- * @property opticalFlow
- * @abstract Optional VTFrameProcessorOpticalFlow object that contains forward and backward optical flow with the previous frame. Only needed if optical flow is pre-computed. For the first frame this will be nil.
-*/
-
+/// Optional object that contains forward and backward optical flow with the previous frame.
+///
+/// You only need this if optical flow is pre-computed. For the first frame this is `nil`.
 @property(nonatomic, readonly, nullable) VTFrameProcessorOpticalFlow * opticalFlow;
 
-/**
- * @property submissionMode
- * @abstract A VTSuperResolutionScalerSubmissionMode value describing the processing request in this Parameters object .
-*/
+/// Ordering of the input frames in this submission relative to the previous submission.
 @property (nonatomic, readonly) VTSuperResolutionScalerParametersSubmissionMode submissionMode;
 
-/**
- * @property destinationFrame
- * @abstract VTFrameProcessorFrame that contains user allocated pixel buffer that will receive the results.
-*/
-
+/// Destination frame that contains user-allocated pixel buffer that receives the results.
 @property(nonatomic, readonly) VTFrameProcessorFrame * destinationFrame;
 
 @end
diff -ruN /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_TemporalNoiseFilter.h /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_TemporalNoiseFilter.h
--- /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_TemporalNoiseFilter.h	2025-07-17 00:18:42
+++ /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTFrameProcessor_TemporalNoiseFilter.h	2025-07-30 00:06:36
@@ -21,129 +21,107 @@
 
 
 NS_HEADER_AUDIT_BEGIN(nullability, sendability)
-/*!
-	@class VTTemporalNoiseFilterConfiguration
-	@abstract A configuration object to initiate VTFrameProcessor and use Temporal Noise Filter processor.
 
-	@discussion
-		The class properties of VTTemporalNoiseFilterConfiguration help to identify the capabilities of Temporal Noise Filter Processor on the current platform, prior to initiating a session.
-		The availability of Temporal Noise Filter processor in the current platform can be confirmed by checking the VTTemporalNoiseFilterConfiguration.isSupported class property.
-		Verify the processor's capability to process source frames by ensuring that the dimensions are no less than VTTemporalNoiseFilterConfiguration.minimumDimensions and no greater than VTTemporalNoiseFilterConfiguration.maximumDimensions.
-		Use the instance properties such as frameSupportedPixelFormats, sourcePixelBufferAttributes, and destinationPixelBufferAttributes to ensure that the input and output pixel buffer formats and attributes of the processor align with the client's specific requirements.
-		The properties previousFrameCount and nextFrameCount represent the maximum number of preceding and subsequent reference frames, used in the processing of a source frame, to achieve optimum noise reduction quality.
- */
-
+/// A configuration object to initiate a frame processor and use temporal noise-filter processor.
+///
+/// The class properties of `VTTemporalNoiseFilterConfiguration` help to identify the capabilities of temporal noise
+/// filter processor on the current platform, prior to initiating a session. You can confirm the availability of temporal
+/// noise-filter processor in the current platform by checking the ``isSupported`` class property. Verify the processor's
+/// capability to process source frames by ensuring that the dimensions are no less than ``minimumDimensions`` and no
+/// greater than ``maximumDimensions``. Use the instance properties such as ``frameSupportedPixelFormats``,
+/// ``sourcePixelBufferAttributes``, and ``destinationPixelBufferAttributes`` to ensure that the input and output pixel
+/// buffer formats and attributes of the processor align with the client's specific requirements. The properties
+/// ``previousFrameCount`` and ``nextFrameCount`` represent the maximum number of preceding and subsequent reference
+/// frames, used in the processing of a source frame, to achieve optimum noise-reduction quality.
 API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(tvos, watchos)
 NS_SWIFT_SENDABLE
 VT_EXPORT @interface VTTemporalNoiseFilterConfiguration : NSObject <VTFrameProcessorConfiguration>
 
 #pragma mark --- init function(s).
-/*!
-	@abstract Creates a new VTTemporalNoiseConfiguration with specified width and height.
 
-	@param frameWidth    Width of source frame in pixels.
- 
-	@param frameHeight   Height of source frame in pixels.
- */
+/// Creates a new temporal noise-processor configuration.
+///
+/// Returns `nil` if `frameWidth`, `frameHeight`, or `sourcePixelFormat` is unsupported.
+///
+/// - Parameters:
+///   - frameWidth: Width of source frame in pixels.
+///   - frameHeight: Height of source frame in pixels.
 - (nullable instancetype)initWithFrameWidth:(NSInteger)frameWidth
-								frameHeight:(NSInteger)frameHeight;
+								frameHeight:(NSInteger)frameHeight
+						  sourcePixelFormat:(OSType)sourcePixelFormat;
 
 - (instancetype) init NS_UNAVAILABLE;
 + (instancetype) new NS_UNAVAILABLE;
 
-/*!
-	@property frameWidth
-	@abstract Width of source frame in pixels.
- */
+/// Width of source frame in pixels.
 @property (nonatomic, readonly) NSInteger frameWidth;
 
-/*!
-	@property frameHeight
-	@abstract Height of source frame in pixels.
- */
+/// Height of source frame in pixels.
 @property (nonatomic, readonly) NSInteger frameHeight;
 
-/*!
-	@property frameSupportedPixelFormats
-	@abstract List of supported pixel formats for source frames.
- */
+/// Supported pixel formats for source frames for current configuration.
 @property (nonatomic, readonly) NSArray<NSNumber *> * frameSupportedPixelFormats NS_REFINED_FOR_SWIFT;
 
-/*!
-	@property sourcePixelBufferAttributes
-	@abstract Supported pixel buffer attributes for source frames.
- */
+/// Pixel buffer attributes dictionary that describes requirements for pixel buffers which represent source frames and reference frames.
+///
+/// Use ``CVPixelBufferCreateResolvedAttributesDictionary`` to combine this dictionary with your pixel buffer attributes dictionary.
 @property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE sourcePixelBufferAttributes;
 
-/*!
-	@property destinationPixelBufferAttributes
-	@abstract Supported pixel buffer attributes for destination frames.
- */
+/// Pixel buffer attributes dictionary that describes requirements for pixel buffers which represent destination frames.
+///
+/// Use ``CVPixelBufferCreateResolvedAttributesDictionary`` to combine this dictionary with your pixel buffer attributes dictionary.
 @property (nonatomic, readonly) NSDictionary<NSString *, id> * NS_SWIFT_SENDABLE destinationPixelBufferAttributes;
 
-/*!
-	@property nextFrameCount
-	@abstract Maximum number of future reference frames used to process a source frame.
- */
+/// Maximum number of future reference frames that the processor can use to process a source frame.
 @property (nonatomic, readonly) NSInteger nextFrameCount;
 
-/*!
-	@property previousFrameCount
-	@abstract Maximum number of past reference frames used to process a source frame.
- */
+/// Maximum number of past reference frames that the processor can use to process a source frame.
 @property (nonatomic, readonly) NSInteger previousFrameCount;
 
-/*!
-	@property maximumDimensions
-	@abstract The maximum dimensions of a source frame, supported by the processor.
- */
+/// List of all supported pixel formats for source frames.
+@property (class, nonatomic, readonly) NSArray<NSNumber *> *supportedSourcePixelFormats NS_REFINED_FOR_SWIFT;
+
+/// The maximum dimensions of a source frame that the processor supports.
 @property (class, nonatomic, readonly) CMVideoDimensions maximumDimensions;
 
-/*!
-	@property minimumDimensions
-	@abstract The minimum dimensions of a source frame, supported by the processor.
- */
+/// The minimum dimensions of a source frame that the processor supports.
 @property (class, nonatomic, readonly) CMVideoDimensions minimumDimensions;
 
-/*!
-	@property supported
-	@abstract reports whether this processor is supported
-*/
+/// Reports whether the system supports this processor.
 @property (class, nonatomic, readonly, getter=isSupported) BOOL supported;
 
 @end
 
-
-/*!
-	@class VTTemporalNoiseFilterParameters
-	@abstract VTTemporalNoiseFilterParameters object encapsulates the frame-level parameters necessary for processing a source frame using Temporal Noise Filter processor.
-
-	@discussion
-		This object is intended for sending input parameters into the processWithParameters method of the VTFrameProcessor class.
-		Temporal Noise Filter processor utilizes past and future reference frames, provided in presentation time order, to reduce noise from the source frame. The previousFrameCount and nextFrameCount properties in VTTemporalNoiseFilterConfiguration represent the maximum number of past and future reference frames that can be used by the processor to achieve optimum noise reduction quality. The number of reference frames provided shall depend on their availability, but at a minimum, one reference frame, either past or future, must be provided.
-		The parameter destinationFrame stores the output frame that is returned to the caller upon the successful completion of the processWithParameters operation.
- */
+/// Encapsulates the frame-level parameters necessary for processing a source frame using temporal noise-filter processor.
+///
+/// This object is intended for sending input parameters into the `processWithParameters` method of the `VTFrameProcessor`
+/// class. Temporal noise-filter processor utilizes past and future reference frames, provided in presentation time order,
+/// to reduce noise from the source frame. The `previousFrameCount` and `nextFrameCount` properties in
+/// ``VTTemporalNoiseFilterConfiguration`` represent the maximum number of past and future reference frames that the
+/// processor can use to achieve optimum noise reduction quality. The number of reference frames provided shall depend
+/// on their availability, but at a minimum, you must provide one reference frame, either past or future. The parameter
+/// `destinationFrame` stores the output frame that the processor returns to the caller upon the successful completion
+/// of the `processWithParameters` operation.
 API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(tvos, watchos)
 VT_EXPORT @interface VTTemporalNoiseFilterParameters : NSObject <VTFrameProcessorParameters>
 
-/*!
-	@abstract Creates a new VTTemporalNoiseFilterParameters object.
-
-	@param sourceFrame Current source frame. Must be non nil.
-
-	@param nextFrames Future reference frames in presentation time order to be used for processing the source frame. The number of frames can vary from 0 to the number specified by the nextFrameCount property in VTTemporalNoiseFilterConfiguration.
-
-	@param previousFrames Past reference frames in presentation time order to be used for processing the source frame. The number of frames can vary from 0 to the number specified by the previousFrameCount property in VTTemporalNoiseFilterConfiguration.
-
-	@param destinationFrame User allocated pixel buffer that will receive the output frame. The pixel format of the destinationFrame must match with that of the sourceFrame.
-
-	@param filterStrength Used to control strength of the noise filtering. The value can range from the minimum strength of 0.0 to the maximum strength of 1.0. Change in filter strength causes the processor to flush all frames in the queue prior to processing the source frame.
-
-	@param hasDiscontinuity Marks sequence discontinuity, forcing the processor to reset prior to processing the source frame.
- */
+/// Creates a new `VTTemporalNoiseFilterParameters` object.
+///
+/// - Parameters:
+///   - sourceFrame: Current source frame; must be non `nil`.
+///   - nextFrames: Future reference frames in presentation time order to use for processing the source frame. The number
+///   of frames can vary from 0 to the number specified by ``VTTemporalNoiseFilterConfiguration/nextFrameCount`` property.
+///   - previousFrames: Past reference frames in presentation time order to use for processing the source frame. The number
+///   of frames can vary from 0 to the number specified by ``VTTemporalNoiseFilterConfiguration/previousFrameCount`` property.
+///   - destinationFrame: User-allocated pixel buffer that receives the output frame. The pixel format of `destinationFrame`
+///   must match with that of the `sourceFrame`.
+///   - filterStrength: Strength of the noise-filtering to use. The value can range from the minimum strength of 0.0 to the
+///   maximum strength of 1.0. Change in filter strength causes the processor to flush all frames in the queue prior to
+///   processing the source frame.
+///   - hasDiscontinuity: Marks sequence discontinuity, forcing the processor to reset prior to processing the source frame.
 - (nullable instancetype) initWithSourceFrame:(VTFrameProcessorFrame *)sourceFrame
-								   nextFrames:(NSArray<VTFrameProcessorFrame *> * _Nullable)nextFrames
-							   previousFrames:(NSArray<VTFrameProcessorFrame *> * _Nullable)previousFrames
+								   nextFrames:(NSArray<VTFrameProcessorFrame *> *)nextFrames
+							   previousFrames:(NSArray<VTFrameProcessorFrame *> *)previousFrames
 							 destinationFrame:(VTFrameProcessorFrame *)destinationFrame
 							   filterStrength:(float)filterStrength
 							 hasDiscontinuity:(Boolean)hasDiscontinuity;
@@ -151,40 +129,26 @@
 - (instancetype) init NS_UNAVAILABLE;
 + (instancetype) new NS_UNAVAILABLE;
 
-/*!
-	@property sourceFrame
-	@abstract Current source frame. Must be non-nil.
- */
+/// Current source frame; must be non `nil`.
 @property(nonatomic, readonly) VTFrameProcessorFrame * sourceFrame;
 
-/*!
-	@property nextFrames
-	@abstract Future reference frames in presentation time order to be used for processing the source frame. The number of frames can vary from 0 to the number specified by the nextFrameCount property in VTTemporalNoiseFilterConfiguration.
- */
-@property(nonatomic, readonly, nullable) NSArray<VTFrameProcessorFrame *> * nextFrames;
+/// Future reference frames in presentation time order that you use to process the source frame.
+///
+/// The number of frames can vary from 0 to the number specified by the `nextFrameCount` property in `VTTemporalNoiseFilterConfiguration`.
+@property(nonatomic, readonly) NSArray<VTFrameProcessorFrame *> *nextFrames;
 
-/*!
-	@property previousFrames
-	@abstract Past reference frames in presentation time order to be used for processing the source frame. The number of frames can vary from 0 to the number specified by the previousFrameCount property in VTTemporalNoiseFilterConfiguration.
- */
-@property(nonatomic, readonly, nullable) NSArray<VTFrameProcessorFrame *> * previousFrames;
+/// Past reference frames in presentation time order that you use to process the source frame.
+///
+/// The number of frames can vary from 0 to the number specified by the `previousFrameCount` property in `VTTemporalNoiseFilterConfiguration`.
+@property(nonatomic, readonly) NSArray<VTFrameProcessorFrame *> *previousFrames;
 
-/*!
-	@property filterStrength
-	@abstract Parameter used to control strength of the noise filtering. The value can range from the minimum strength of 0.0 to the maximum strength of 1.0. Change in filter strength causes the processor to flush all frames in the queue prior to processing the source frame.
- */
+/// A parameter to control the strength of noise-filtering. The value can range from the minimum strength of 0.0 to the maximum strength of 1.0. Change in filter strength causes the processor to flush all frames in the queue prior to processing the source frame.
 @property (nonatomic) float filterStrength;
 
-/*!
-	@property hasDiscontinuity
-	@abstract Marks sequence discontinuity, forcing the processor to reset prior to processing the source frame.
- */
+/// A Boolean that indicates sequence discontinuity, forcing the processor to reset prior to processing the source frame.
 @property (nonatomic) BOOL hasDiscontinuity;
 
-/*!
-	@property destinationFrame
-	@abstract VTFrameProcessorFrame that contains user allocated pixel buffer that will receive the output frame.
- */
+/// Destination frame that contains a user-allocated pixel buffer that receives the output frame.
 @property(nonatomic, readonly) VTFrameProcessorFrame * destinationFrame;
 
 @end
diff -ruN /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTMotionEstimationSession.h /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTMotionEstimationSession.h
--- /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTMotionEstimationSession.h	2025-07-17 00:31:06
+++ /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTMotionEstimationSession.h	2025-07-30 00:14:07
@@ -29,77 +29,48 @@
 
 CF_IMPLICIT_BRIDGING_ENABLED
 
-/*!
-	@enum		VTMotionEstimationFrameFlags
-	@abstract	Directives for the motion estimation session and the motion estimation processor passed from the client into
-				motionEstimationFrameFlags parameter of VTMotionEstimationSessionEstimateMotionVectors.
 
-	@constant	kVTMotionEstimationFrameFlags_CurrentBufferWillBeNextReferenceBuffer
-		A hint to the motion estimation session that the client will reuse the currentBuffer as referenceBuffer in the next call
-		to VTMotionEstimationSessionEstimateMotionVectors. Using this flag allows the motion estimation processor to make some
-		optimizations. 
-*/
+/// Flags to control processing of a frame you pass to the motion-estimation session.
 typedef CF_OPTIONS(uint32_t, VTMotionEstimationFrameFlags) {
+	/// A hint to the motion-estimation session that you are going to reuse the `currentBuffer` as `referenceBuffer` in
+	/// the next call to ``VTMotionEstimationSessionEstimateMotionVectors``. Using this flag allows the motion-estimation
+	/// processor to deliver better performance.
 	kVTMotionEstimationFrameFlags_CurrentBufferWillBeNextReferenceBuffer = 1<<0,
 } CF_REFINED_FOR_SWIFT;
 
-/*!
-	@enum		VTMotionEstimationInfoFlags
-	@abstract	Directives for the client passed into the VTMotionEstimationOutputHandler from the
-				motion estimation session or the motion estimation processor.
-*/
+/// Directives that provide information back to you with the results of motion-estimation.
 typedef CF_OPTIONS(uint32_t, VTMotionEstimationInfoFlags) {
 	kVTMotionEstimationInfoFlags_Reserved0 = 1<<0,
 } CF_REFINED_FOR_SWIFT;
 
-/*!
-	@typedef	VTMotionEstimationSessionRef
-	@abstract	A reference to a Video Toolbox Motion Estimation Session.
-	@discussion
-		A motion estimation session supports two CVPixelBuffers of the same size and type,
-		and returns motion vectors in the form of a CVPixelBuffer. The session is a
-		reference-counted CF object. To create a motion estimation session, call
-		VTMotionEstimationSessionCreate; then you can optionally configure the session using
-		VTSessionSetProperty; then to create motion estimations, call
-		VTMotionEstimationSessionCreateMotionEstimation. When you are done with the session,
-		you should call VTMotionEstimationSessionInvalidate to tear it down and CFRelease to
-		release your object reference.
- */
+/// A reference to a Video Toolbox motion-estimation session.
+///
+/// A motion-estimation session supports two `CVPixelBuffer`s of the same size and type, and returns motion vectors in
+/// the form of a ``CVPixelBuffer``. The session is a reference-counted CF object. To create a motion-estimation session,
+/// call ``VTMotionEstimationSessionCreate``; then you can optionally configure the session using `VTSessionSetProperty`.
+/// To create motion-estimations, call ``VTMotionEstimationSessionCreateMotionEstimation``. When you are done with the
+/// session, you should call ``VTMotionEstimationSessionInvalidate`` to tear it down and ``CFRelease`` to release the
+/// session object reference.
 typedef struct CM_BRIDGED_TYPE(id) OpaqueVTMotionEstimationSession*  VTMotionEstimationSessionRef CF_REFINED_FOR_SWIFT
 	API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos);
 
-/*!
-	@function	VTMotionEstimationSessionGetTypeID
-	@abstract	Get the CFTypeID for a VTMotionEstimationSession.
-	@discussion
-		Get the CFTypeID for a VTMotionEstimationSession.
-*/
+/// Get the CoreFoundation type identifier for motion-estimation session type.
 VT_EXPORT CFTypeID VTMotionEstimationSessionGetTypeID( void ) CF_REFINED_FOR_SWIFT
 	API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos);
 
-/*!
-	@function	VTMotionEstimationSessionCreate
-	@abstract	Creates a session for creating CVPixelBuffer of motion vectors from two CVPixelBuffers.
-	@discussion
-		The function creates a session for transferring images between CVPixelBuffers.
-	@param	allocator
-		An allocator for the session.  Pass NULL to use the default allocator.
-	@param	motionVectorProcessorSelectionOptions
-		Available creation Options:
-			kVTMotionEstimationSessionCreationOption_MotionVectorSize CFNumber 16 or 4.
-				The size of the block of pixels 16x16 or 4x4. Default is 16x16.
-			kVTMotionEstimationSessionCreationOption_UseMultiPassSearch can be supplied with kCFBooleanTrue to provide higher quality motion estimation.
-				True motion achieves higher quality by running the motion estimator in multiple passes. The default is kCFBooleanFalse.
-			kVTMotionEstimationSessionCreationOption_Label CFString
-				This option assigns a label for logging and resource tracking.
-	@param	width
-		The width of frames, in pixels.
-	@param	height
-		The height of frames in pixels.
-	@param	motionEstimationSessionOut
-		Points to a variable to receive the new pixel transfer session.
-	
-*/
+/// Creates a session you use to generate a pixel buffer of motion vectors from two pixel buffers.
+///
+/// The function creates a session for computing motion vectors between two pixel buffers.
+///
+/// - Parameters:
+///   - allocator: An allocator for the session. Pass NULL to use the default allocator.
+///   - motionVectorProcessorSelectionOptions: Available creation options are:
+///     - term ``kVTMotionEstimationSessionCreationOption_MotionVectorSize``: Size of the search block.
+///     - term ``kVTMotionEstimationSessionCreationOption_UseMultiPassSearch``: Use multiple passes to detect true motion.
+///     - term ``kVTMotionEstimationSessionCreationOption_Label``: Label used for logging and resource tracking.
+///   - width: The width of frames in pixels.
+///   - height: The height of frames in pixels.
+///   - motionEstimationSessionOut: Points to a variable to receive the new motion-estimation session.
 VT_EXPORT OSStatus
 VTMotionEstimationSessionCreate(
 		CM_NULLABLE CFAllocatorRef	 allocator,
@@ -109,89 +80,68 @@
 		CM_RETURNS_RETAINED_PARAMETER CM_NULLABLE VTMotionEstimationSessionRef * CM_NONNULL motionEstimationSessionOut) CF_REFINED_FOR_SWIFT
 		API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos);
 
-/*!
-	@function	VTMotionEstimationSessionCopySourcePixelBufferAttributes
-	@abstract	Copy the expected attributes for source pixel buffers
-	@discussion
-		The function provides a cf dictionary of attributes that must be released. This is
-		routine is for clients to query the VTMotionEstimationSession for the native source
-		attributes. If a client provides an input  CVPixelBuffer that is not compatible with the
-		the attributes returned by this function, VTMotionEstimationSession will automatically
-		convert the input pixel buffer into a compatible pixel buffer for processing.
-	@param	session
-		The motion estimation session.
-	@param	attributesOut
-		Points to a variable to receive the attributes dictionary.
-	
-*/
+/// Copies the attributes for source pixel buffers expected by motion-estimation session.
+///
+/// This function provides a `CFDictionary` of attributes that you must release. Use this function to query
+/// ``VTMotionEstimationSession`` for the native source attributes. If you provide an input ``CVPixelBuffer`` that is
+/// not compatible with the attributes that this function returns, ``VTMotionEstimationSession`` automatically
+/// converts the input pixel buffer into a compatible pixel buffer for processing.
+///
+/// - Parameters:
+///   - motionEstimationSession: The motion-estimation session.
+///   - attributesOut: Points to a variable to receive the attributes dictionary.
 VT_EXPORT OSStatus
 VTMotionEstimationSessionCopySourcePixelBufferAttributes(
 		CM_NONNULL VTMotionEstimationSessionRef	 motionEstimationSession,
 		CM_RETURNS_RETAINED_PARAMETER CM_NULLABLE CFDictionaryRef * CM_NONNULL attributesOut) CF_REFINED_FOR_SWIFT
 		API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos);
 
-/*!
-	@function	VTMotionEstimationSessionInvalidate
-	@abstract	Tears down a motion estimation session.
-    @discussion
-    	When you are done with a motion estimation session you created, call VTMotionEstimationSessionInvalidate
-    	to tear it down and then CFRelease to release your object reference. When a motion estimation session's
-		retain count reaches zero, it is automatically invalidated, but since sessions may be retained by multiple
-		parties, it can be hard to predict when this will happen. Calling VTMotionEstimationSessionInvalidate
-		ensures a deterministic, orderly teardown.
-*/
+/// Tears down a motion-estimation session.
+///
+/// When you are done with a motion-estimation session you created, call this function to tear
+/// it down and then `CFRelease` to release the session object reference. When a motion-estimation session's retain count
+/// reaches zero, the system automatically invalidates it, but because multiple parties may retain sessions, it can be
+/// hard to predict when this happens. Calling this function ensures a deterministic, orderly teardown.
 VT_EXPORT void
 VTMotionEstimationSessionInvalidate(
 		CM_NONNULL VTMotionEstimationSessionRef session ) CF_REFINED_FOR_SWIFT
 		API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos);
 
-/*!
-	@typedef	VTMotionEstimationOutputHandler
-	@abstract	Block invoked when frame processing is complete.
-	@discussion
-		When the client requests a motion estimation, the client passes in a callback block to be called
-		for that result of that request. If the VTMotionEstimationSessionCreateMotionEstimation call returns
-		an error, the block will not be called.
-	@param	status
-		noErr if processing request was successful; an error code if motion estimation was not successful.
-	@param	infoFlags
-		A bit field containing information about the processing operation.
-	@param	additionalInfo
-		Additional processing information about the processing operation that can not fit in infoFlags.
-		Currently, this is expected to be NULL.
-	@param	motionVectorPixelBuffer
-		A CVPixelBuffer containing the motion vector information, if processing request was successful;
-		otherwise, NULL.
- */
+/// A block invoked by motion-estimation session when frame processing is complete.
+///
+/// When the client requests a motion-estimation, the client passes in a callback block that the system invokes for the
+/// result of that request. If the ``VTMotionEstimationSessionEstimateMotionVectors`` call returns an error, the system
+/// does not invoke this block.
+///
+/// - Parameters:
+///   - status: `noErr` if processing request was successful; an error code if motion-estimation was not successful.
+///   - infoFlags: A bit field that contains information about the processing operation.
+///   - additionalInfo: Additional processing information about the operation that cannot fit in `infoFlags`.
+///   Currently, the system expects this to be NULL.
+///   - motionVectorPixelBuffer: A `CVPixelBuffer` that contains the motion vector information, if processing request
+///   was successful; otherwise, NULL.
 typedef void (^VTMotionEstimationOutputHandler)(
 		OSStatus status,
 		VTMotionEstimationInfoFlags infoFlags,
 		CM_NULLABLE CFDictionaryRef additionalInfo,
 		CM_NULLABLE CVPixelBufferRef motionVectors);
 
-/*!
-	@function	VTMotionEstimationSessionEstimateMotionVectors
-	@abstract	Given two CVPixelBuffers, creates a CVPixelBuffer representing the motion estimate.
-	@discussion
-		The motion estimation session will compare the reference frame to the current frame, and
-		generate motion vectors in the form of a CVPixelBuffer.
-	@param	session
-		The motion estimation session.
-	@param	referenceImage
-		The reference image.
-	@param	currentImage
-		The current image.
-	@param	motionEstimationFrameFlags
-		A bit field with per-frame options.  See kVTMotionEstimationFrameFlags_CurrentBufferWillBeNextReferenceBuffer.
-	@param	additionalFrameOptions
-		A way to pass additional information that will not fit in motionEstimationFrameFlags; currently expected to be NULL.
-	@param	outputHandler
-		The block to be called when the processing request is completed.  If the
-		VTMotionEstimationSessionCreateMotionEstimation call returns an error, the block will not
-		be called.
-	@result
-		If the call was successful, noErr; otherwise an error code, such as kVTMotionEstimationNotSupportedErr.
-*/
+/// Creates a new pixel buffer that contains motion vectors between the input pixel buffers.
+///
+/// The motion-estimation session compares the reference frame to the current frame, and generates motion vectors in
+/// the form of a `CVPixelBuffer`.
+///
+/// - Parameters:
+///   - session: The motion-estimation session.
+///   - referenceImage: The reference image.
+///   - currentImage: The current image.
+///   - motionEstimationFrameFlags: A bit field with per-frame options. See ``kVTMotionEstimationFrameFlags_CurrentBufferWillBeNextReferenceBuffer``.
+///   - additionalFrameOptions: A way to pass additional information that doesn't fit in `motionEstimationFrameFlags`;
+///   currently the system expects it to be `NULL`.
+///   - outputHandler: The block invoked by the system when the processing request is completed. If the
+///   `VTMotionEstimationSessionEstimateMotionVectors` call returns an error, the system does not invoke the block.
+///
+/// - Returns: If the call was successful, returns `noErr`; otherwise, returns an error code, such as `kVTMotionEstimationNotSupportedErr`.
 VT_EXPORT OSStatus
 VTMotionEstimationSessionEstimateMotionVectors(
 		CM_NONNULL VTMotionEstimationSessionRef    session,
@@ -202,13 +152,9 @@
 		CM_NONNULL VTMotionEstimationOutputHandler outputHandler ) CF_REFINED_FOR_SWIFT
 		API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos);
 
-/*!
-	@function VTMotionEstimationSessionCompleteFrames
-	@abstract Directs the motion estimation session to emit all pending frames and waits for completion.
-	@discussion
-		Directs the motion estimation session to emit all pending frames, then waits for all outstanding
-		requests to complete, then returns.
-*/
+/// Directs the motion-estimation session to emit all pending frames and waits for completion.
+///
+/// Directs the motion-estimation session to emit all pending frames, then waits for all outstanding requests to complete, then returns.
 VT_EXPORT OSStatus
 VTMotionEstimationSessionCompleteFrames(
 		CM_NONNULL VTMotionEstimationSessionRef		session) CF_REFINED_FOR_SWIFT
@@ -216,8 +162,8 @@
 
 CF_IMPLICIT_BRIDGING_DISABLED
 
-// See VTSession.h for property access APIs on VTMotionEstimationSession.
-// See VTMotionEstimationSessionProperties.h for standard property keys and values for pixel transfer sessions.
+// For information on property access APIs on ``VTMotionEstimationSession``, see ``VTSession.h``.
+// For information on standard property keys and values for pixel transfer sessions, see ``VTMotionEstimationSessionProperties.h``.
 
 #endif // __BLOCKS__
 
diff -ruN /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTMotionEstimationSessionProperties.h /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTMotionEstimationSessionProperties.h
--- /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTMotionEstimationSessionProperties.h	2025-07-17 00:27:41
+++ /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VTMotionEstimationSessionProperties.h	2025-07-30 00:08:43
@@ -23,71 +23,49 @@
 
 #pragma mark Creation Options
 
-/*!
-	@header
-	@abstract
-		Video Toolbox motion estimation session creation options
 
-	@discussion
-		These keys may be used in the motionVectorProcessorSelectionOptions parameter to 
-		VTMotionEstimationSessionCreate to configure the created session.
-*/
+/// Options you use to create motion-estimation sessions.
+///
+/// Use these keys in the ``motionVectorProcessorSelectionOptions`` parameter to ``VTMotionEstimationSessionCreate`` to
+/// configure the created session.
 
-/*!
-	@constant	kVTMotionEstimationSessionCreationOption_MotionVectorSize
-	@abstract
-		The size of the search blocks used in VTMotionEstimationSession.
-	@discussion
-		VTMotionEstimationSessionCreate takes a dictionary of creation options, motionVectorProcessorSelectionOptions.
-		kVTMotionEstimationSessionCreationOption_MotionVectorSize can be supplied with CFNumber to override the default search block size.
-		Currently supported motion vector size is 4 or 16, meaning 4x4 or 16x16 respectively. 16x16 is the default if this key is not provided.
-*/
+
+/// The size of the search blocks that the motion-estimation session uses.
+///
+/// ``VTMotionEstimationSessionCreate`` takes a dictionary of creation options, `motionVectorProcessorSelectionOptions`.
+/// You can supply ``kVTMotionEstimationSessionCreationOption_MotionVectorSize`` with `CFNumber` to override the default
+/// search block size. Supported motion vector size is 4 or 16, meaning 4x4 or 16x16 respectively. 16x16 is the default
+/// if you don't provide this key.
 VT_EXPORT const CFStringRef kVTMotionEstimationSessionCreationOption_MotionVectorSize API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos); // Read/write, CFNumber
 
-/*!
-	@constant	kVTMotionEstimationSessionCreationOption_UseMultiPassSearch
-	@abstract
-		An option used for higher quality motion estimation
-	@discussion
-		VTMotionEstimationSessionCreate takes a dictionary of creation options, motionVectorProcessorSelectionOptions.
-		kVTMotionEstimationSessionCreationOption_UseMultiPassSearch can be supplied with kCFBooleanTrue to provide higher quality motion estimation.
-		True motion achieves higher quality by running the motion estimator in multiple passes. The default is kCFBooleanFalse.
-*/
+/// An option to use for higher quality motion estimation.
+///
+/// ``VTMotionEstimationSessionCreate`` takes a dictionary of creation options, `motionVectorProcessorSelectionOptions`.
+/// You can supply ``kVTMotionEstimationSessionCreationOption_UseMultiPassSearch`` with `kCFBooleanTrue` to provide
+/// higher quality motion estimation. True-motion achieves higher quality by running the motion estimator in multiple
+/// passes. The default is `kCFBooleanFalse`.
 VT_EXPORT const CFStringRef kVTMotionEstimationSessionCreationOption_UseMultiPassSearch API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos); // Read/write, CFBoolean
 
-/*!
-	@constant	kVTMotionEstimationSessionCreationOption_DetectTrueMotion
-	@abstract
-		Renamed to kVTMotionEstimationSessionCreationOption_UseMultiPassSearch
-*/
-VT_EXPORT const CFStringRef kVTMotionEstimationSessionCreationOption_DetectTrueMotion API_UNAVAILABLE(macos, ios, tvos, visionos, watchos); // Read/write, CFBoolean DEPRECATED
+/// Enables multi-pass true-motion detection.
+///
+/// Renamed to `kVTMotionEstimationSessionCreationOption_UseMultiPassSearch`.
+VT_EXPORT const CFStringRef kVTMotionEstimationSessionCreationOption_DetectTrueMotion API_UNAVAILABLE(macos, ios, tvos, watchos, visionos); // Read/write, CFBoolean DEPRECATED
 
-/*!
-	@constant	kVTMotionEstimationSessionCreationOption_Label
-	@abstract
-		A label used for logging and resource tracking.
-	@discussion
-		VTMotionEstimationSessionCreate takes a dictionary of creation options, motionVectorProcessorSelectionOptions.
-		kVTMotionEstimationSessionCreationOption_Label can be supplied with CFString to specify a label used in logging and
-		resource tracking.
-*/
+/// A label you use to log and track resources.
+///
+/// ``VTMotionEstimationSessionCreate`` takes a dictionary of creation options, `motionVectorProcessorSelectionOptions`.
+/// You can supply ``kVTMotionEstimationSessionCreationOption_Label`` with `CFString` to specify a label used in logging
+/// and resource tracking.
 VT_EXPORT const CFStringRef kVTMotionEstimationSessionCreationOption_Label API_AVAILABLE(macos(26.0), ios(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos); // Read/write, CFString
 
 #pragma mark Properties
 
-/*!
-	@header
-	@abstract
-		Video Toolbox motion estimation session properties
-
-	@discussion
-		This file defines private properties used to configure motion estimation sessions after creation.  
-
-		Clients can query supported properties by calling VTSessionCopySupportedPropertyDictionary,
-		and use VTSessionSetProperty and VTSessionCopyProperty.
-*/
-
-// there are none yet
+/// Properties of motion-estimation sessions.
+///
+/// This file defines properties that you may use to configure motion-estimation sessions after creation.
+///
+/// Clients can query supported properties by calling ``VTSessionCopySupportedPropertyDictionary``, and use
+/// ``VTSessionSetProperty`` and ``VTSessionCopyProperty``.
 
 #pragma pack(pop)
 
diff -ruN /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VideoToolbox.apinotes /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VideoToolbox.apinotes
--- /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VideoToolbox.apinotes	2025-07-17 01:01:01
+++ /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/VideoToolbox.framework/Headers/VideoToolbox.apinotes	2025-07-30 00:01:30
@@ -33,8 +33,12 @@
     SwiftName: "VTDecompressionSessionGetTypeID()"
   - Name: VTDecompressionSessionDecodeFrame
     SwiftName: "VTDecompressionSessionDecodeFrame(_:sampleBuffer:flags:frameRefcon:infoFlagsOut:)"
+  - Name: VTDecompressionSessionDecodeFrameWithOptions
+    SwiftName: "VTDecompressionSessionDecodeFrame(_:sampleBuffer:flags:frameOptions:frameRefcon:infoFlagsOut:)"
   - Name: VTDecompressionSessionDecodeFrameWithOutputHandler
     SwiftName: "VTDecompressionSessionDecodeFrame(_:sampleBuffer:flags:infoFlagsOut:outputHandler:)"
+  - Name: VTDecompressionSessionDecodeFrameWithOptionsAndOutputHandler
+    SwiftName: "VTDecompressionSessionDecodeFrame(_:sampleBuffer:flags:frameOptions:infoFlagsOut:outputHandler:)"
   - Name: VTDecompressionSessionFinishDelayedFrames
     SwiftName: "VTDecompressionSessionFinishDelayedFrames(_:)"
   - Name: VTDecompressionSessionCanAcceptFormatDescription
Clone this wiki locally