-
Notifications
You must be signed in to change notification settings - Fork 549
CoreImage iOS xcode26.0 b5
Alex Soto edited this page Aug 5, 2025
·
2 revisions
#CoreImage.framework
diff -ruN /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/CoreImage.framework/Headers/CIColor.h /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/CoreImage.framework/Headers/CIColor.h
--- /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/CoreImage.framework/Headers/CIColor.h 2025-07-12 03:16:09
+++ /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/CoreImage.framework/Headers/CIColor.h 2025-07-25 20:48:15
@@ -22,13 +22,13 @@
/// Many of the built-in Core Image filters have one or more `CIColor` inputs that you can set to affect the filter's
/// behavior.
///
-/// * Color Model
+/// ### Color Model
///
/// A color is defined as a N-dimensional model where each dimension's color component is represented
/// by intensity values. A color component may also be referred to as a color channel. An RGB color model, for example,
/// is three-dimensional and the red, green, and blue component intensities define each unique color.
///
-/// ## Color Space
+/// ### Color Space
///
/// A color is also defined by a color space that locates the axes of N-dimensional model within the greater
/// volume of human perceivable colors. Core Image uses `CGColorSpace` instances to specify a variety of different
@@ -36,13 +36,13 @@
/// linearly or in a non-linear perceptual curve.
/// (For more information on `CGColorSpace` see <doc://com.apple.documentation/documentation/coregraphics/cgcolorspace>)
///
-/// ## Color Range
+/// ### Color Range
///
/// Standard dynamic range (SDR) color component values range from `0.0` to `1.0`, with `0.0`
/// representing 0% of that component and `1.0` representing 100%. In contrast, high dynamic range (HDR) color values
/// can be less than `0.0` (for more saturation) or greater than `1.0` (for more brightness).
///
-/// ## Color Opacity
+/// ### Color Opacity
///
/// `CIColor` instances also have an alpha component, which represents the opacity of the color, with 0.0 meaning completely
/// transparent and 1.0 meaning completely opaque. If a color does not have an explicit alpha component, Core Image
diff -ruN /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/CoreImage.framework/Headers/CIContext.h /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/CoreImage.framework/Headers/CIContext.h
--- /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/CoreImage.framework/Headers/CIContext.h 2025-07-11 22:48:45
+++ /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/CoreImage.framework/Headers/CIContext.h 2025-07-25 22:48:42
@@ -33,15 +33,15 @@
/// The Core Image context class provides an evaluation context for Core Image processing with Metal, OpenGL, or OpenCL.
///
-/// You use `CIContext` instance to render a ``CIImage`` instance which represents a graph of image processing operations
+/// You use a `CIContext` instance to render a ``CIImage`` instance which represents a graph of image processing operations
/// which are built using other Core Image classes, such as ``CIFilter-class``, ``CIKernel``, ``CIColor`` and ``CIImage``.
/// You can also use a `CIContext` with the ``CIDetector`` class to analyze images — for example, to detect faces
/// or barcodes.
///
/// Contexts support automatic color management by performing all processing operations in a working color space.
/// This means that unless told otherwise:
-/// * all input images are color matched from the input's color space to the working space.
-/// * all renders are color matched from the working space to the destination space.
+/// * All input images are color matched from the input's color space to the working space.
+/// * All renders are color matched from the working space to the destination space.
/// (For more information on `CGColorSpace` see <doc://com.apple.documentation/documentation/coregraphics/cgcolorspace>)
///
/// `CIContext` and ``CIImage`` instances are immutable, so multiple threads can use the same ``CIContext`` instance
@@ -49,7 +49,7 @@
/// threads. Each thread must take care not to access or modify a ``CIFilter-class`` instance while it is being used by
/// another thread.
///
-/// The `CIContext` manages various internal state such `MTLCommandQueue` and caches for compiled kernels
+/// The `CIContext` manages various internal state such as `MTLCommandQueue` and caches for compiled kernels
/// and intermediate buffers. For this reason it is not recommended to create many `CIContext` instances. As a rule,
/// it is recommended that you create one `CIContext` instance for each view that renders ``CIImage`` or each background task.
///
@@ -202,7 +202,7 @@
///
CORE_IMAGE_EXPORT CIContextOption const kCIContextAllowLowPower NS_AVAILABLE(10_12, 13_0);
-/// A Boolean value to specifying a client-provided name for a context.
+/// A string value to specify a client-provided name for a context.
///
/// This name will be used in QuickLook graphs and the output of CI_PRINT_TREE.
///
@@ -325,16 +325,28 @@
#pragma mark - properties
-// The working color space of the CIContext
-// The property will be null if the context was created with color management disabled.
+/// The working color space of the CIContext.
+///
+/// The working color space determines the color space used when executing filter kernels.
+/// You specify a working color space using the ``kCIContextWorkingColorSpace`` option when creating a ``CIContext``.
+/// * All input images are color matched from the input's color space to the working space.
+/// * All renders are color matched from the working space to the destination space.
+///
+/// The property will be `null` if the context was created with color management disabled.
+///
@property (nullable, nonatomic, readonly) CGColorSpaceRef workingColorSpace NS_AVAILABLE(10_11,9_0);
-// The working pixel format of the CIContext used for intermediate buffers
+/// The working pixel format that the CIContext uses for intermediate buffers.
+///
+/// The working format determines the pixel format that Core Image uses to create intermediate buffers for rendering images.
+/// You specify a working pixel format using the ``kCIContextWorkingFormat`` option when creating a ``CIContext``.
+///
@property (nonatomic, readonly) CIFormat workingFormat NS_AVAILABLE(10_11,9_0);
-// A NSNumber that specifies the maximum memory footprint (in megabytes) that
-// the CIContext allocates for render tasks. Larger values could increase memory
-// footprint while smaller values could reduce performance.
+/// A number value to control the maximum memory in megabytes that the context allocates for render tasks.
+///
+/// Larger values could increase memory footprint while smaller values could reduce performance.
+///
CORE_IMAGE_EXPORT CIContextOption const kCIContextMemoryLimit NS_AVAILABLE(14_0, 17_0);
#pragma mark - render methods
@@ -480,7 +492,7 @@
/// For example, if `kCIFormatRGBX16` is specified, then the created `CGImage` will
/// be 16 bits-per-component and opaque.
/// - colorSpace: The `CGColorSpace` for the output image.
-/// This color space must have either `CGColorSpaceModel.rgb` or `CGColorSpaceModel.monochrome` and
+/// This color space must have either `CGColorSpaceModel.rgb` or `CGColorSpaceModel.monochrome`
/// and be compatible with the specified pixel format.
///
/// - Returns:
@@ -506,11 +518,11 @@
/// For example, if `kCIFormatRGBX16` is specified, then the created `CGImage` will
/// be 16 bits-per-component and opaque.
/// - colorSpace: The `CGColorSpace` for the output image.
-/// This color space must have either `CGColorSpaceModel.rgb` or `CGColorSpaceModel.monochrome` and
+/// This color space must have either `CGColorSpaceModel.rgb` or `CGColorSpaceModel.monochrome`
/// and be compatible with the specified pixel format.
/// - deferred: Controls when Core Image renders `image`.
-/// * `YES` : rendering of `image` is deferred until the created `CGImage` rendered.
-/// * `NO` : the `image` is rendered immediately.
+/// * True: rendering of `image` is deferred until the created `CGImage` is rendered.
+/// * False: the `image` is rendered immediately.
///
/// - Returns:
/// Returns a new `CGImage` instance.
@@ -536,15 +548,15 @@
/// For example, if `kCIFormatRGBX16` is specified, then the created `CGImage` will
/// be 16 bits-per-component and opaque.
/// - colorSpace: The `CGColorSpace` for the output image.
-/// This color space must have either `CGColorSpaceModel.rgb` or `CGColorSpaceModel.monochrome` and
+/// This color space must have either `CGColorSpaceModel.rgb` or `CGColorSpaceModel.monochrome`
/// and be compatible with the specified pixel format.
/// - deferred: Controls when Core Image renders `image`.
-/// * `YES` : rendering of `image` is deferred until the created `CGImage` rendered.
-/// * `NO` : the `image` is rendered immediately.
+/// * True: rendering of `image` is deferred until the created `CGImage` is rendered.
+/// * False: the `image` is rendered immediately.
/// - calculateHDRStats: Controls if Core Image calculates HDR statistics.
-/// * `YES` : Core Image will immediately render `image`, calculate the HDR statistics
+/// * True: Core Image will immediately render `image`, calculate the HDR statistics
/// and create a `CGImage` that has the calculated values.
-/// * `NO` : the created `CGImage` will not have any HDR statistics.
+/// * False: the created `CGImage` will not have any HDR statistics.
///
/// - Returns:
/// Returns a new `CGImage` instance.
diff -ruN /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/CoreImage.framework/Headers/CIFeature.h /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/CoreImage.framework/Headers/CIFeature.h
--- /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/CoreImage.framework/Headers/CIFeature.h 2025-07-13 23:38:32
+++ /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/CoreImage.framework/Headers/CIFeature.h 2025-07-25 22:37:12
@@ -17,7 +17,7 @@
///
/// > Note: In macOS 10.13, iOS 11, and tvOS 11 or later, the Vision framework replaces these classes
/// for identifying and analyzing image features.
-/// See <doc://com.apple.documentation/documentation/vision/vnbservation>)
+/// See <doc://com.apple.documentation/documentation/vision/vnobservation>)
///
/// A `CIFeature` object represents a portion of an image that a detector believes matches its criteria.
/// Subclasses of CIFeature hold additional information specific to the detector that discovered the feature.
diff -ruN /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/CoreImage.framework/Headers/CIFilter.h /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/CoreImage.framework/Headers/CIFilter.h
--- /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/CoreImage.framework/Headers/CIFilter.h 2025-07-11 23:47:05
+++ /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/CoreImage.framework/Headers/CIFilter.h 2025-07-25 22:24:13
@@ -237,6 +237,7 @@
CORE_IMAGE_EXPORT NSString * const kCIInputRadius1Key NS_AVAILABLE(16_0, 19_0);
/// A key to get or set the geometric angle value of a Core Image filter. Typically the angle is in radians.
+///
/// The value for this key needs to be an `NSNumber` instance.
CORE_IMAGE_EXPORT NSString * const kCIInputAngleKey NS_AVAILABLE(10_5, 7_0);
@@ -261,6 +262,7 @@
CORE_IMAGE_EXPORT NSString * const kCIInputIntensityKey NS_AVAILABLE(10_5, 7_0);
/// A key to get or set the scalar EV value of a Core Image filter that specifies how many F-stops brighter or darker to make the image.
+///
/// The value for this key needs to be an `NSNumber` instance.
CORE_IMAGE_EXPORT NSString * const kCIInputEVKey NS_AVAILABLE(10_5, 7_0);
@@ -300,10 +302,12 @@
CORE_IMAGE_EXPORT NSString * const kCIInputContrastKey NS_AVAILABLE(10_5, 7_0);
/// A key to get or set the boolean behavior of a Core Image filter that specifies if the filter should extrapolate a table beyond the defined range.
+///
/// The value for this key needs to be an `NSNumber` instance.
CORE_IMAGE_EXPORT NSString * const kCIInputExtrapolateKey NS_AVAILABLE(16_0, 19_0);
/// A key to get or set the boolean behavior of a Core Image filter that specifies if the filter should operate in linear or perceptual colors.
+///
/// The value for this key needs to be an `NSNumber` instance.
CORE_IMAGE_EXPORT NSString * const kCIInputPerceptualKey NS_AVAILABLE(16_0, 19_0);
@@ -323,6 +327,7 @@
CORE_IMAGE_EXPORT NSString * const kCIInputWeightsKey NS_AVAILABLE(10_11, 9_0);
/// A key to get or set the gradient map image of a Core Image filter that maps luminance to a color with alpha.
+///
/// The value for this key needs to be a 1 pixel tall ``CIImage`` instance.
CORE_IMAGE_EXPORT NSString * const kCIInputGradientImageKey NS_AVAILABLE(10_5, 9_0);
@@ -337,6 +342,7 @@
CORE_IMAGE_EXPORT NSString * const kCIInputMatteImageKey NS_AVAILABLE(10_14, 12_0);
/// A key to get or set the environment map image of a Core Image filter that maps normal directions to a color with alpha.
+///
/// The value for this key needs to be a ``CIImage`` instance.
CORE_IMAGE_EXPORT NSString * const kCIInputShadingImageKey NS_AVAILABLE(10_5, 9_0);
@@ -372,21 +378,26 @@
CORE_IMAGE_EXPORT NSString * const kCIInputVersionKey NS_AVAILABLE(10_11, 6_0);
/// An enum string type that your code can use to select different System Tone Mapping modes.
+///
/// These options are consistent with the analogous options available in Core Graphics,
/// Core Animation, AppKit, UIKit, and SwiftUI.
/// In Core Image, this option can be set on the `CISystemToneMap` filter.
typedef NSString * CIDynamicRangeOption NS_TYPED_ENUM;
-/// Standard dynamic range.
+/// Use Standard dynamic range.
+///
/// Images with `contentHeadroom` metadata will be tone mapped to a maximum pixel value of 1.0.
CORE_IMAGE_EXPORT CIDynamicRangeOption const kCIDynamicRangeStandard NS_AVAILABLE(16_0, 19_0);
/// Use extended dynamic range, but brightness is modulated to optimize for
-/// co-existence with other composited content. For best results, images should
-/// contain `contentAverageLightLevel` metadata.
+/// co-existence with other composited content.
+///
+/// For best results, images should contain `contentAverageLightLevel` metadata.
CORE_IMAGE_EXPORT CIDynamicRangeOption const kCIDynamicRangeConstrainedHigh NS_AVAILABLE(16_0, 19_0);
-/// Use High dynamic range. Provides the best HDR quality. This needs to be reserved
+/// Use High dynamic range.
+///
+/// This provides the best HDR quality and needs to be reserved
/// for situations where the user is focused on the media, such as larger views in
/// an image editing/viewing app, or annotating/drawing with HDR colors
CORE_IMAGE_EXPORT CIDynamicRangeOption const kCIDynamicRangeHigh NS_AVAILABLE(16_0, 19_0);
diff -ruN /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/CoreImage.framework/Headers/CIImageProcessor.h /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/CoreImage.framework/Headers/CIImageProcessor.h
--- /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/CoreImage.framework/Headers/CIImageProcessor.h 2025-07-11 22:48:46
+++ /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/CoreImage.framework/Headers/CIImageProcessor.h 2025-07-25 22:49:46
@@ -35,12 +35,56 @@
/// Accelerate vImage operations, or your own CPU-based image-processing routines—with a Core Image filter chain.
///
/// Your custom image processing operation is invoked by your subclassed image processor kernel's
-/// ``processWithInputs:arguments:output:error:`` method. The method can accept zero, one or more inputs.
+/// ``processWithInputs:arguments:output:error:`` method. The method can accept zero, one or more `input` objects.
/// Processors that generate imagery (such as a noise or pattern generator) need no inputs, while kernels that
-/// composite source images together require multiple inputs. The arguments dictionary allows the caller to pass in
-/// additional parameter values (such as the radius of a blur) and the output contains the destination for your
+/// composite source images together require multiple inputs. The `arguments` dictionary allows the caller to pass in
+/// additional parameter values (such as the radius of a blur) and the `output` contains the destination for your
/// image processing code to write to.
///
+/// The following code shows how you can subclass `CIImageProcessorKernel` to apply the Metal Performance Shader
+/// <doc://com.apple.documentation/documentation/metalperformanceshaders/mpsimagethresholdbinary> kernel to a ``CIImage``:
+///
+/// ```swift
+/// class ThresholdImageProcessorKernel: CIImageProcessorKernel {
+/// override class func process(with inputs: [CIImageProcessorInput]?, arguments: [String : Any]?, output: CIImageProcessorOutput) throws {
+/// guard
+/// let commandBuffer = output.metalCommandBuffer,
+/// let input = inputs?.first,
+/// let sourceTexture = input.metalTexture,
+/// let destinationTexture = output.metalTexture,
+/// let thresholdValue = arguments?["thresholdValue"] as? Float else {
+/// return
+/// }
+///
+/// let threshold = MPSImageThresholdBinary(
+/// device: commandBuffer.device,
+/// thresholdValue: thresholdValue,
+/// maximumValue: 1.0,
+/// linearGrayColorTransform: nil)
+///
+/// threshold.encode(
+/// commandBuffer: commandBuffer,
+/// sourceTexture: sourceTexture,
+/// destinationTexture: destinationTexture)
+/// }
+/// }
+/// ```
+///
+/// To apply the kernel to an image, the calling side invokes the image processor's `apply(withExtent:inputs:arguments:)`
+/// method. The following code generates a new ``CIImage`` object named `result` which contains a thresholded version of
+/// the source image, `inputImage`.
+///
+/// ```swift
+/// let result = try? ThresholdImageProcessorKernel.apply(
+/// withExtent: inputImage.extent,
+/// inputs: [inputImage],
+/// arguments: ["thresholdValue": 0.25])
+/// ```
+///
+/// > Important: Core Image will concatenate kernels in a render into as few programs as possible, avoiding the creation
+/// of intermediate buffers. However, it is unable to do this with image processor kernels. To get the best performance,
+/// you should use `CIImageProcessorKernel` objects only when your algorithms can't be expressed as a ``CIKernel``.
+///
/// ## Subclassing Notes
///
/// The `CIImageProcessorKernel` class is abstract; to create a custom image processor, you define a subclass of this class.
@@ -70,10 +114,14 @@
/// Override this class method to implement your Core Image Processor Kernel subclass.
///
-/// The class method will be called to produce the requested region of the output image
-/// given the required regions of the input images and other arguments.
+/// When a `CIImage` containing your `CIImageProcessorKernel` class is rendered, your class' implementation of
+/// this method will be called as needed for that render. The method may be called more than once if Core Image
+/// needs to tile to limit memory usage.
+///
+/// When your implementation of this class method is called, use the provided `inputs` and `arguments` objects
+/// to return processed pixel data to Core Image via `output`.
///
-/// > Important: this is a class method you cannot use or capture any state by accident.
+/// > Important: this is a class method so that you cannot use or capture any state by accident.
/// All the parameters that affect the output results must be passed to
/// ``applyWithExtent:inputs:arguments:error:``.
///
@@ -99,7 +147,7 @@
///
/// The default implementation would return outputRect.
///
-/// > Important: this is a class method you cannot use or capture any state by accident.
+/// > Important: this is a class method so that you cannot use or capture any state by accident.
/// All the parameters that affect the output results must be passed to
/// ``applyWithExtent:inputs:arguments:error:``.
///
@@ -123,7 +171,7 @@
/// * as CoreImage prepares for a render, this method will be called for each input to return an ROI tile array.
/// * as CoreImage performs the render, the method ``processWithInputs:arguments:output:error:`` will be called once for each tile.
///
-/// > Important: this is a class method you cannot use or capture any state by accident.
+/// > Important: this is a class method so that you cannot use or capture any state by accident.
/// All the parameters that affect the output results must be passed to
/// ``applyWithExtent:inputs:arguments:error:``.
///
@@ -222,10 +270,14 @@
///
/// This supports 0, 1, 2 or more input images and 2 or more output images.
///
-/// The class method will be called to produce the requested region of the output images
-/// given the required regions of the input images and other arguments.
+/// When a `CIImage` containing your `CIImageProcessorKernel` class is rendered, your class' implementation of
+/// this method will be called as needed for that render. The method may be called more than once if Core Image
+/// needs to tile to limit memory usage.
+///
+/// When your implementation of this class method is called, use the provided `inputs` and `arguments` objects
+/// to return processed pixel data to Core Image via multiple `outputs`.
///
-/// > Important: this is a class method you cannot use or capture any state by accident.
+/// > Important: this is a class method so that you cannot use or capture any state by accident.
/// All the parameters that affect the output results must be passed to
/// ``applyWithExtent:inputs:arguments:error:``.
///
@@ -263,7 +315,7 @@
NS_AVAILABLE(16_0, 19_0);
/// Call this method on your multiple-output Core Image Processor Kernel subclass
-/// to create an `NSArray` of new ``CIImage``s given the specified `NSArray` of extents.
+/// to create an array of new image objects given the specified array of extents.
///
/// The inputs and arguments will be retained so that your subclass can be called when the image is drawn.
///
@@ -294,6 +346,26 @@
@end
+/// Your app does not define classes that adopt this protocol; Core Image provides an object of this type
+/// when rendering a custom image processor you create with a ``CIImageProcessorKernel`` subclass.
+///
+/// When a `CIImage` containing your `CIImageProcessorKernel` class is rendered, your
+/// ``CIImageProcessorKernel/processWithInputs:arguments:output:error:`` class method will be called as
+/// needed for that render. The method may be called more than once if Core Image needs to tile to
+/// limit memory usage.
+///
+/// When your image processor class method is called, use the provided `CIImageProcessorInput` object to
+/// access the image data and supporting information to perform your custom image processing routine.
+/// For example, if you process the image using a Metal shader, use the `metalTexture` property to bind the
+/// image as an input texture. Or, if you process the image using a CPU-based routine, use the `baseAddress`
+/// property to access pixel data in memory.
+///
+/// You should use the input's `region` property to determine which portion of the input image is available
+/// to be processed.
+///
+/// To finish setting up or performing your image processing routine, use the provided ``CIImageProcessorOutput``
+/// object to return processed pixel data to Core Image.
+///
NS_CLASS_AVAILABLE(10_12, 10_0)
@protocol CIImageProcessorInput
@@ -314,12 +386,12 @@
#if COREIMAGE_SUPPORTS_IOSURFACE
-/// An input `IOSurface` that your Core Image Processor Kernel can read from.
+/// An input surface object that your Core Image Processor Kernel can read from.
/// > Warning: This surface must not be modified by the ``CIImageProcessorKernel``.
@property (nonatomic, readonly) IOSurfaceRef surface;
#endif
-/// An input `CVPixelBuffer` that your Core Image Processor Kernel can read from.
+/// An input pixel buffer object that your Core Image Processor Kernel can read from.
/// > Warning: This buffer must not be modified by the ``CIImageProcessorKernel``.
@property (nonatomic, readonly, nullable) CVPixelBufferRef pixelBuffer;
@@ -333,21 +405,43 @@
/// This digest will change if the graph of the input changes in any way.
@property (nonatomic, readonly) uint64_t digest NS_AVAILABLE(13_0, 16_0);
-/// This property tell processors that implement ``/CIImageProcessorKernel/roiTileArrayForInput:arguments:outputRect:``
-/// which input tile index is being processed.
+/// This property tells a tiled-input processor which input tile index is being processed.
///
+/// This property is only relevant if your processor implements ``/CIImageProcessorKernel/roiTileArrayForInput:arguments:outputRect:``
+///
/// This can be useful if the processor needs to clear the ``CIImageProcessorOutput`` before the first tile is processed.
@property (nonatomic, readonly) NSUInteger roiTileIndex NS_AVAILABLE(14_0, 17_0);
-/// This property tell processors that implement ``/CIImageProcessorKernel/roiTileArrayForInput:arguments:outputRect:``
-/// how many input tiles will be processed.
+/// This property tells a tiled-input processor how many input tiles will be processed.
///
+/// This property is only relevant if your processor implements ``/CIImageProcessorKernel/roiTileArrayForInput:arguments:outputRect:``
+///
/// This can be useful if the processor needs to do work on the ``CIImageProcessorOutput`` after the last tile is processed.
@property (nonatomic, readonly) NSUInteger roiTileCount NS_AVAILABLE(14_0, 17_0);
@end
+/// Your app does not define classes that adopt this protocol; Core Image provides an object of this type
+/// when rendering a custom image processor you create with a ``CIImageProcessorKernel`` subclass.
+///
+/// When a `CIImage` containing your `CIImageProcessorKernel` class is rendered, your
+/// ``CIImageProcessorKernel/processWithInputs:arguments:output:error:`` class method will be called as
+/// needed for that render. The method may be called more than once if Core Image needs to tile to
+/// limit memory usage.
+///
+/// When your image processor class method is called, use the provided `CIImageProcessorOutput` object to return
+/// processed pixel data to Core Image. For example, if you process the image using a Metal shader, bind the `metalTexture`
+/// property as an attachment in a render pass or as an output texture in a compute pass. Or, if you process the image
+/// using a CPU-based routine, write processed pixel data to memory using the `baseAddress` pointer.
+///
+/// You should use the output's `region` property to determine which portion of the output image needs to be processed.
+/// Your code should fill the entirety of the `region`. This includes setting to zero any pixels in the `region` that
+/// are outside the extent passed to `applyWithExtent:inputs:arguments:error:`.
+///
+/// > Important: You must provide rendered output using only one of the following properties of the output:
+/// `baseAddress`, `surface`, `pixelBuffer`, `metalTexture`.
+///
NS_CLASS_AVAILABLE(10_12, 10_0)
@protocol CIImageProcessorOutput
@@ -367,18 +461,18 @@
#if COREIMAGE_SUPPORTS_IOSURFACE
-/// An output `IOSurface` that your Core Image Processor Kernel can write to.
+/// An output surface object that your Core Image Processor Kernel can write to.
@property (nonatomic, readonly) IOSurfaceRef surface;
#endif
-/// An output `CVPixelBuffer` that your Core Image Processor Kernel can write to.
+/// An output pixel buffer object that your Core Image Processor Kernel can write to.
@property (nonatomic, readonly, nullable) CVPixelBufferRef pixelBuffer;
-/// A `MTLTexture` object that can be bound for output using Metal.
+/// A Metal texture object that can be bound for output using Metal.
@property (nonatomic, readonly, nullable) id<MTLTexture> metalTexture;
-/// Returns a `MTLCommandBuffer` that can be used for encoding commands.
+/// Returns a Metal command buffer object that can be used for encoding commands.
@property (nonatomic, readonly, nullable) id<MTLCommandBuffer> metalCommandBuffer;
/// A 64-bit digest that uniquely describes the contents of the output of a processor.
diff -ruN /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/CoreImage.framework/Headers/CIRenderDestination.h /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/CoreImage.framework/Headers/CIRenderDestination.h
--- /Applications/Xcode_26.0.0-beta4.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/CoreImage.framework/Headers/CIRenderDestination.h 2025-07-12 03:16:09
+++ /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/CoreImage.framework/Headers/CIRenderDestination.h 2025-07-25 22:24:13
@@ -184,7 +184,7 @@
// This is false by default.
@property BOOL blendsInDestinationColorSpace;
-/// Tell the next using this destination to capture a Metal trace.
+/// Tell the next render using this destination to capture a Metal trace.
///
/// If this property is set to a file-based URL, then the next render using this
/// destination will capture a Metal trace, deleting any existing file if present.