Reputation: 6477
I have the following two contexts:
private lazy var firstContext: CIContext = {
    let options: [CIContextOption: Any] = [.cacheIntermediates: false]
    return CIContext(mtlDevice: self.device!, options: options)
}()

private lazy var secondContext: CIContext = {
    let options: [CIContextOption: Any] = [.cacheIntermediates: false,
                                           .outputColorSpace: NSNull(),
                                           .workingColorSpace: NSNull()]
    return CIContext(mtlDevice: self.device!, options: options)
}()
public var useFirstContext = true
private var context: CIContext {
    return useFirstContext ? firstContext : secondContext
}
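The only difference between the two is that the second context passes NSNull() for the output and working color spaces, which disables Core Image's color management. As a diagnostic (just a sketch), the resolved color spaces can be inspected like this:
// Diagnostic sketch: compare what each context resolved its color spaces to.
// With NSNull() color management is disabled, so the second context should
// report nil here; the first falls back to Core Image's default working space.
print("first working space:  \(String(describing: firstContext.workingColorSpace))")
print("second working space: \(String(describing: secondContext.workingColorSpace))")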
And my MTKView is configured as follows:
metalLayer = self.layer as? CAMetalLayer
metalLayer?.wantsExtendedDynamicRangeContent = true
metalLayer?.colorspace = CGColorSpace(name: CGColorSpace.itur_2100_HLG)
colorPixelFormat = .bgr10a2Unorm
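For context, the rest of the view setup is roughly the standard Core-Image-into-MTKView configuration; this is only a sketch of it, not my exact init:
// Sketch of the remaining view setup (simplified, not verbatim):
self.device = MTLCreateSystemDefaultDevice()
commandQueue = device?.makeCommandQueue()
// Core Image writes into the drawable texture via the CIRenderDestination below,
// so, as I understand it, the view must not be framebuffer-only.
framebufferOnly = false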
When I render incoming pixel buffers using this context, I see different outputs in the MTKView depending on which of the two contexts is used. There is no filter chain configured, apart from rescaling and transforming the CIImage. Why is the output different between the two contexts when the incoming buffers are in the same color space? Here is the drawing code:
func drawCIImage(_ ciImage: CIImage?) {
    guard let image = ciImage,
          let currentDrawable = currentDrawable,
          let commandBuffer = commandQueue?.makeCommandBuffer()
    else {
        return
    }

    // Scale the image to fill the drawable.
    let drawableSize = self.drawableSize
    let scaleX = drawableSize.width / image.extent.width
    let scaleY = drawableSize.height / image.extent.height
    let scaledImage = image.transformed(by: CGAffineTransform(scaleX: scaleX, y: scaleY))
    let filteredImage = scaledImage

    // Render directly into the drawable's texture.
    let destination = CIRenderDestination(width: Int(drawableSize.width),
                                          height: Int(drawableSize.height),
                                          pixelFormat: self.colorPixelFormat,
                                          commandBuffer: commandBuffer,
                                          mtlTextureProvider: { () -> MTLTexture in
                                              return currentDrawable.texture
                                          })

    _ = try? context.startTask(toRender: filteredImage, to: destination)
    commandBuffer.present(currentDrawable)
    commandBuffer.commit()
}
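For reference, each incoming pixel buffer is simply wrapped in a CIImage and handed to drawCIImage(_:); the sketch below (with illustrative delegate/property names, not my actual code) shows the idea, assuming an AVCaptureVideoDataOutput source:
// Illustrative sketch only: the real pipeline that produces the buffers isn't part of the question.
func captureOutput(_ output: AVCaptureOutput,
                   didOutput sampleBuffer: CMSampleBuffer,
                   from connection: AVCaptureConnection) {
    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
    // No explicit color space is attached here; the buffer carries its own (HLG)
    // attachments, and the context's working/output color spaces decide the rest.
    metalView.drawCIImage(CIImage(cvPixelBuffer: pixelBuffer))
}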
Upvotes: 0
Views: 68