Hi Joe,
It's involved, and I haven't verified that I'm using all the best APIs, but I did make an effort to avoid extra buffer copies. Your implementation may have a different optimal route depending on your texture source, but this shows the essence of working with the drawable queue.
```swift
func drawNextTexture(pixelBuffer: CVPixelBuffer) {
    guard let textureResource = textureResource else { return }
    guard let drawableQueue = drawableQueue else { return }
    guard let scalePipelineState = scalePipelineState else { return }
    guard let scalePipelineDescriptor = scalePipelineDescriptor else { return }
    guard let commandQueue = commandQueue else { return }
    guard let textureCache = textureCache else { return }

    let srcWidth = CVPixelBufferGetWidth(pixelBuffer)
    let srcHeight = CVPixelBufferGetHeight(pixelBuffer)

    autoreleasepool {
        var drawableTry: TextureResource.Drawable?
        do {
            drawableTry = try drawableQueue.nextDrawable() // may stall for up to 1 second
            guard drawableTry != nil else {
                return // no frame needed
            }
        } catch {
            print("Exception obtaining drawable: \(error)")
            return
        }
        guard let drawable = drawableTry else { return }

        guard let commandBuffer = commandQueue.makeCommandBuffer() else {
            return
        }

        // Wrap the pixel buffer as a Metal texture via the texture cache (no copy).
        var cvMetalTextureTry: CVMetalTexture?
        CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                  textureCache,
                                                  pixelBuffer,
                                                  nil,
                                                  .bgra8Unorm_srgb, // sRGB here; try .bgra8Unorm if you need linear color
                                                  srcWidth,
                                                  srcHeight,
                                                  0,
                                                  &cvMetalTextureTry)
        guard let cvMetalTexture = cvMetalTextureTry,
              let sourceTexture = CVMetalTextureGetTexture(cvMetalTexture) else {
            return
        }

        // Check whether the source and destination sizes match
        if srcWidth == textureResource.width && srcHeight == textureResource.height {
            // Sizes match: use a blit command encoder to copy the data to the drawable's texture
            if let blitEncoder = commandBuffer.makeBlitCommandEncoder() {
                blitEncoder.copy(from: sourceTexture,
                                 sourceSlice: 0,
                                 sourceLevel: 0,
                                 sourceOrigin: MTLOrigin(x: 0, y: 0, z: 0),
                                 sourceSize: MTLSize(width: srcWidth, height: srcHeight, depth: 1),
                                 to: drawable.texture,
                                 destinationSlice: 0,
                                 destinationLevel: 0,
                                 destinationOrigin: MTLOrigin(x: 0, y: 0, z: 0))
                blitEncoder.endEncoding()
            }
        } else {
            // Sizes do not match: render the source texture into the drawable, scaling it to fit
            let renderPassDescriptor = MTLRenderPassDescriptor()
            renderPassDescriptor.colorAttachments[0].texture = drawable.texture
            renderPassDescriptor.colorAttachments[0].loadAction = .clear
            renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColorMake(0, 0, 0, 1) // clear to opaque black
            renderPassDescriptor.colorAttachments[0].storeAction = .store

            if let renderEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: renderPassDescriptor) {
                renderEncoder.setRenderPipelineState(scalePipelineState)
                renderEncoder.setVertexBuffer(scaleVertexBuffer, offset: 0, index: 0)
                renderEncoder.setVertexBuffer(scaleTexCoordBuffer, offset: 0, index: 1)
                renderEncoder.setFragmentTexture(sourceTexture, index: 0)
                renderEncoder.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4)
                renderEncoder.endEncoding()
            }
        }

        commandBuffer.present(drawable)
        commandBuffer.commit()
    }
}
```
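For context, the function above assumes a handful of properties (textureResource, drawableQueue, textureCache, commandQueue, and the scaling pipeline) that get set up once, elsewhere. Here's a rough sketch of how that setup might look, not a drop-in: the class name, sizes, and pixel format are placeholders, and the scale pipeline, vertex buffers, and shaders are omitted.

```swift
import RealityKit
import Metal
import CoreVideo

// Hypothetical holder for the properties drawNextTexture(pixelBuffer:) relies on.
// The scaling pipeline (scalePipelineState, scaleVertexBuffer, scaleTexCoordBuffer)
// and its shaders are not shown here.
final class DrawableQueueSetup {
    var textureResource: TextureResource?
    var drawableQueue: TextureResource.DrawableQueue?
    var textureCache: CVMetalTextureCache?
    var commandQueue: MTLCommandQueue?

    // `resource` is an existing TextureResource already bound to your material.
    init?(resource: TextureResource, width: Int, height: Int) {
        guard let device = MTLCreateSystemDefaultDevice() else { return nil }
        commandQueue = device.makeCommandQueue()

        // Cache used to wrap incoming CVPixelBuffers as Metal textures without copying.
        CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, device, nil, &textureCache)

        // Queue of drawables that will back the texture resource from now on.
        let descriptor = TextureResource.DrawableQueue.Descriptor(
            pixelFormat: .bgra8Unorm_srgb, // match the format used in drawNextTexture
            width: width,
            height: height,
            usage: [.renderTarget, .shaderRead, .shaderWrite],
            mipmapsMode: .none)
        guard let queue = try? TextureResource.DrawableQueue(descriptor) else { return nil }
        drawableQueue = queue

        // Redirect the material's texture to the drawable queue.
        resource.replace(withDrawables: queue)
        textureResource = resource
    }
}
```

Then call drawNextTexture(pixelBuffer:) whenever a new frame arrives from your source (for example, a video output or capture callback).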
Good luck.