I am trying to achieve an animated gradient effect whose colors change over time, driven by the current playback time in seconds. I am using AVPlayer and AVMutableVideoComposition together with a custom compositor class and a custom instruction to generate the effect. I don't want to load any video file; instead I want to generate the video purely from my own instructions, using a Metal compute shader, and have it run for 20 seconds.
However, when I run the code, I get a frozen player with the gradient applied, and when I try to play the video I get this warning in the console:
Visual isTranslatable: NO; reason: observation failure: noObservations
Here is the screenshot:
My entire code:
import AVFoundation
import Metal
class GradientVideoCompositorTest: NSObject, AVVideoCompositing {
    var sourcePixelBufferAttributes: [String: Any]? = [
        kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
    ]
    var requiredPixelBufferAttributesForRenderContext: [String: Any] = [
        kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
    ]

    private var renderContext: AVVideoCompositionRenderContext?
    private var metalDevice: MTLDevice!
    private var metalCommandQueue: MTLCommandQueue!
    private var metalLibrary: MTLLibrary!
    private var metalPipeline: MTLComputePipelineState!

    override init() {
        super.init()
        setupMetal()
    }

    func setupMetal() {
        guard let device = MTLCreateSystemDefaultDevice(),
              let queue = device.makeCommandQueue(),
              let library = device.makeDefaultLibrary(),
              let function = library.makeFunction(name: "gradientShader") else {
            fatalError("Metal setup failed")
        }
        self.metalDevice = device
        self.metalCommandQueue = queue
        self.metalLibrary = library
        self.metalPipeline = try? device.makeComputePipelineState(function: function)
    }

    func renderContextChanged(_ newRenderContext: AVVideoCompositionRenderContext) {
        renderContext = newRenderContext
    }

    func startRequest(_ request: AVAsynchronousVideoCompositionRequest) {
        guard let outputPixelBuffer = renderContext?.newPixelBuffer(),
              let metalTexture = createMetalTexture(from: outputPixelBuffer) else {
            request.finish(with: NSError(domain: "com.example.gradient", code: -1, userInfo: nil))
            return
        }
        // Drive the animation from the composition time of the requested frame.
        let time = Float(request.compositionTime.seconds)
        renderGradient(to: metalTexture, time: time)
        request.finish(withComposedVideoFrame: outputPixelBuffer)
    }
    private func createMetalTexture(from pixelBuffer: CVPixelBuffer) -> MTLTexture? {
        var texture: MTLTexture?
        let width = CVPixelBufferGetWidth(pixelBuffer)
        let height = CVPixelBufferGetHeight(pixelBuffer)
        // Note: this descriptor is currently unused; the texture below comes from the CVMetalTextureCache.
        let textureDescriptor = MTLTextureDescriptor.texture2DDescriptor(
            pixelFormat: .bgra8Unorm,
            width: width,
            height: height,
            mipmapped: false
        )
        textureDescriptor.usage = [.shaderWrite, .shaderRead]

        CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly)
        // Wrap the CVPixelBuffer in a Metal texture so the compute shader writes
        // directly into the buffer that is handed back to AVFoundation.
        if let textureCache = createTextureCache(),
           let cvTexture = createCVMetalTexture(from: pixelBuffer, cache: textureCache) {
            texture = CVMetalTextureGetTexture(cvTexture)
        }
        CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly)
        return texture
    }
    private func renderGradient(to texture: MTLTexture, time: Float) {
        guard let commandBuffer = metalCommandQueue.makeCommandBuffer(),
              let commandEncoder = commandBuffer.makeComputeCommandEncoder() else { return }

        commandEncoder.setComputePipelineState(metalPipeline)
        commandEncoder.setTexture(texture, index: 0)

        var mutableTime = time
        commandEncoder.setBytes(&mutableTime, length: MemoryLayout<Float>.size, index: 0)

        // One 16x16 threadgroup per tile, rounded up to cover the whole texture.
        let threadsPerGroup = MTLSize(width: 16, height: 16, depth: 1)
        let threadGroups = MTLSize(
            width: (texture.width + 15) / 16,
            height: (texture.height + 15) / 16,
            depth: 1
        )
        commandEncoder.dispatchThreadgroups(threadGroups, threadsPerThreadgroup: threadsPerGroup)
        commandEncoder.endEncoding()
        commandBuffer.commit()
        // Wait for the GPU to finish writing into the pixel buffer before the frame
        // is handed back to AVFoundation in startRequest.
        commandBuffer.waitUntilCompleted()
    }
    private func createTextureCache() -> CVMetalTextureCache? {
        var cache: CVMetalTextureCache?
        CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, metalDevice, nil, &cache)
        return cache
    }

    private func createCVMetalTexture(from pixelBuffer: CVPixelBuffer, cache: CVMetalTextureCache) -> CVMetalTexture? {
        var cvTexture: CVMetalTexture?
        let width = CVPixelBufferGetWidth(pixelBuffer)
        let height = CVPixelBufferGetHeight(pixelBuffer)
        CVMetalTextureCacheCreateTextureFromImage(
            kCFAllocatorDefault,
            cache,
            pixelBuffer,
            nil,
            .bgra8Unorm,
            width,
            height,
            0,
            &cvTexture
        )
        return cvTexture
    }
}
class GradientCompositionInstructionTest: NSObject, AVVideoCompositionInstructionProtocol {
    var timeRange: CMTimeRange
    var enablePostProcessing: Bool = true
    var containsTweening: Bool = true
    var requiredSourceTrackIDs: [NSValue]? = nil
    var passthroughTrackID: CMPersistentTrackID = kCMPersistentTrackID_Invalid

    init(timeRange: CMTimeRange) {
        self.timeRange = timeRange
    }
}
func createGradientVideoComposition(duration: CMTime, size: CGSize) -> AVMutableVideoComposition {
    // Note: this composition is currently created but never used or returned.
    let composition = AVMutableComposition()

    let instruction = GradientCompositionInstructionTest(timeRange: CMTimeRange(start: .zero, duration: duration))

    let videoComposition = AVMutableVideoComposition()
    videoComposition.customVideoCompositorClass = GradientVideoCompositorTest.self
    videoComposition.renderSize = size
    videoComposition.frameDuration = CMTime(value: 1, timescale: 30) // 30 FPS
    videoComposition.instructions = [instruction]

    return videoComposition
}

The Metal compute shader (in the project's .metal file):
#include <metal_stdlib>
using namespace metal;

kernel void gradientShader(texture2d<float, access::write> output [[texture(0)]],
                           constant float &time [[buffer(0)]],
                           uint2 id [[thread_position_in_grid]]) {
    // Guard against the rounded-up dispatch: threads outside the texture write nothing.
    if (id.x >= output.get_width() || id.y >= output.get_height()) {
        return;
    }

    float2 uv = float2(id) / float2(output.get_width(), output.get_height());

    // Animated colors based on time
    float3 color1 = float3(sin(time) * 0.8 + 0.1, 0.6, 1.0);
    float3 color2 = float3(0.12, 0.99, cos(time) * 0.9 + 0.3);

    // Linear interpolation for gradient
    float3 gradientColor = mix(color1, color2, uv.y);

    output.write(float4(gradientColor, 1.0), id);
}
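Not shown above is how the composition reaches the player. Below is a simplified sketch of that wiring; the blank composition track, the 1280×720 render size, and the makeGradientPlayer name are illustrative assumptions rather than the exact code from my project:

import AVFoundation

func makeGradientPlayer() -> AVPlayer {
    let duration = CMTime(value: 20, timescale: 1) // 20-second timeline

    // Give the asset a 20-second video track so the item has a duration to play;
    // the track carries no media, since the custom compositor generates every frame.
    let composition = AVMutableComposition()
    let track = composition.addMutableTrack(withMediaType: .video,
                                            preferredTrackID: kCMPersistentTrackID_Invalid)
    track?.insertEmptyTimeRange(CMTimeRange(start: .zero, duration: duration))

    let videoComposition = createGradientVideoComposition(duration: duration,
                                                          size: CGSize(width: 1280, height: 720))

    let item = AVPlayerItem(asset: composition)
    item.videoComposition = videoComposition
    return AVPlayer(playerItem: item)
}

The empty track is only there to give the item a timeline; all pixels are supposed to come from the compute shader.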
I am trying to set up code signing for my macOS/Tauri app and I’m running into a problem with my Developer ID Application certificate in Keychain Access.
Steps I followed:
Generated a CSR on my Mac using Keychain Access → Certificate Assistant → Request a Certificate From a Certificate Authority.
Uploaded the CSR to the Apple Developer portal.
Downloaded the resulting .cer file and installed it in my login Keychain.
The certificate appears under All Items, but it does not show under My Certificates, and there is no private key attached.
What I expected:
The certificate should pair with the private key created during CSR generation and show under My Certificates, allowing me to export a .p12 file.
What I’ve tried so far:
Verified that the WWDR Intermediate Certificate is installed.
Ensured I’m on the same Mac and same login Keychain where I created the CSR.
Revoked and regenerated the certificate multiple times.
Tried importing into both login and system Keychains.
Problem:
The certificate never links with the private key and therefore cannot be used for signing.
Has anyone experienced this issue or knows why the certificate would fail to pair with the private key in Keychain Access? Any workaround or fix would be greatly appreciated.
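For reference, a quick way to confirm whether the certificate and private key are actually paired as a usable signing identity is the standard security command-line tool; a working Developer ID Application setup should list the identity here:

    security find-identity -p codesigning -v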
Topic: Code Signing
SubTopic: Certificates, Identifiers & Profiles
Tags: Signing Certificates, Code Signing, Developer ID