The documentation for AVPictureInPictureVideoCallViewController suggests that it works with an AVCaptureVideoPreviewLayer in addition to AVSampleBufferDisplayLayer, but I'm having a hard time getting it to actually work. Here's my sample code:
import AVFoundation
import AVKit
import UIKit

// A view whose backing layer is an AVCaptureVideoPreviewLayer,
// so the capture session renders directly into this view.
class PIPPreviewView: UIView {
    override class var layerClass: AnyClass {
        AVCaptureVideoPreviewLayer.self
    }

    var previewLayer: AVCaptureVideoPreviewLayer {
        layer as! AVCaptureVideoPreviewLayer
    }

    var session: AVCaptureSession? {
        get { previewLayer.session }
        set { previewLayer.session = newValue }
    }
}
final class PreviewViewController: UIViewController, AVPictureInPictureControllerDelegate {
    private let captureSession = AVCaptureSession()
    var previewLayer: AVCaptureVideoPreviewLayer!

    override func viewDidLoad() {
        super.viewDidLoad()

        // Wire the default camera into the capture session.
        guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return }
        do {
            let videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
            captureSession.addInput(videoInput)
        } catch {
            return
        }

        // Full-screen preview layer for the in-app view.
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = view.layer.bounds
        previewLayer.videoGravity = .resizeAspectFill
        previewLayer.connection?.videoOrientation = .landscapeLeft
        view.layer.addSublayer(previewLayer)

        // Second preview view, hosted inside the PiP video-call view controller.
        let pipView = PIPPreviewView()
        pipView.session = captureSession

        let pipVC = AVPictureInPictureVideoCallViewController()
        pipVC.preferredContentSize = CGSize(width: 1080, height: 1920)
        pipVC.view.addSubview(pipView)

        let contentSource = AVPictureInPictureController.ContentSource(
            activeVideoCallSourceView: pipView,
            contentViewController: pipVC)
        let pipController = AVPictureInPictureController(contentSource: contentSource)
        pipController.canStartPictureInPictureAutomaticallyFromInline = true
        pipController.delegate = self

        // startRunning() blocks, so keep it off the main thread.
        DispatchQueue.global(qos: .background).async {
            self.captureSession.startRunning()
            print("starting pip")
            pipController.startPictureInPicture()
        }
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        if !captureSession.isRunning {
            DispatchQueue.global(qos: .background).async {
                self.captureSession.startRunning()
            }
        }
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        if captureSession.isRunning {
            captureSession.stopRunning()
        }
    }

    override var prefersStatusBarHidden: Bool {
        true
    }
}
When I run this, the preview layer in the app is black and the application never triggers PiP. Has anyone gotten this to work properly?
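A couple of possible culprits I haven't been able to rule out: pipController is a local variable in viewDidLoad, so nothing retains it past that method, and startPictureInPicture() is called from a background queue before isPictureInPicturePossible has had a chance to become true. Here's a sketch of the variant I plan to try next; the stored properties and the KVO approach are my own guesses, not something the docs spell out:
// Sketch, untested: retain the controller and defer starting PiP
// until the system reports that it's possible. Assumes the same
// pipView/pipVC setup as in viewDidLoad above.
private var pipController: AVPictureInPictureController?
private var pipPossibleObservation: NSKeyValueObservation?

private func configurePiP(contentSource: AVPictureInPictureController.ContentSource) {
    let controller = AVPictureInPictureController(contentSource: contentSource)
    controller.canStartPictureInPictureAutomaticallyFromInline = true
    controller.delegate = self
    pipController = controller // strong reference so it outlives viewDidLoad

    pipPossibleObservation = controller.observe(\.isPictureInPicturePossible,
                                                options: [.initial, .new]) { controller, change in
        // Start on the main queue once PiP is actually available.
        if change.newValue == true {
            DispatchQueue.main.async { controller.startPictureInPicture() }
        }
    }
}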
Is it possible to use AVAssetWriter to mux together audio and video and produce an MPEG-TS stream?
I'm constructing an AVAssetWriter like this:
private lazy var assetWriter = {
    let w = AVAssetWriter(contentType: .mpeg2TransportStream)
    w.shouldOptimizeForNetworkUse = true
    return w
}()
and wiring it up to an AVCaptureSession; however, I get the following error:
[AVAssetWriter initWithContentType:] Invalid file type UTI. Available file types are: public.aiff-audio, public.3gpp, public.aifc-audio, com.apple.m4v-video, com.apple.m4a-audio, com.apple.coreaudio-format, public.mpeg-4, com.microsoft.waveform-audio, com.apple.quicktime-movie, org.3gpp.adaptive-multi-rate-audio'
which seems to suggest that I can't do this. My goal is to produce an MPEG-TS stream to send over the network from an AVCaptureSession, following something like this guide, except producing MPEG-TS instead of fragmented MP4.
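Given the list of file types in that error, MPEG-TS really does look unsupported, so as a fallback I've been looking at the fragmented-MP4 path from the same guide. Here's a sketch of what I mean; the segment interval and delegate wiring are my reading of the WWDC fMP4 sample, so treat the details as assumptions:
import AVFoundation
import UniformTypeIdentifiers

// Sketch: emit fragmented-MP4 segments via the writer delegate
// instead of MPEG-TS. Each callback hands back a segment that can
// be sent over the network.
final class SegmentedWriter: NSObject, AVAssetWriterDelegate {
    private lazy var assetWriter: AVAssetWriter = {
        let w = AVAssetWriter(contentType: .mpeg4Movie)
        w.outputFileTypeProfile = .mpeg4AppleHLS // fragmented MP4
        w.preferredOutputSegmentInterval = CMTime(seconds: 6, preferredTimescale: 1)
        w.initialSegmentStartTime = .zero
        w.delegate = self
        return w
    }()

    func assetWriter(_ writer: AVAssetWriter,
                     didOutputSegmentData segmentData: Data,
                     segmentType: AVAssetSegmentType,
                     segmentReport: AVAssetSegmentReport?) {
        // Initialization and media segments arrive here; this is
        // where they would be handed off to the network layer.
    }
}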