
Reply to Core Data complaining about store being opened without persistent history tracking... but I don't think that it has been
Example for NSPersistentHistoryTrackingKey:

static let containerLocal: NSPersistentContainer = {
    let description = NSPersistentStoreDescription()
    description.url = Self.storeURL
    description.configuration = "Default"
    description.setOption(true as NSNumber, forKey: NSPersistentHistoryTrackingKey)
    description.setOption(true as NSNumber, forKey: NSPersistentStoreRemoteChangeNotificationPostOptionKey)
    let container = NSPersistentContainer(name: "Model")
    container.persistentStoreDescriptions = [description]
    container.loadPersistentStores(completionHandler: { (storeDescription, error) in
        if let error = error as NSError? {
            fatalError("loadPersistentStores() error \(error), \(error.userInfo)")
        } else {
            #if DEBUG
            print("DB container = Default")
            #endif
        }
    })
    return container
}()
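Once both options are enabled, you will typically also want to react to the remote change notification that the second option turns on. A minimal sketch under that assumption (where you place the observer and what you do in the handler is up to you):

import CoreData
import Foundation

// Observe remote change notifications posted because
// NSPersistentStoreRemoteChangeNotificationPostOptionKey is enabled on the store.
let observer = NotificationCenter.default.addObserver(
    forName: .NSPersistentStoreRemoteChange,
    object: containerLocal.persistentStoreCoordinator,
    queue: nil
) { _ in
    // Re-fetch or merge changes here, e.g. refresh the view context.
    print("Persistent store remote change received")
}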
May ’25
Reply to Removing NSPersistentHistoryTrackingKey causes error.
Example with NSPersistentHistoryTrackingKey:

static let containerCloud: NSPersistentCloudKitContainer = {
    let description = NSPersistentStoreDescription()
    description.url = Self.storeURL
    description.configuration = "CloudKit"
    description.setOption(true as NSNumber, forKey: NSPersistentHistoryTrackingKey)
    description.setOption(true as NSNumber, forKey: NSPersistentStoreRemoteChangeNotificationPostOptionKey)
    description.cloudKitContainerOptions = NSPersistentCloudKitContainerOptions(containerIdentifier: "iCloud.jsblocker")
    let container = NSPersistentCloudKitContainer(name: "Model")
    container.persistentStoreDescriptions = [description]
    container.loadPersistentStores(completionHandler: { (storeDescription, error) in
        if let error = error as NSError? {
            fatalError("loadPersistentStores() error \(error), \(error.userInfo)")
        } else {
            #if DEBUG
            print("DB container = CloudKit")
            #endif
        }
    })
    return container
}()
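History tracking is what lets you consume the change history after each remote change. A minimal sketch of that step, assuming you persist the last processed token yourself (the lastToken parameter below is a hypothetical value you store):

import CoreData

// Fetch all history transactions recorded after the last processed token
// and merge them into the view context.
func processRemoteChanges(in container: NSPersistentCloudKitContainer, after lastToken: NSPersistentHistoryToken?) throws {
    let context = container.newBackgroundContext()
    let request = NSPersistentHistoryChangeRequest.fetchHistory(after: lastToken)
    let result = try context.execute(request) as? NSPersistentHistoryResult
    let transactions = result?.result as? [NSPersistentHistoryTransaction] ?? []
    for transaction in transactions {
        container.viewContext.mergeChanges(fromContextDidSave: transaction.objectIDNotification())
    }
}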
May ’25
Reply to Playing files simultaneously in AVAudioEngine
There is no more perfect synchronization than merging all the tracks into a single buffer. This tested code mixes the tracks and plays the result:

import SwiftUI
import AVFoundation

let AV_ENGINE = AVAudioEngine()
#if os(iOS)
let AV_SESSION = AVAudioSession.sharedInstance()
#endif

@main struct app: App {

    var body: some Scene {
        WindowGroup {
            Text("Audio Mix")
        }
    }

    init() {
        /* mix */
        let mix = AVAudioEngine().mix([
            AVAudioPCMBuffer.etaloneGenerate(channels: 2, size: 7000)!,
            AVAudioPCMBuffer.etaloneGenerate(channels: 2, size: 3000)!,
            AVAudioPCMBuffer.etaloneGenerate(channels: 2, size: 4000)!,
        ])!
        /* play */
        let avPlayerNode = AVAudioPlayerNode()
        AV_ENGINE.attach(avPlayerNode)
        AV_ENGINE.connect(
            avPlayerNode,
            to: AV_ENGINE.mainMixerNode,
            format: AVAudioEngine.AV_DEFAULT_FORMAT
        )
        if AV_ENGINE.isRunning == false {
            try! AV_ENGINE.start()
        }
        avPlayerNode.scheduleBuffer(mix)
        avPlayerNode.play()
        dump( mix.frameLength )
    }

}

Extension Numeric:

extension Numeric {

    func fixBounds(min: Self = 0, max: Self) -> Self where Self: Comparable {
        if self < min { return min }
        if self > max { return max }
        return self
    }

}

Extension AVAudioEngine (the magic is here):

extension AVAudioEngine {

    static let AV_DEFAULT_LINEAR_PCM_BIT_DEPTH_FORMAT: AVAudioCommonFormat = .pcmFormatFloat32
    static let AV_DEFAULT_SAMPLERATE: Double = 44100.0
    static let AV_DEFAULT_CHANNELS: AVAudioChannelCount = 2
    static let AV_DEFAULT_IS_INTERLEAVED: Bool = false
    static let AV_DEFAULT_FORMAT = AVAudioFormat(
        commonFormat: AV_DEFAULT_LINEAR_PCM_BIT_DEPTH_FORMAT,
        sampleRate  : AV_DEFAULT_SAMPLERATE,
        channels    : AV_DEFAULT_CHANNELS,
        interleaved : AV_DEFAULT_IS_INTERLEAVED
    )!

    public func mix(_ avBuffers: [AVAudioPCMBuffer], resultFormat: AVAudioFormat? = nil) -> AVAudioPCMBuffer? {
        do {
            var avPlayerNodes: [AVAudioPlayerNode] = []
            var frameLength: AVAudioFrameCount = 0
            for avBuffer in avBuffers {
                let avPlayerNode = AVAudioPlayerNode()
                avPlayerNodes.append(avPlayerNode)
                self.attach(avPlayerNode)
                self.connect(
                    avPlayerNode,
                    to: self.mainMixerNode,
                    format: avBuffer.format
                )
                avPlayerNode.scheduleBuffer(avBuffer)
                frameLength = max( frameLength, avBuffer.frameLength )
            }
            try self.enableManualRenderingMode(
                AVAudioEngineManualRenderingMode.offline,
                format           : resultFormat ?? Self.AV_DEFAULT_FORMAT,
                maximumFrameCount: frameLength
            )
            try self.start()
            for node in avPlayerNodes { node.play() }
            let buffer = AVAudioPCMBuffer(
                pcmFormat    : self.manualRenderingFormat,
                frameCapacity: self.manualRenderingMaximumFrameCount
            )!
            let result = AVAudioPCMBuffer(
                pcmFormat    : resultFormat ?? Self.AV_DEFAULT_FORMAT,
                frameCapacity: frameLength
            )!
            result.frameLength = frameLength
            let renderStep = 1024
            for from in stride(from: 0, to: frameLength, by: renderStep) {
                let renderResult = try self.renderOffline( AVAudioFrameCount(renderStep), to: buffer )
                switch renderResult {
                    case .success: result.segmentSet( buffer, from: UInt64(from), size: UInt64(renderStep) )
                    case .error                        : break
                    case .insufficientDataFromInputNode: break
                    case .cannotDoInCurrentContext     : break
                    default                            : break
                }
            }
            for node in avPlayerNodes { node.stop() }
            self.stop()
            return result
        } catch {
            return nil
        }
    }

}

Extension AVAudioPCMBuffer:

extension AVAudioPCMBuffer {

    func segmentSet(_ data: AVAudioPCMBuffer, from: UInt64 = 0, size: UInt64? = nil) {
        let srcFullSize = UInt64(data.frameLength)
        let dstFullSize = UInt64(self.frameLength)
        var size = size ?? srcFullSize
        let from = from.fixBounds(max: dstFullSize)
        size = size.fixBounds(max: dstFullSize - from)
        for i in 0 ..< UInt64((Float(size) / Float(srcFullSize)).rounded(.up)) {
            let sampleSize = Int(data.format.streamDescription.pointee.mBytesPerFrame)
            let srcPointer = UnsafeMutableAudioBufferListPointer(data.mutableAudioBufferList)
            let dstPointer = UnsafeMutableAudioBufferListPointer(self.mutableAudioBufferList)
            let size = min(srcFullSize, size - (i * srcFullSize))
            let from = from + (i * srcFullSize)
            for (src, dst) in zip(srcPointer, dstPointer) {
                memcpy(
                    dst.mData?.advanced(by: Int(from) * sampleSize),
                    src.mData,
                    Int(size) * sampleSize
                )
            }
        }
    }

    static func etaloneGenerate(channels: UInt8 = 2, size: UInt64 = 1000) -> Self? {
        let avFormat = AVAudioFormat(
            commonFormat: AVAudioEngine.AV_DEFAULT_LINEAR_PCM_BIT_DEPTH_FORMAT,
            sampleRate  : AVAudioEngine.AV_DEFAULT_SAMPLERATE,
            channels    : AVAudioChannelCount(channels),
            interleaved : AVAudioEngine.AV_DEFAULT_IS_INTERLEAVED
        )!
        if let avBuffer = Self(pcmFormat: avFormat, frameCapacity: AVAudioFrameCount(size)) {
            avBuffer.frameLength = AVAudioFrameCount(size)
            let channelL = avBuffer.floatChannelData![0]
            let channelR = avBuffer.floatChannelData![1]
            for i in 0 ..< size {
                let sample: Float = Float(String(format: "%.3f", 0.001 * Float(i)))!
                if (channels >= 1) { channelL[Int(i) * avBuffer.stride] = (-1.0...1.0).contains(+sample) ? +sample : 0 }
                if (channels == 2) { channelR[Int(i) * avBuffer.stride] = (-1.0...1.0).contains(-sample) ? -sample : 0 }
            }
            return avBuffer
        }
        return nil
    }

}

How to get a buffer from a file:

let avFile = try! AVAudioFile(forReading: FILE_URL_PIANO)
let avBuffer = try! AVAudioPCMBuffer(file: avFile)!
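If you also want to keep the mixed result, the offline-rendered buffer can be written to disk. A minimal sketch under that assumption; the helper name and the /tmp/mix.caf path are just examples, and you would call it right after the mix is created in init():

import AVFoundation

// Writes an offline-rendered buffer to disk using the buffer's own format settings.
func saveMix(_ mix: AVAudioPCMBuffer, to url: URL = URL(fileURLWithPath: "/tmp/mix.caf")) throws {
    let file = try AVAudioFile(forWriting: url, settings: mix.format.settings)
    try file.write(from: mix)
}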
Topic: Media Technologies SubTopic: Audio Tags:
Mar ’25
Reply to How can I optimize SwiftUI CPU load on frequent updates
Production version (horizontal version):

import SwiftUI

typealias Size = CGFloat

extension Numeric {
    func fixBounds(min: Self = 0, max: Self) -> Self where Self: Comparable {
        if self < min { return min }
        if self > max { return max }
        return self
    }
}

@Observable final class EqState {
    var canvasFrameMinX: Size = 0
    var canvasFrameMaxX: Size = 0
    var levels: [Double] = []
}

@main struct app: App {

    private var eqState = EqState()
    private let eqLevelsCount: Int = 128
    private let eqLevelWidth: Size = 10.0
    private let eqHeight: Size = 150
    private let timeInterval: Double = 1 / 24
    private var timer: Timer!

    var body: some Scene {
        WindowGroup {
            Equalizer(
                height    : self.eqHeight,
                levelWidth: self.eqLevelWidth,
                state     : self.eqState
            )
            .background(.gray)
            .padding(.horizontal, 12)
        }
    }

    init() {
        self.eqState.levels = Array(
            repeating: 0.0,
            count: self.eqLevelsCount
        )
        self.timer = Timer(
            timeInterval: self.timeInterval,
            repeats: true,
            block: self.onTimerTick
        )
        self.timer.tolerance = 0.0
        RunLoop.current.add(
            self.timer,
            forMode: .common
        )
    }

    func onTimerTick(_ : Timer) {
        for index in 0 ..< self.eqState.levels.count {
            self.eqState.levels[index] = Size.random( in: 0...1 )
        }
    }

}

struct Equalizer: View {

    var height: Size
    var levelWidth: Size
    var state: EqState

    var body: some View {
        ScrollView(.horizontal) {
            Canvas { context, size in
                for index in 0 ..< self.state.levels.count {
                    let w = self.levelWidth
                    let x = self.levelWidth * Size(index)
                    if (self.state.canvasFrameMinX ... self.state.canvasFrameMaxX).contains(x) {
                        let level = self.state.levels[index]
                        let value = size.height * level
                        let sliceHeight = size.height / 3
                        let h3 = (value - (sliceHeight * 2)).fixBounds(min: 0, max: sliceHeight)
                        let h2 = (value - (sliceHeight * 1)).fixBounds(min: 0, max: sliceHeight)
                        let h1 = (value - (sliceHeight * 0)).fixBounds(min: 0, max: sliceHeight)
                        let y3 = sliceHeight * 1 - h3
                        let y2 = sliceHeight * 2 - h2
                        let y1 = sliceHeight * 3 - h1
                        if (h3 > 0) { context.fill(Path(CGRect(x: x, y: y3, width: w, height: h3)), with: .color(.red   )) }
                        if (h2 > 0) { context.fill(Path(CGRect(x: x, y: y2, width: w, height: h2)), with: .color(.yellow)) }
                        if (h1 > 0) { context.fill(Path(CGRect(x: x, y: y1, width: w, height: h1)), with: .color(.green )) }
                    }
                }
            }
            .frame(width: self.levelWidth * Size(self.state.levels.count))
            .frame(height: self.height)
        }
        .onScrollGeometryChange(for: Bool.self) { geometry in
            self.state.canvasFrameMinX = geometry.bounds.minX
            self.state.canvasFrameMaxX = geometry.bounds.maxX
            return true
        } action: { _, _ in }
    }

}
Topic: UI Frameworks SubTopic: SwiftUI Tags:
Feb ’25
Reply to What is a performant way to change view offset as user scrolls?
Try using LazyVStack, which does not render elements until they are in the visible area.

ScrollView {
    LazyVStack {
        ForEach(models) { model in
            CardView(model: model)
        }
    }
    .scrollTargetLayout()
}
.onScrollTargetVisibilityChange(idType: Model.ID.self, threshold: 0.2) { onScreenCards in
    // Disable video playback for cards that are offscreen...
}

https://developer.apple.com/documentation/swiftui/view/onscrolltargetvisibilitychange(idtype:threshold:_:)
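For context, here is a minimal self-contained sketch of the same idea; the Model, CardView, and FeedView types below are hypothetical placeholders, not from the original thread:

import SwiftUI

// Hypothetical model and card view, only to make the snippet above compilable.
struct Model: Identifiable {
    let id = UUID()
    let title: String
}

struct CardView: View {
    let model: Model
    var body: some View {
        Text(model.title)
            .frame(maxWidth: .infinity, minHeight: 200)
            .background(.quaternary)
    }
}

struct FeedView: View {
    let models = (1...50).map { Model(title: "Card \($0)") }
    @State private var visibleIDs: Set<Model.ID> = []

    var body: some View {
        ScrollView {
            LazyVStack {
                ForEach(models) { model in
                    CardView(model: model)
                }
            }
            .scrollTargetLayout()
        }
        .onScrollTargetVisibilityChange(idType: Model.ID.self, threshold: 0.2) { onScreenCards in
            // Track which cards are on screen; pause playback for the rest.
            visibleIDs = Set(onScreenCards)
        }
    }
}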
Topic: UI Frameworks SubTopic: SwiftUI Tags:
Feb ’25
Reply to How can I optimize SwiftUI CPU load on frequent updates
This example draws only inside the visible window. CPU load is about 10% with 128 stripes. The gain appears at 2048 stripes, where the load drops from 45% to 35%. It seems to me you can't squeeze out more than this: 10% is the limit, and that is an acceptable result for SwiftUI. You can always reduce the number of stripes (in real life you don't need that many), slow the timer from 1/24 to 1/12, rewrite the component in UIKit/AppKit, or use virtual scrolling, where nothing actually scrolls and changes happen only in the image or in a separate area. In any case, this helped both me and you: I need exactly the same functionality and was interested in how much could be squeezed out of optimization.

import SwiftUI

@main struct app: App {

    static var ITEM_HEIGHT: CGFloat = 10.0

    var equalizerState = EqualizerState()

    @State var canvasMinY: CGFloat = 0
    @State var canvasMaxY: CGFloat = 0

    var body: some Scene {
        WindowGroup {
            ScrollView(.vertical) {
                Canvas { context, size in
                    for index in 0 ..< EqualizerState.MAX_ITEMS {
                        let h = Self.ITEM_HEIGHT
                        let y = h * CGFloat(index)
                        if (self.canvasMinY ... self.canvasMaxY).contains(y) {
                            let value = self.equalizerState.values[index]
                            let wPart = size.width / 3
                            let wFull = size.width * value
                            let x1 = wPart * 0
                            let x2 = wPart * 1
                            let x3 = wPart * 2
                            let w1 = wFull - (wPart * 0)
                            let w2 = wFull - (wPart * 1)
                            let w3 = wFull - (wPart * 2)
                            if (0.00 ... 1.00).contains(value) { context.fill(Path(CGRect(x: x1, y: y, width: w1, height: h)), with: .color(.green )) }
                            if (0.33 ... 1.00).contains(value) { context.fill(Path(CGRect(x: x2, y: y, width: w2, height: h)), with: .color(.yellow)) }
                            if (0.66 ... 1.00).contains(value) { context.fill(Path(CGRect(x: x3, y: y, width: w3, height: h)), with: .color(.red   )) }
                        }
                    }
                }.frame(height: Self.ITEM_HEIGHT * CGFloat(EqualizerState.MAX_ITEMS))
            }.onScrollGeometryChange(for: Bool.self) { geometry in
                self.canvasMinY = geometry.bounds.minY
                self.canvasMaxY = geometry.bounds.maxY
                return true
            } action: { _, _ in }
            .frame(width: 150)
            .background(.gray)
            .padding(.vertical, 12)
        }
    }

}

@Observable final class EqualizerState {

    static public let MAX_ITEMS: Int = 128

    @ObservationIgnored private var timer: Timer? = nil

    var values: [CGFloat] = []

    init() {
        self.values = Array(
            repeating: 0.0,
            count: Self.MAX_ITEMS
        )
        self.timer = Timer(
            timeInterval: 1 / 24,
            repeats: true,
            block: { _ in
                for index in 0 ..< Self.MAX_ITEMS {
                    self.values[index] = CGFloat.random( in: 0...1 )
                }
            }
        )
        self.timer!.tolerance = 0.0
        RunLoop.current.add(
            self.timer!,
            forMode: .common
        )
    }

}
Topic: UI Frameworks SubTopic: SwiftUI Tags:
Feb ’25
Reply to How can I optimize SwiftUI CPU load on frequent updates
I moved the Canvas out of the ForEach, and now CPU load is no more than 10%:

import SwiftUI

@main struct app: App {

    var equalizerState = EqalizerState()

    static var ITEM_HEIGHT: CGFloat = 10.0

    var body: some Scene {
        WindowGroup {
            ScrollView(.vertical) {
                Canvas { context, size in
                    for index in 0 ..< EqalizerState.MAX_ITEMS {
                        let value = self.equalizerState.values[index]
                        let wPart = size.width / 3
                        let wFull = size.width * value
                        let x1 = wPart * 0
                        let x2 = wPart * 1
                        let x3 = wPart * 2
                        let w1 = wFull - (wPart * 0)
                        let w2 = wFull - (wPart * 1)
                        let w3 = wFull - (wPart * 2)
                        let y = Self.ITEM_HEIGHT * CGFloat(index)
                        let h = Self.ITEM_HEIGHT
                        if (0.00 ... 1.00).contains(value) { context.fill(Path(CGRect(x: x1, y: y, width: w1, height: h)), with: .color(.green )) }
                        if (0.33 ... 1.00).contains(value) { context.fill(Path(CGRect(x: x2, y: y, width: w2, height: h)), with: .color(.yellow)) }
                        if (0.66 ... 1.00).contains(value) { context.fill(Path(CGRect(x: x3, y: y, width: w3, height: h)), with: .color(.red   )) }
                    }
                }.frame(height: Self.ITEM_HEIGHT * CGFloat(EqalizerState.MAX_ITEMS))
            }
            .frame(width: 150)
            .background(.gray)
            .padding(.vertical, 12)
        }
    }

}

@Observable final class EqalizerState {

    static public let MAX_ITEMS: Int = 128

    @ObservationIgnored private var timer: Timer? = nil

    var values: [CGFloat] = []

    init() {
        self.values = Array(
            repeating: 0.0,
            count: Self.MAX_ITEMS
        )
        self.timer = Timer(
            timeInterval: 1 / 24,
            repeats: true,
            block: { _ in
                for index in 0 ..< self.values.count {
                    self.values[index] = CGFloat.random( in: 0...1 )
                }
            }
        )
        self.timer!.tolerance = 0.0
        RunLoop.current.add(
            self.timer!,
            forMode: .common
        )
    }

}

It is advisable not to use ScrollView if you can do without it. Environment values are very slow.
Topic: UI Frameworks SubTopic: SwiftUI Tags:
Feb ’25
Reply to How can I optimize SwiftUI CPU load on frequent updates
I am also building a music app in SwiftUI and ran into the same performance problem. I have rewritten your code in a modern style; here it is:

import SwiftUI

@main struct app: App {

    var peakmeterState = PeakmeterState()

    static var ITEM_HEIGHT: CGFloat = 10.0

    var body: some Scene {
        WindowGroup {
            GeometryReader { geometry in
                ScrollView(.vertical) {
                    VStack(alignment: .leading, spacing: 1) {
                        let _ : Bool = {
                            print("FRAME REDRAW \(self.peakmeterState.frameNum)")
                            return true
                        }()
                        ForEach(0 ..< PeakmeterState.MAX_ITEMS, id: \.self) { index in
                            let width = geometry.size.width
                            let value = self.peakmeterState.peakValues[index]
                            Canvas { context, size in
                                context.fill(Path(CGRect(x: width / 3 * 0, y: 0, width: width / 3, height: Self.ITEM_HEIGHT)), with: .color(.green))
                                context.fill(Path(CGRect(x: width / 3 * 1, y: 0, width: width / 3, height: Self.ITEM_HEIGHT)), with: .color(.yellow))
                                context.fill(Path(CGRect(x: width / 3 * 2, y: 0, width: width / 3, height: Self.ITEM_HEIGHT)), with: .color(.red))
                            }
                            .frame(width: width * value, height: Self.ITEM_HEIGHT)
                            .animation(.spring(duration: 0.1), value: width * value)
                        }
                    }
                }
            }
            .frame(width: 150)
            .background(.gray)
            .padding(.vertical, 12)
        }
    }

}

@Observable final class PeakmeterState {

    static public let MAX_ITEMS: Int = 128

    @ObservationIgnored private var timer: Timer? = nil
    @ObservationIgnored var peakValues: [CGFloat] = []

    var frameNum: Int = 0

    init() {
        self.peakValues = Array(
            repeating: 0.0,
            count: Self.MAX_ITEMS
        )
        self.timer = Timer(
            timeInterval: 1 / 5,
            repeats: true,
            block: { _ in
                for index in 0 ..< self.peakValues.count {
                    self.peakValues[index] = CGFloat.random( in: 0...1 )
                }
                self.frameNum += 1
            }
        )
        self.timer!.tolerance = 0.0
        RunLoop.current.add(
            self.timer!,
            forMode: .common
        )
    }

}

You can reduce the load by:
- Using final on classes to speed up method calls.
- Not modifying arrays in loops inside @Observable classes, because that causes unnecessary repaints (mark them @ObservationIgnored and change another variable in the class after the array modification, as in the sketch after this list).
- Reducing the number of elements.
- Making the timer slower and removing the animation.
- Rewriting the component in UIKit / AppKit.
- Maybe using SpriteKit (high-performance 2D content with smooth animations, or a game built with its high-level set of 2D tools).

In my project even a simple cursor redraw eats up 15% of CPU power. Nothing helps, even though the number of redraw frames was reduced to the minimum.
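To isolate the @ObservationIgnored pattern mentioned above, here is a minimal sketch under the same assumptions as the code in this reply: the array itself is excluded from observation, and a single counter is the only property that triggers a redraw.

import SwiftUI

// Only `frameNum` is observed; mutating `values` alone never triggers a view update.
@Observable final class MeterState {
    @ObservationIgnored var values: [CGFloat] = Array(repeating: 0, count: 128)
    var frameNum: Int = 0

    func update() {
        for index in 0 ..< values.count {
            values[index] = CGFloat.random(in: 0...1)
        }
        frameNum += 1 // one observable change per tick instead of 128
    }
}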
Topic: UI Frameworks SubTopic: SwiftUI Tags:
Feb ’25
Reply to How to use Dicitonary in SwiftUI ForEach to make a List
The best way to iterate the key/value pairs of ANY type of Dictionary in a SwiftUI ForEach:

import SwiftUI

struct ComplexStruct: Hashable {
    let a: String
    let b: String
}

let dict = [
    "key1": "value1",
    "key2": "value2",
]

let dictComplex = [
    "key1": ComplexStruct(a: "value 1.1", b: "value 1.2"),
    "key2": ComplexStruct(a: "value 2.1", b: "value 2.2"),
]

@main struct app: App {
    var body: some Scene {
        WindowGroup {
            ForEach(Array(dict), id: \.key) { key, value in
                Text("\(key) = \(value)")
            }
            ForEach(Array(dictComplex), id: \.key) { key, value in
                Text("\(key) = \(value.a):\(value.b)")
            }
        }
    }
}
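One caveat worth noting: a Dictionary has no defined order, so the rows may appear in any order and can shuffle between runs. If a stable order matters, sort the pairs before handing them to ForEach. A small sketch using the same dict as above (the SortedDictList wrapper is just an illustration):

import SwiftUI

// Sorting by key gives the ForEach a stable, deterministic order.
struct SortedDictList: View {
    var body: some View {
        ForEach(dict.sorted(by: { $0.key < $1.key }), id: \.key) { key, value in
            Text("\(key) = \(value)")
        }
    }
}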
Topic: UI Frameworks SubTopic: SwiftUI Tags:
Jan ’25
Reply to How to specify bit rate when writing with AVAudioFile
Full example of generation and saving:

import AVFoundation
import Foundation

struct FMSynth {

    static let sampleRate = 44100.0
    static let carrierFrequency: Float32 = 440.0
    static let unitVelocity = Float32(2.0 * .pi / sampleRate)
    static let modulatorFrequency: Float32 = 679.0
    static let channels: UInt32 = 1

    let modulatorAmplitude: Float32 = 0.8
    let carrierVelocity = FMSynth.carrierFrequency * FMSynth.unitVelocity
    let modulatorVelocity = FMSynth.modulatorFrequency * FMSynth.unitVelocity
    let samplesPerBuffer: AVAudioFrameCount = 1024 * 16
    let engine = AVAudioEngine()
    let player = AVAudioPlayerNode()
    let format = AVAudioFormat(
        standardFormatWithSampleRate: FMSynth.sampleRate,
        channels: FMSynth.channels
    )

    func generateAndPlay() {
        do {
            if let buffer = AVAudioPCMBuffer(pcmFormat: format!, frameCapacity: samplesPerBuffer) {

                // generate
                let channelL = buffer.floatChannelData?[0]
                let channelR = buffer.format.channelCount > 1 ? buffer.floatChannelData?[1] : nil // avoid indexing a second channel on a mono buffer
                var sampleTime: Float32 = 0
                for sampleIndex in 0..<Int(samplesPerBuffer) {
                    let sample = sin(carrierVelocity * sampleTime + modulatorAmplitude * sin(modulatorVelocity * sampleTime))
                    channelL?[sampleIndex] = sample
                    channelR?[sampleIndex] = sample
                    sampleTime += 1.0
                }
                buffer.frameLength = samplesPerBuffer

                // save to file
                let settings: [String: Any] = [
                    AVFormatIDKey         : buffer.format.settings[AVFormatIDKey] ?? kAudioFormatLinearPCM,
                    AVNumberOfChannelsKey : buffer.format.settings[AVNumberOfChannelsKey] ?? 1,
                    AVSampleRateKey       : buffer.format.settings[AVSampleRateKey] ?? 44100,
                    AVLinearPCMBitDepthKey: buffer.format.settings[AVLinearPCMBitDepthKey] ?? 16
                ]
                let fileURL = URL(filePath: "/tmp/out.wav")
                let file = try AVAudioFile(forWriting: fileURL, settings: settings, commonFormat: .pcmFormatFloat32, interleaved: true)
                try file.write(from: buffer)
                file.close()

                // play
                engine.attach(player)
                engine.connect(player, to: engine.mainMixerNode, format: format)
                try engine.start()
                player.scheduleBuffer(buffer)
                player.play()
            }
        } catch {
            print("Error: \(error).")
        }
    }

}

let fmSynth = FMSynth()
fmSynth.generateAndPlay()
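Since the original question was about specifying a bit rate: a bit rate only applies to compressed formats, not to the linear PCM written in the WAV example above. A minimal sketch of writing the same kind of buffer as AAC with an explicit encoder bit rate (the 128 kbit/s value, the helper name, and the /tmp/out.m4a path are just examples):

import AVFoundation

// AVEncoderBitRateKey is honored for compressed formats such as AAC.
func writeAAC(_ buffer: AVAudioPCMBuffer) throws {
    let aacSettings: [String: Any] = [
        AVFormatIDKey         : kAudioFormatMPEG4AAC,
        AVSampleRateKey       : buffer.format.sampleRate,
        AVNumberOfChannelsKey : buffer.format.channelCount,
        AVEncoderBitRateKey   : 128_000
    ]
    let aacURL = URL(filePath: "/tmp/out.m4a")
    let aacFile = try AVAudioFile(forWriting: aacURL, settings: aacSettings)
    try aacFile.write(from: buffer) // AVAudioFile encodes the PCM buffer to AAC while writing
}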
Topic: Media Technologies SubTopic: Audio Tags:
Dec ’24
Reply to App Group Not working as intended after updating to macOS 15 beta.
macOS Sequoia has started to enforce security fully. The issue is with App Groups in Xcode. In Xcode, go to the following section (repeat for each target): Signing & Capabilities → Targets → TARGET_NAME → App Groups → App Group, and specify the following:

$(TeamIdentifierPrefix)myFirm.myName

A fictitious ID like this does not work:

myGroupID.myFirm.myName

In the program code, specify the real group ID (with the numeric Team ID prefix):

let storeDirectory = FileManager.default.containerURL(
    forSecurityApplicationGroupIdentifier: "0123456789.myFirm.myName"
)

After recompiling the app, the problem disappears.
Topic: Privacy & Security SubTopic: General Tags:
Nov ’24
Reply to Sequoia Group Container for Mac Catalyst Apps
I had the same problem after updating to Sequoia: every time I launched the application, a window appeared asking me to approve the action "Application wants to access the data of another application". I changed the values in my application to these and everything worked:

File "*.entitlements":

  <key>com.apple.security.application-groups</key>
  <array>
-     <string>maxrys.js-blocker.group</string>
+     <string>$(TeamIdentifierPrefix)maxrys.js-blocker</string>
  </array>

File "modelDomains.swift":

public class Domains: NSManagedObject {

    @NSManaged var name: String

    static let context: NSManagedObjectContext = {
-       let appGrpName = "maxrys.js-blocker.group"
+       let appGrpName = "97CZR6J379.maxrys.js-blocker"
        let storeDirectory = FileManager.default.containerURL(forSecurityApplicationGroupIdentifier: appGrpName)!
        let storeURL = storeDirectory.appendingPathComponent("Database.sqlite")
        let storeDescription = NSPersistentStoreDescription(url: storeURL)
        let container = NSPersistentContainer(name: "Model")
        container.persistentStoreDescriptions = [storeDescription]
        container.loadPersistentStores(completionHandler: { (storeDescription, error) in
            if let error = error as NSError? {
                fatalError("Unresolved error \(error), \(error.userInfo)")
            }
        })
        return container.viewContext
    }()

    convenience init() {
        self.init(context: Domains.context)
    }

    static func selectAll(orderBy: String = #keyPath(Domains.name), ascending: Bool = true) -> [Domains] {
        let fetchRequest = NSFetchRequest<Domains>(entityName: "Domains")
        let sortDescriptorKey = NSSortDescriptor(key: orderBy, ascending: ascending)
        fetchRequest.sortDescriptors = [sortDescriptorKey]
        return try! self.context.fetch( fetchRequest )
    }

}

You can get your Team ID here: https://developer.apple.com/account → Membership details → Team ID
Topic: App & System Services SubTopic: Core OS Tags:
Sep ’24