Hi,
I am trying to export the following SwiftUI view to PDF using UIGraphicsPDFRenderer.
The problem is that the exported PDF somehow has a margin at the top.
This is the code I am using:
import SwiftUI

struct ContentView: View {
    var body: some View {
        let pressGesture = TapGesture()
            .onEnded { _ in
                let documentDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!
                let outputFileURL = documentDirectory.appendingPathComponent("SwiftUI.pdf")
                let pdfVC = UIHostingController(rootView: self)
                let pageSize = CGSize(width: 2.625 * 72.0, height: 1.0 * 72.0)
                pdfVC.view.frame = CGRect(origin: .zero, size: pageSize)
                let rootVC = UIApplication.shared.windows.first?.rootViewController
                rootVC?.addChild(pdfVC)
                rootVC?.view.insertSubview(pdfVC.view, at: 0)
                let pdfRenderer = UIGraphicsPDFRenderer(bounds: CGRect(x: 0, y: 0, width: pageSize.width, height: pageSize.height))
                DispatchQueue.main.async {
                    do {
                        try pdfRenderer.writePDF(to: outputFileURL, withActions: { context in
                            context.beginPage()
                            pdfVC.view.layer.render(in: context.cgContext)
                        })
                        print("wrote file to: \(outputFileURL.path)")
                        pdfVC.removeFromParent()
                        pdfVC.view.removeFromSuperview()
                    } catch {
                        print("Could not create PDF file: \(error.localizedDescription)")
                    }
                }
            }

        VStack {
            Text("Hello")
        }
        .frame(width: 2.625 * 72.0, height: 1.0 * 72.0, alignment: .center)
        .gesture(pressGesture)
        .background(Color.red)
    }
}
May I know what I am doing wrong? Thanks in advance.
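In case it matters, I am also considering ImageRenderer (iOS 16+) as an alternative path, since it draws the SwiftUI view straight into a PDF context without a hosting controller. This is only a sketch of what I mean, untested against the margin issue:

import SwiftUI

// A minimal sketch, assuming iOS 16+. ImageRenderer draws the SwiftUI view
// directly into a Core Graphics PDF context, so no UIHostingController
// (and no safe-area inset) is involved.
@MainActor
func exportPDF(to url: URL) {
    let pageSize = CGSize(width: 2.625 * 72.0, height: 1.0 * 72.0)
    let renderer = ImageRenderer(content: ContentView())
    renderer.render { size, renderInContext in
        var mediaBox = CGRect(origin: .zero, size: pageSize)
        guard let pdfContext = CGContext(url as CFURL, mediaBox: &mediaBox, nil) else { return }
        pdfContext.beginPDFPage(nil)
        renderInContext(pdfContext)
        pdfContext.endPDFPage()
        pdfContext.closePDF()
    }
}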
Hi,
Does anyone know the specs of the front camera and LiDAR on the Apple Vision Pro?
Hi,
Does anyone have any ideas on how to save a generated scene/mesh in a RealityView to USDZ?
And is there any way to save animations?
Is there a way to move a rigged character with its armature bones in ARKit/RealityKit?
I am trying to do this:
When I try to move the usdz robot provided in
https://developer.apple.com/documentation/arkit/arkit_in_ios/content_anchors/capturing_body_motion_in_3d
using JointTransform, it gives me the following:
I see the documentation on rigging a model for motion capture, but is movement through armature bones only available through third-party software, or can it be done in RealityKit/ARKit/RealityView?
https://developer.apple.com/documentation/arkit/arkit_in_ios/content_anchors/rigging_a_model_for_motion_capture
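For context, here is a minimal sketch of what I have been attempting (the "shoulder" joint name is an assumption on my part; the actual joint names depend on the model's skeleton and can be inspected via jointNames):

import RealityKit

// Sketch: rotate one joint of a skinned ModelEntity by rewriting its
// jointTransforms array. The joint-name match is a placeholder.
func rotateJoint(of robot: ModelEntity) {
    guard let index = robot.jointNames.firstIndex(where: { $0.contains("shoulder") }) else { return }
    var transforms = robot.jointTransforms
    transforms[index].rotation = simd_quatf(angle: .pi / 4, axis: [0, 0, 1])
    robot.jointTransforms = transforms
}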
In the visionOS simulator, a ContactPicker for selecting multiple contacts is shown without the Done button. Can I assume this behavior will be OK on an actual Vision Pro? I could not get a list of contacts selected.
On iOS, the Done button is shown correctly, as follows:
import SwiftUI
import ContactsUI
import Combine

struct ContactPickerView: View {
    @State private var pickedNumber: String?
    @StateObject private var coordinator = Coordinator()

    var body: some View {
        VStack {
            Button("Open Contact Picker") {
                openContactPicker()
            }
            .padding()

            Text(pickedNumber ?? "")
                .padding()
        }
        .onReceive(coordinator.$pickedNumber, perform: { phoneNumber in
            self.pickedNumber = phoneNumber
        })
        .environmentObject(coordinator)
    }

    func openContactPicker() {
        let contactPicker = CNContactPickerViewController()
        contactPicker.delegate = coordinator
        let scenes = UIApplication.shared.connectedScenes
        let windowScene = scenes.first as? UIWindowScene
        let window = windowScene?.windows.first
        window?.rootViewController?.present(contactPicker, animated: true, completion: nil)
    }

    class Coordinator: NSObject, ObservableObject, CNContactPickerDelegate {
        @Published var pickedNumber: String?

        func contactPicker(_ picker: CNContactPickerViewController, didSelect contacts: [CNContact]) {
            print(contacts)
            contacts.forEach { contact in
                for number in contact.phoneNumbers {
                    let phoneNumber = number.value
                    print("number is = \(phoneNumber)")
                }
            }
        }
    }
}
We have an app that saves UIColor to a file through NSKeyedArchiver.archivedData.
Now we are trying to port the app to the Mac and would like to load the UIColor. But on the Mac we are using NSColor. Are there any recommendations on what I can do?
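Our current thinking (an assumption, not a confirmed recommendation) is to stop archiving the platform color object and persist its components instead, so both UIColor and NSColor can round-trip through the same data. (If the Mac port were Catalyst, UIColor itself would presumably still work.) A minimal sketch:

import Foundation
#if canImport(UIKit)
import UIKit
typealias PlatformColor = UIColor
#else
import AppKit
typealias PlatformColor = NSColor
#endif

// Sketch (assumption): persist RGBA components instead of the platform
// color object so both UIKit and AppKit can read the same file.
struct CodableColor: Codable {
    var red: CGFloat = 0, green: CGFloat = 0, blue: CGFloat = 0, alpha: CGFloat = 0

    init(_ color: PlatformColor) {
        #if canImport(UIKit)
        _ = color.getRed(&red, green: &green, blue: &blue, alpha: &alpha)
        #else
        // NSColor.getRed traps unless the color is in an RGB color space.
        let rgb = color.usingColorSpace(.sRGB) ?? color
        rgb.getRed(&red, green: &green, blue: &blue, alpha: &alpha)
        #endif
    }

    var platformColor: PlatformColor {
        PlatformColor(red: red, green: green, blue: blue, alpha: alpha)
    }
}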
Can anyone advise a newbie on how to apply a displacement texture map image to a PhysicallyBasedMaterial in RealityKit?
I applied the diffuse texture to PhysicallyBasedMaterial.baseColor, the roughness texture to PhysicallyBasedMaterial.roughness, and the normal texture to PhysicallyBasedMaterial.normal, but have no idea what to do with a displacement texture.
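For reference, this is how I am assigning the slots that do exist (the texture names are placeholders). As far as I can tell, PhysicallyBasedMaterial has no displacement slot, which is part of why I am asking:

import RealityKit

// Sketch of the working slots. Texture names are placeholders; as far as
// I can tell there is no displacement property on PhysicallyBasedMaterial.
func makeMaterial() -> PhysicallyBasedMaterial {
    var material = PhysicallyBasedMaterial()
    if let diffuse = try? TextureResource.load(named: "diffuse") {
        material.baseColor = .init(texture: .init(diffuse))
    }
    if let roughness = try? TextureResource.load(named: "roughness") {
        material.roughness = .init(texture: .init(roughness))
    }
    if let normal = try? TextureResource.load(named: "normal") {
        material.normal = .init(texture: .init(normal))
    }
    return material
}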
I understand that a company's hardware vendor ID is required when requesting entitlements for DriverKit. Does this mean DriverKit is only targeted at hardware vendors?
Does Apple allow third parties (not the hardware vendor) to receive DriverKit entitlements? Our company is exploring the development of generic open-source drivers on iPad for label printers such as those made by Zebra. Is this allowed?
I am porting an existing app to iOS 16. It contains a NavigationView that looks like the following:
var body: some View {
    NavigationView {
        VStack {
            Text("Main Content")
                .frame(height: 400)
            NavigationView {
                NavigationLink(destination:
                    Text("Navigation Detail")
                ) {
                    Text("Navigation Link")
                }
                .navigationTitle("Properties Frame")
                .navigationBarTitleDisplayMode(.inline)
            }
            .navigationViewStyle(.stack)
        }
        .navigationTitle("Main Frame")
        .navigationBarTitleDisplayMode(.inline)
    }
    .navigationViewStyle(.stack)
}
Clicking on Navigation Link gives the following (as originally intended):
When the above is ported to NavigationStack:
var body: some View {
    NavigationStack {
        VStack {
            Text("Main Content")
                .frame(height: 400)
            NavigationStack {
                NavigationLink(destination:
                    Text("Navigation Detail")
                ) {
                    Text("Navigation Link")
                }
                .navigationTitle("Properties Frame")
                .navigationBarTitleDisplayMode(.inline)
            }
        }
        .navigationTitle("Main Frame")
        .navigationBarTitleDisplayMode(.inline)
    }
}
it gives a full-screen Properties Frame when clicking on Navigation Link:
I would be grateful for any suggestions on how I can achieve the same behavior (a half-screen Properties Frame) with NavigationStack as with NavigationView. Leaving the code as NavigationView (deprecated) in iOS 16 does still work.
Or does it mean I should move away from this half-screen design? It is still useful to see the changes to the Main Content while changing the properties.
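One alternative I am considering (an assumption about whether it fits the design, not a confirmed equivalent) is presenting the properties in a sheet with a .medium detent, which also yields a half-height panel on iOS 16. A minimal sketch:

import SwiftUI

// Sketch, assuming iOS 16+: a sheet with a .medium detent covers roughly
// half the screen and leaves the Main Content visible above it.
struct MainView: View {
    @State private var showProperties = false

    var body: some View {
        NavigationStack {
            Text("Main Content")
                .toolbar {
                    Button("Properties") { showProperties = true }
                }
                .sheet(isPresented: $showProperties) {
                    NavigationStack {
                        Text("Navigation Detail")
                            .navigationTitle("Properties Frame")
                            .navigationBarTitleDisplayMode(.inline)
                    }
                    .presentationDetents([.medium])
                }
                .navigationTitle("Main Frame")
                .navigationBarTitleDisplayMode(.inline)
        }
    }
}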
We are hoping to port our app, which requires printing, to visionOS. Can I check whether visionOS supports AirPrint/printing? If anyone has already done it, is there anything we should watch out for?
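If it helps frame the question, this is the kind of runtime check we would rely on (an assumption on my part that UIKit's printing API is the relevant surface; I have not verified it on visionOS):

import UIKit

// A minimal runtime probe for printing support on the current device.
func checkPrintingSupport() {
    if UIPrintInteractionController.isPrintingAvailable {
        print("Printing is available on this device")
    } else {
        print("Printing is not available on this device")
    }
}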
I have the following TaskExecutor code in Swift 6 and am getting this error:
Passing closure as a sending parameter risks causing data races between main actor-isolated code and concurrent execution of the closure.
May I know the best way to approach this?
This is the default code generated by Xcode when creating a Vision Pro app using Metal as the immersive renderer.
Renderer
@MainActor
static func startRenderLoop(_ layerRenderer: LayerRenderer, appModel: AppModel) {
    Task(executorPreference: RendererTaskExecutor.shared) { // Error
        let renderer = Renderer(layerRenderer, appModel: appModel)
        await renderer.startARSession()
        await renderer.renderLoop()
    }
}

final class RendererTaskExecutor: TaskExecutor {
    private let queue = DispatchQueue(label: "RenderThreadQueue", qos: .userInteractive)

    func enqueue(_ job: UnownedJob) {
        queue.async {
            job.runSynchronously(on: self.asUnownedSerialExecutor())
        }
    }

    func asUnownedSerialExecutor() -> UnownedTaskExecutor {
        return UnownedTaskExecutor(ordinary: self)
    }

    static let shared: RendererTaskExecutor = RendererTaskExecutor()
}
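One direction I have been experimenting with (an assumption, not a verified fix) is taking the function off the main actor so the closure no longer captures main-actor-isolated state:

// Sketch: whether this compiles as-is depends on LayerRenderer and
// AppModel being Sendable (or on passing the task only the values it
// actually needs).
nonisolated static func startRenderLoop(_ layerRenderer: LayerRenderer, appModel: AppModel) {
    Task(executorPreference: RendererTaskExecutor.shared) {
        let renderer = Renderer(layerRenderer, appModel: appModel)
        await renderer.startARSession()
        await renderer.renderLoop()
    }
}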
Hi,
I enabled Mac Catalyst (macOS 12.1) for an app and set Optimize Interface for Mac, with the following code. The code compiles with "Build Succeeded" and runs successfully on an M1 Mac. However, the editor flags the following:
'horizontalSizeClass' is unavailable in macOS
along with other build-time issues. If I use UIViewControllerRepresentable, it gives:
Cannot find type 'UIViewControllerRepresentable' in scope
The app does run OK. Am I missing something?
import SwiftUI

struct ContentView: View {
    @Environment(\.horizontalSizeClass) var horizontalSizeClass: UserInterfaceSizeClass?

    var body: some View {
        if horizontalSizeClass == .compact {
            Text("Compact")
        } else {
            Text("Regular")
        }
    }
}
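One workaround I have been trying (an assumption: the editor may be type-checking against the macOS SDK rather than the Catalyst one) is guarding the UIKit-only symbols with conditional compilation. A minimal sketch:

import SwiftUI

struct SizeClassView: View {
    // horizontalSizeClass is UIKit-only; os(iOS) is also true for
    // Mac Catalyst builds, so this guards against a pure-macOS build.
    #if os(iOS)
    @Environment(\.horizontalSizeClass) var horizontalSizeClass: UserInterfaceSizeClass?
    #endif

    var body: some View {
        #if os(iOS)
        if horizontalSizeClass == .compact {
            Text("Compact")
        } else {
            Text("Regular")
        }
        #else
        Text("Regular")
        #endif
    }
}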
Thanks in advance.
Hi,
I am having trouble mixing a Core Image CIKernel with a surface shader/geometry modifier in the same project. I am getting the following error for my surface shader when I compile with the -fcikernel option:
RealityFoundation.CustomMaterialError.geometryModifierFunctionNotFound
To reproduce the problem:
1. Compile the sample from Apple, Altering RealityKit Rendering with Shader Functions: https://developer.apple.com/documentation/realitykit/altering_realitykit_rendering_with_shader_functions
2. Under Metal Compiler - Build Options, set Other Metal Compiler Flags to -fcikernel; under Metal Linker - Build Options, set Other Metal Linker Flags to -fcikernel.
3. Run; RealityView -> SetupRobots throws:
RealityFoundation.CustomMaterialError.geometryModifierFunctionNotFound
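The direction I am currently exploring (an assumption, not a verified fix) is compiling the CIKernel sources into their own metallib with -fcikernel, leaving the RealityKit shaders in the default library without the flag, and loading the kernel from that separate library at runtime:

import CoreImage

// Sketch: "CIKernels.metallib" and "myColorKernel" are hypothetical names
// for a separate Metal library target built with -fcikernel.
func loadKernel() throws -> CIColorKernel? {
    guard let url = Bundle.main.url(forResource: "CIKernels", withExtension: "metallib") else {
        return nil
    }
    let data = try Data(contentsOf: url)
    return try CIColorKernel(functionName: "myColorKernel", fromMetalLibraryData: data)
}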
Any suggestions would be great. Thank you in advance.
The following RealityView ModelEntity animated-text code works in visionOS 1.0. In visionOS 2.0, running the same piece of code, the model entity's move duration does not seem to work. Are there changes to the way it works that I am missing? Thank you in advance.
RealityView { content in
    let textEntity = generateMovingText()
    content.add(textEntity)
    _ = try? await arkitSession.run([worldTrackingProvider])
} update: { content in
    guard let entity = content.entities.first(where: { $0.name == .textEntityName }) else { return }

    if let pose = worldTrackingProvider.queryDeviceAnchor(atTimestamp: CACurrentMediaTime()) {
        entity.position = .init(
            x: pose.originFromAnchorTransform.columns.3.x,
            y: pose.originFromAnchorTransform.columns.3.y,
            z: pose.originFromAnchorTransform.columns.3.z
        )
    }

    if let modelEntity = entity as? ModelEntity {
        let rotation = Transform(rotation: simd_quatf(angle: -.pi / 6, axis: [1, 0, 0])) // Adjust angle as needed
        modelEntity.transform = Transform(matrix: rotation.matrix * modelEntity.transform.matrix)

        let animationDuration: Float = 60.0 // Adjust the duration as needed
        let moveUp = Transform(scale: .one, translation: [0, 2, 0])
        modelEntity.move(to: moveUp, relativeTo: modelEntity, duration: TimeInterval(animationDuration), timingFunction: .linear)
    }
}
The source is available at the following:
https://github.com/Sebulec/crawling-text
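One thing I am wondering about (an assumption, not a confirmed cause): if visionOS 2.0 invokes the update closure more frequently, the move(to:) call would be restarted on every invocation, which would mask the 60-second duration. A sketch that starts the move only once, using a hypothetical marker component:

import RealityKit

// Hypothetical marker component so the move is started only once.
// (Register it once at startup: MoveStartedTag.registerComponent())
struct MoveStartedTag: Component {}

// Called from the update closure in place of the unconditional move(to:).
func startMoveIfNeeded(_ modelEntity: ModelEntity) {
    guard modelEntity.components[MoveStartedTag.self] == nil else { return }
    modelEntity.components.set(MoveStartedTag())

    let moveUp = Transform(scale: .one, translation: [0, 2, 0])
    modelEntity.move(to: moveUp, relativeTo: modelEntity, duration: 60.0, timingFunction: .linear)
}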
Hi,
I have installed Xcode 15 beta 2. Creating a visionOS app and running it gives me the following error. I have submitted an issue through Apple Feedback.
Is anyone facing the same issue, and does anyone have a quick workaround for me? Thanks.
Unable to boot the Simulator.
Domain: NSPOSIXErrorDomain
Code: 60
Failure Reason: launchd failed to respond.
User Info: {
DVTErrorCreationDateKey = "2023-06-22 00:13:12 +0000";
IDERunOperationFailingWorker = "_IDEInstalliPhoneSimulatorWorker";
Session = "com.apple.CoreSimulator.SimDevice.133D85C4-93F9-4BC7-9F7F-3AD80E15DCD5";
}
Failed to start launchd_sim: could not bind to session, launchd_sim may have crashed or quit responding
Domain: com.apple.SimLaunchHostService.RequestError
Code: 4
Event Metadata: com.apple.dt.IDERunOperationWorkerFinished : {
"device_model" = "RealityDevice14,1";
"device_osBuild" = "1.0 (21N5165g)";
"device_platform" = "com.apple.platform.xrsimulator";
"dvt_coredevice_version" = "325.3";
"dvt_mobiledevice_version" = "1643.0.15.505.1";
"launchSession_schemeCommand" = Run;
"launchSession_state" = 1;
"launchSession_targetArch" = arm64;
"operation_duration_ms" = 10345;
"operation_errorCode" = 60;
"operation_errorDomain" = NSPOSIXErrorDomain;
"operation_errorWorker" = "_IDEInstalliPhoneSimulatorWorker";
"operation_name" = IDERunOperationWorkerGroup;
"param_consoleMode" = 0;
"param_debugger_attachToExtensions" = 0;
"param_debugger_attachToXPC" = 1;
"param_debugger_type" = 3;
"param_destination_isProxy" = 0;
"param_destination_platform" = "com.apple.platform.xrsimulator";
"param_diag_MainThreadChecker_stopOnIssue" = 0;
"param_diag_MallocStackLogging_enableDuringAttach" = 0;
"param_diag_MallocStackLogging_enableForXPC" = 1;
"param_diag_allowLocationSimulation" = 1;
"param_diag_checker_tpc_enable" = 1;
"param_diag_gpu_frameCapture_enable" = 0;
"param_diag_gpu_shaderValidation_enable" = 0;
"param_diag_gpu_validation_enable" = 0;
"param_diag_memoryGraphOnResourceException" = 0;
"param_diag_queueDebugging_enable" = 1;
"param_diag_runtimeProfile_generate" = 0;
"param_diag_sanitizer_asan_enable" = 0;
"param_diag_sanitizer_tsan_enable" = 0;
"param_diag_sanitizer_tsan_stopOnIssue" = 0;
"param_diag_sanitizer_ubsan_stopOnIssue" = 0;
"param_diag_showNonLocalizedStrings" = 0;
"param_diag_viewDebugging_enabled" = 1;
"param_diag_viewDebugging_insertDylibOnLaunch" = 1;
"param_install_style" = 0;
"param_launcher_UID" = 2;
"param_launcher_allowDeviceSensorReplayData" = 0;
"param_launcher_kind" = 0;
"param_launcher_style" = 0;
"param_launcher_substyle" = 0;
"param_runnable_appExtensionHostRunMode" = 0;
"param_runnable_productType" = "com.apple.product-type.application";
"param_testing_launchedForTesting" = 0;
"param_testing_suppressSimulatorApp" = 0;
"param_testing_usingCLI" = 0;
"sdk_canonicalName" = "xrsimulator1.0";
"sdk_osVersion" = "1.0";
"sdk_variant" = xrsimulator;
}
System Information
macOS Version 14.0 (Build 23A5276g)
Xcode 15.0 (22181.22) (Build 15A5161b)
Timestamp: 2023-06-22T08:13:12+08:00