import SwiftUI
import TipKit

struct ChatRoomView: View {
    @StateObject private var socketManager = SocketIOManager()
    @State private var inputText: String = ""
    @StateObject var viewModel = SignInWithAppleViewModel()
    @Binding var isCall: Bool
    @State private var isSheet = false
    @State private var showView = false
    var learnListTip = KeyTip()
    @Binding var showShareSheet: Bool
    @Binding var codeShare: String

    var body: some View {
        NavigationStack {
            VStack {
                if let roomCode = socketManager.roomCode {
                    // A room exists: show the room key, share button, tip, and messages.
                    ZStack {
                        VStack {
                            HStack {
                                Text("Room Key: \(roomCode)")
                                    .font(.title)
                                    .onAppear {
                                        codeShare = roomCode
                                        self.isCall = true
                                    }
                                Button(action: {
                                    self.showShareSheet = true
                                }, label: {
                                    Image(systemName: "square.and.arrow.up.fill")
                                        .accessibilityLabel("Share")
                                })
                            }
                            .padding(20)
                            TipView(learnListTip, arrowEdge: .top)
                                .glassBackgroundEffect()
                                .offset(z: 20)
                            Spacer()
                        }
                        List(socketManager.messages, id: \.self) { message in
                            Text(message)
                        }
                        TextField("input", text: $inputText)
                        Button("send") {
                            socketManager.sendMessage(roomCode: roomCode, message: inputText)
                            inputText = ""
                        }
                    }
                    .sheet(isPresented: $showShareSheet) {
                        let shareContent = "Open SpatialCall, Join this Room, Key is: \(codeShare)"
                        ActivityView(activityItems: [shareContent])
                    }
                } else {
                    // No room yet: offer "Add Room" and "Join Room" buttons.
                    HStack {
                        Button(action: {
                            withAnimation {
                                socketManager.createRoom()
                            }
                        }, label: {
                            VStack {
                                Image(systemName: "phone.circle.fill")
                                    .symbolRenderingMode(.multicolor)
                                    .symbolEffect(.appear, isActive: !showView)
                                    .font(.largeTitle)
                                Text("Add Room")
                                    .font(.title3)
                            }
                        })
                        .buttonStyle(.borderless)
                        .buttonBorderShape(.roundedRectangle)
                        .padding(.horizontal, 30)
                        .glassBackgroundEffect()
                        .offset(z: 20)
                        .scaleEffect(1.5)
                        .padding(60)
                        Button(action: {
                            withAnimation {
                                self.isSheet = true
                            }
                        }, label: {
                            VStack {
                                Image(systemName: "phone.badge.checkmark")
                                    .symbolRenderingMode(.multicolor)
                                    .symbolEffect(.appear, isActive: !showView)
                                    .font(.largeTitle)
                                Text("Join Room")
                                    .font(.title3)
                            }
                        })
                        .buttonStyle(.borderless)
                        .buttonBorderShape(.roundedRectangle)
                        .padding(.horizontal, 30)
                        .glassBackgroundEffect()
                        .offset(z: 20)
                        .scaleEffect(1.5)
                        .padding(70)
                    }
                }
            }
            .onAppear {
                DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) {
                    withAnimation {
                        self.showView = true
                    }
                }
            }
            .sheet(isPresented: $isSheet) {
                VStack {
                    Text("Join Room")
                        .font(.largeTitle)
                    Text("You need to get the key to the room.")
                    TextField("Key", text: $inputText)
                        .padding(30)
                        .textFieldStyle(.roundedBorder)
                    Button(action: {
                        socketManager.joinRoom(roomCode: inputText)
                        self.isSheet = false
                    }, label: {
                        Text("Join Room")
                            .font(.title3)
                    })
                    .padding(50)
                }
                .padding()
            }
            .sheet(isPresented: $socketManager.showRoomNotFoundAlert) {
                Text("The room does not exist. Please check whether the Key you entered is correct.")
                    .font(.title)
                    .frame(width: 500)
                    .padding()
                Button(action: {
                    self.socketManager.showRoomNotFoundAlert = false
                }, label: {
                    Text("OK")
                        .font(.title3)
                })
                .padding()
            }
        }
    }
}
In the above code (this is a visionOS project), when I tap Share, the sheet is not presented properly, and the TipView is not displayed either. Why?
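One thing worth checking for the TipView half of this (a hedged note, not a confirmed diagnosis): TipKit only renders tips after Tips.configure() has been called once, typically at app launch. A minimal sketch, assuming a hypothetical app entry point named SpatialCallApp:

import SwiftUI
import TipKit

@main
struct SpatialCallApp: App { // hypothetical entry point name
    init() {
        // Tips never appear unless TipKit is configured once at launch.
        try? Tips.configure()
    }

    var body: some Scene {
        WindowGroup {
            ChatRoomView(isCall: .constant(false),
                         showShareSheet: .constant(false),
                         codeShare: .constant("")) // illustrative bindings only
        }
    }
}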
In a visionOS project, how can a model in a RealityView track a certain part of the body, such as the left foot, right leg, or left arm, and store the model's position in a variable as it follows that part (in order to transmit body-part information during a call)?
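For context: visionOS's ARKit does not expose full-body tracking; hands are the closest tracked body parts. A minimal sketch of reading a hand-joint position with ARKitSession and HandTrackingProvider, where the stored variable latestWristPosition is purely illustrative:

import ARKit

// Sketch: track the left wrist and keep its latest world-space position.
// Assumes an immersive space is open and hand-tracking access is granted.
var latestWristPosition: SIMD3<Float>? = nil

func trackLeftWrist() async throws {
    let session = ARKitSession()
    let handTracking = HandTrackingProvider()
    try await session.run([handTracking])

    for await update in handTracking.anchorUpdates {
        let anchor = update.anchor
        guard anchor.chirality == .left,
              let skeleton = anchor.handSkeleton else { continue }
        let wrist = skeleton.joint(.wrist)
        // Compose the joint transform with the anchor transform to get a
        // world-space position, then store it for sending over the call.
        let world = anchor.originFromAnchorTransform * wrist.anchorFromJointTransform
        latestWristPosition = SIMD3<Float>(world.columns.3.x,
                                           world.columns.3.y,
                                           world.columns.3.z)
    }
}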
Please let me know if Apple publishes a document about the Persona virtual camera.
In RealityView, when I use AnchorEntity to anchor to a wall, it runs normally:
AnchorEntity(.plane(.vertical, classification: .wall, minimumBounds: .one))
But when I use a target other than the wall, the View is not displayed. Why is that?
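For reference, a couple of other plane targets that can be tried the same way (a sketch; whether content actually appears still depends on the space and plane-detection setup):

// Horizontal floor plane, at least 1 m x 1 m.
let floorAnchor = AnchorEntity(.plane(.horizontal,
                                      classification: .floor,
                                      minimumBounds: SIMD2<Float>(1, 1)))

// Horizontal table plane, at least 20 cm x 20 cm.
let tableAnchor = AnchorEntity(.plane(.horizontal,
                                      classification: .table,
                                      minimumBounds: SIMD2<Float>(0.2, 0.2)))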
I found in App Store Connect that on the 18th (Vision Pro pre-order day), my visionOS app was purchased by someone in the United States. But that day was pre-order only; no one had actually received a Vision Pro yet, and this app is only compatible with visionOS. Why would this happen?
Topic:
Programming Languages
SubTopic:
Swift
Tags:
Swift
App Store Connect API
App Submission
visionOS
Apple launched the Developer Strap on the developer website, which lets developers connect Vision Pro to a Mac over USB-C, but I can't get one in China. So I'd like to ask developers who already have a Vision Pro, or people who know about this: Can a Vision Pro without the Developer Strap be debugged wirelessly? If so, how is the wireless debugging experience, and how does it compare with a wired connection? Thank you!
We can use AnchorEntity to fix a RealityView in one place, such as on a wall. Now I hope it can not only be fixed in one place, but also fill that whole surface by stretching. For example: my RealityView shows a picture that is anchored to the wall, and I want ARKit to stretch the picture according to the size of the wall so that it covers the whole wall. How can I achieve this? Thank you!
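One possible approach (a sketch, assuming an ImmersiveSpace with plane detection authorized, and a pictureEntity that is 1 m x 1 m at scale 1): read the detected wall's extent from PlaneDetectionProvider and scale the picture to match.

import ARKit
import RealityKit

// Sketch: stretch `pictureEntity` to cover a detected wall plane.
func fitPictureToWall(pictureEntity: Entity) async throws {
    let session = ARKitSession()
    let planeDetection = PlaneDetectionProvider(alignments: [.vertical])
    try await session.run([planeDetection])

    for await update in planeDetection.anchorUpdates {
        let plane = update.anchor
        guard plane.classification == .wall else { continue }
        // Scale to the wall's detected width/height and move onto the wall.
        let extent = plane.geometry.extent
        pictureEntity.scale = SIMD3<Float>(extent.width, extent.height, 1)
        pictureEntity.setTransformMatrix(plane.originFromAnchorTransform, relativeTo: nil)
    }
}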
To improve my app's sales, I want to promote it. At present, I only know from the visionOS App Submission Guide that by sharing my app I can get a chance to be featured as an Editor's Choice. Is there any other way to promote a visionOS app?
Topic:
App Store Distribution & Marketing
SubTopic:
App Store Connect API
Tags:
App Store Connect API
App Store Server Library
visionOS
In visionOS, I want to show 3D content. I can use either RealityView or Model3D, but the effects they achieve look similar. What is the difference between them, and which one should I use?
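Roughly (a sketch using the standard realityKitContentBundle): Model3D is a lightweight SwiftUI view for simply displaying a model, while RealityView gives full access to the entity hierarchy for interaction and per-frame updates.

import SwiftUI
import RealityKit
import RealityKitContent

// Model3D: just display a model asynchronously.
struct SimpleModelView: View {
    var body: some View {
        Model3D(named: "Scene", bundle: realityKitContentBundle)
    }
}

// RealityView: load the same model but keep access to its entities.
struct InteractiveModelView: View {
    var body: some View {
        RealityView { content in
            if let model = try? await Entity(named: "Scene", in: realityKitContentBundle) {
                content.add(model)
            }
        }
    }
}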
I hope to have my income sent to my Apple Cash account (Apple Cash can display a virtual card number in iOS 17.4 beta). Is this okay?
If it is, how should I fill in fields such as the ABA Routing Number in Agreements, Tax, and Banking? And will Apple Cash pay me interest (I only have Apple Cash)?
Topic:
App Store Distribution & Marketing
SubTopic:
App Store Connect
Tags:
Wallet
App Store Connect
Analytics & Reporting
I want to test in-app purchases in the visionOS Simulator. I logged in to my Sandbox account in the Simulator and was prompted to send a verification code to the phone number I entered in the previous step. I entered the verification code accurately, but I still couldn't log in; it failed many times.
Topic:
App & System Services
SubTopic:
StoreKit
Tags:
StoreKit Test
In-App Purchase
visionOS
StoreKit
Please treat me as a beginner with Unity.
I want to learn to develop visionOS VR apps with Unity. I am trying to find a relatively complete route to start learning, but Unity's official website does not have much documentation for visionOS VR apps, so I hope you can give me a complete learning route. Thank you!
Topic:
Graphics & Games
SubTopic:
General
Tags:
Games
Apple Unity Plug-Ins
WWDC23 Community
visionOS
I saw the OnNotification trigger in the Behaviors configuration of Reality Composer Pro, which asks me to enter a Notification Name. That is to say, Swift code in Xcode needs to post a notification containing this name. I hope you can show me an example of how to use Swift in Xcode to post a notification containing the notification name.
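For reference, the pattern used in Apple's visionOS sample code (a sketch; "MyNotificationName" is a placeholder for whatever Notification Name was entered in Reality Composer Pro):

import RealityKit

// Post the notification that a Reality Composer Pro "OnNotification"
// behavior is listening for. `entity` must already be in the loaded scene.
func triggerBehavior(for entity: Entity) {
    guard let scene = entity.scene else { return }
    NotificationCenter.default.post(
        name: Notification.Name("RealityKit.NotificationTrigger"),
        object: nil,
        userInfo: [
            "RealityKit.NotificationTrigger.Scene": scene,
            "RealityKit.NotificationTrigger.Identifier": "MyNotificationName" // placeholder
        ]
    )
}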
Topic:
Spatial Computing
SubTopic:
Reality Composer Pro
Tags:
SwiftUI
RealityKit
Reality Composer Pro
visionOS
How can I display the user's own persona in a view?
When I wanted to load a Reality Composer Pro scene containing Object Tracking, I tried the following code:
RealityView { content in
    if let model = try? await Entity(named: "Scene", in: realityKitContentBundle) {
        content.add(model)
    }
}
Obviously, this is wrong. We need to add some configuration that enables Object Tracking for the RealityView. What do we need to add?
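As of visionOS 2, object tracking runs through an ARKit session rather than through the RealityView alone. A minimal sketch, assuming a .referenceobject file produced by Create ML is bundled under the hypothetical name "MyObject":

import ARKit
import RealityKit

// Sketch: run object tracking and attach the Reality Composer Pro model
// to the tracked object's pose.
func runObjectTracking(model: Entity) async throws {
    guard let url = Bundle.main.url(forResource: "MyObject",
                                    withExtension: "referenceobject") else { return }
    let referenceObject = try await ReferenceObject(from: url)

    let session = ARKitSession()
    let tracking = ObjectTrackingProvider(referenceObjects: [referenceObject])
    try await session.run([tracking])

    for await update in tracking.anchorUpdates {
        // Move the model to follow the tracked object.
        model.setTransformMatrix(update.anchor.originFromAnchorTransform, relativeTo: nil)
    }
}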
I followed the WWDC video to learn SharePlay. I understood the initial creation of seats, but I couldn't follow some of the later content very well, so I hope you can give me sample code. The details are as follows:
I have already created the seats:
struct TeamSelectionTemplate: SpatialTemplate {
    let elements: [any SpatialTemplateElement] = [
        .seat(position: .app.offsetBy(x: 0, z: 4)),
        .seat(position: .app.offsetBy(x: 1, z: 4)),
        .seat(position: .app.offsetBy(x: -1, z: 4)),
        .seat(position: .app.offsetBy(x: 2, z: 4)),
        .seat(position: .app.offsetBy(x: -2, z: 4)),
    ]
}
I hope you can give me a SharePlay button: after pressing it, it should assign all users in the FaceTime call to the seats defined by the elements in TeamSelectionTemplate. Thank you very much.
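A sketch of that flow, assuming a hypothetical GroupActivity named TeamSelectionActivity: the button activates the activity, and the session handler applies TeamSelectionTemplate through the system coordinator, which is what seats the participants.

import SwiftUI
import GroupActivities

// Hypothetical activity describing this shared experience.
struct TeamSelectionActivity: GroupActivity {
    var metadata: GroupActivityMetadata {
        var metadata = GroupActivityMetadata()
        metadata.title = "Team Selection"
        metadata.type = .generic
        return metadata
    }
}

struct SharePlayButton: View {
    var body: some View {
        Button("SharePlay") {
            Task {
                // Start the activity in the current FaceTime call.
                _ = try? await TeamSelectionActivity().activate()
            }
        }
        .task {
            // Handle incoming sessions and apply the custom spatial template.
            for await session in TeamSelectionActivity.sessions() {
                guard let coordinator = await session.systemCoordinator else { continue }
                var config = SystemCoordinator.Configuration()
                config.spatialTemplatePreference = .custom(TeamSelectionTemplate())
                coordinator.configuration = config
                session.join()
            }
        }
    }
}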