How to convert response text to speech using AVSpeechSynthesizer
I have the code below, which uses the ChatGPT API to generate a response to the user's input. While the response is being printed out character by character, the user's input is disabled. (This is just so you get an idea of what the code does.) I want to use AVSpeechSynthesizer to read the response aloud, but I am not familiar with it or how to implement it. Can you help me find a way to read the response aloud using AVSpeechSynthesizer? Here is the code:

import SwiftUI
import OpenAISwift

final class ViewModel: ObservableObject {
    init() {}

    private var client: OpenAISwift?
    @Published var isChatting = false

    func setup() {
        client = OpenAISwift(authToken: "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")
    }

    func send(text: String, completion: @escaping (String) -> Void) {
        client?.sendCompletion(with: text, maxTokens: 500, completionHandler: { result in
            switch result {
            case .success(let model):
                let output = model.choices?.first?.text ?? ""
                completion(output)
            case .failure:
                break
            }
        })
    }
}

struct ContentView: View {
    @ObservedObject var viewModel = ViewModel()
    @State var text = ""
    @State var models = [String]()
    @State var textfieldEnabled = true

    var body: some View {
        VStack(alignment: .leading) {
            ForEach(models, id: \.self) { string in
                if string.starts(with: "User") {
                    Text(string)
                        .foregroundColor(.white)
                        .fontWeight(.bold)
                } else {
                    Text(string)
                        .foregroundColor(.white)
                        .fontWeight(.bold)
                }
            }
            Spacer()
            HStack {
                TextField("User Input", text: $text)
                    .disabled(!textfieldEnabled)
                    .padding(.bottom, 8) // Add padding to bottom of textfield
                Button(action: {
                    send()
                }) {
                    Image(systemName: "camera.filters")
                }
                .font(.system(size: 32))
                .disabled(!textfieldEnabled)
            }
            .padding(.bottom, 10)
            Button {
                //self.returnhomeFromConverter.toggle()
            } label: {
                Text("Return")
                    .fontWeight(.bold)
                    .foregroundColor(.white)
                    .frame(maxWidth: .infinity)
                    .padding(.vertical)
                    .background {
                        RoundedRectangle(cornerRadius: 12, style: .continuous)
                            .fill(.clear)
                    }
            }
        }
        .preferredColorScheme(.dark)
        .onAppear {
            viewModel.setup()
        }
        .padding()
    }

    func send() {
        guard !text.isEmpty && !viewModel.isChatting else {
            return
        }
        viewModel.isChatting = true
        models.append("User: \(text)")
        textfieldEnabled = false

        viewModel.send(text: text) { response in
            DispatchQueue.main.async {
                self.models.removeLast()
                self.models.append("ChatGPT: ")

                var index = response.startIndex
                Timer.scheduledTimer(withTimeInterval: 0.05, repeats: true) { timer in
                    guard index < response.endIndex else {
                        timer.invalidate()
                        viewModel.isChatting = false
                        textfieldEnabled = true
                        return
                    }
                    self.models[self.models.count - 1] += "\(response[index])"
                    index = response.index(after: index)
                }
            }
        }
        text = ""
    }
}

struct ContentView_Previews: PreviewProvider {
    static var previews: some View {
        ContentView()
    }
}
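
In case it is useful, this is the rough direction I was thinking of, based on the AVSpeechSynthesizer documentation. The Speaker class and the place where speak(_:) is called are just my own guesses and are untested:

import AVFoundation

final class Speaker {
    // Keep the synthesizer alive as a property so speech is not cut off when it is deallocated.
    private let synthesizer = AVSpeechSynthesizer()

    func speak(_ text: String) {
        let utterance = AVSpeechUtterance(string: text)
        utterance.voice = AVSpeechSynthesisVoice(language: "en-US")
        utterance.rate = AVSpeechUtteranceDefaultSpeechRate
        synthesizer.speak(utterance)
    }
}

and then in ContentView I would add a `let speaker = Speaker()` property and call it in the completion handler of send(), before the typewriter Timer starts:

viewModel.send(text: text) { response in
    DispatchQueue.main.async {
        speaker.speak(response) // not sure if this is the right place to start speaking
        // ... existing code that appends "ChatGPT: " and runs the Timer ...
    }
}

I am not sure whether that is the right place to trigger the speech.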