I’m attempting to do batch transcription of audio files exported from Voice Memos, and I’m running into an interesting issue. If I only transcribe a single file it works every time, but if I try to batch it, only the last one works and the others fail with "No speech detected". I assumed it must be something about concurrency, so I banged around and finally asked here about how to force synchronous processing. And with a mocked up unit of work, everything looked good. So I added the transcription in, and:
- It still fails on all but the last file. This happens whether I’m processing 10 files or just 2.
- It no longer processes in order; any file can be the last one that succeeds. And it doesn’t seem to be related to file size: I’ve had paragraph-sized notes finish last, but also a single short sentence that finished last.

I left the mocked processFile() for reference.
import Speech
import SwiftUI

struct ContentView: View {
    @State private var processing: Bool = false
    @State private var fileNumber: String?
    @State private var fileName: String?
    @State private var files: [URL] = []

    let locale = Locale(identifier: "en-US")
    let recognizer: SFSpeechRecognizer?

    init() {
        self.recognizer = SFSpeechRecognizer(locale: self.locale)
    }

    var body: some View {
        VStack {
            if files.count > 0 {
                ZStack {
                    ProgressView()
                    Text(fileNumber ?? "-")
                        .bold()
                }
                Text(fileName ?? "-")
            } else {
                Image(systemName: "folder.badge.minus")
                Text("No audio files found")
            }
        }
        .onAppear {
            files = getFiles()
            Task {
                await processFiles()
            }
        }
    }

    private func getFiles() -> [URL] {
        do {
            let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!
            let path = documentsURL.appendingPathComponent("Voice Memos").absoluteURL
            let contents = try FileManager.default.contentsOfDirectory(at: path, includingPropertiesForKeys: nil, options: [])
            let files = contents.filter { $0.pathExtension == "m4a" }.sorted { url1, url2 in
                url1.path < url2.path
            }
            return files
        } catch {
            print(error.localizedDescription)
            return []
        }
    }

    private func processFiles() async {
        var fileCount = files.count
        for file in files {
            fileNumber = String(fileCount)
            fileName = file.lastPathComponent
            await processFile(file)
            fileCount -= 1
        }
    }
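
    // Mocked processFile(), left in for reference: it fakes a unit of work with a random delay.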
//    private func processFile(_ url: URL) async {
//        let seconds = Double.random(in: 2.0...10.0)
//        await withCheckedContinuation { continuation in
//            DispatchQueue.main.asyncAfter(deadline: .now() + seconds) {
//                continuation.resume()
//                print("\(url.lastPathComponent) \(seconds)")
//            }
//        }
//    }

    private func processFile(_ url: URL) async {
        let recognitionRequest = SFSpeechURLRecognitionRequest(url: url)
        recognitionRequest.requiresOnDeviceRecognition = false
        recognitionRequest.shouldReportPartialResults = false
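
        // Bridge the callback-based recognizer into async/await so each file can be awaited in turn.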
        await withCheckedContinuation { continuation in
            recognizer?.recognitionTask(with: recognitionRequest) { (transcriptionResult, error) in
                guard transcriptionResult != nil else {
                    print("\(url.lastPathComponent.uppercased())")
                    print(error?.localizedDescription ?? "")
                    return
                }
                if transcriptionResult?.isFinal == true {
                    if let finalText: String = transcriptionResult?.bestTranscription.formattedString {
                        print("\(url.lastPathComponent.uppercased())")
                        print(finalText)
                    }
                }
            }
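            // Note: this resumes as soon as the task is created, not when the completion handler above finishes.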
            continuation.resume()
        }
    }
}