diff --git a/AWSTranscribeStreamingTests/AWSTranscribeStreamingSwiftTests.swift b/AWSTranscribeStreamingTests/AWSTranscribeStreamingSwiftTests.swift
index 97ec35a5bf4..040ee250cb3 100644
--- a/AWSTranscribeStreamingTests/AWSTranscribeStreamingSwiftTests.swift
+++ b/AWSTranscribeStreamingTests/AWSTranscribeStreamingSwiftTests.swift
@@ -43,137 +43,141 @@ class AWSTranscribeStreamingSwiftTests: XCTestCase {
         AWSDDLog.add(AWSDDOSLogger.sharedInstance)
     }
 
-    func testStreamingExamples() throws {
-        let audioExamplesMap: [AWSTranscribeStreamingLanguageCode: String] = [
-            .enUS: "hello_world",
-            .deDE: "guten_tag",
-            .esES: "hola_ mundo"
-        ]
-
-        for (languageCode, fileName) in audioExamplesMap {
-            let bundle = Bundle(for: AWSTranscribeStreamingSwiftTests.self)
-            guard let audioPath = bundle.path(forResource: fileName, ofType: "wav") else {
-                XCTFail("Can't find audio path: \(fileName).wav")
-                return
-            }
-
-            let audioURL = URL(fileURLWithPath: audioPath)
-            let audioData = try Data(contentsOf: audioURL)
-
-            guard let request = AWSTranscribeStreamingStartStreamTranscriptionRequest() else {
-                XCTFail("request unexpectedly nil")
-                return
-            }
-
-            request.languageCode = languageCode
-            request.mediaEncoding = .pcm
-            request.mediaSampleRateHertz = 24000
-
-            // Set up delegate and its expectations
-            let delegate = MockTranscribeStreamingClientDelegate()
-
-            // Connection open/close
-            let webSocketIsConnected = expectation(description: "Web socket is connected")
-            let webSocketIsClosed = expectation(description: "Web socket is closed")
-            delegate.connectionStatusCallback = { status, error in
-                if status == .connected {
-                    DispatchQueue.main.async {
-                        webSocketIsConnected.fulfill()
-                    }
-                }
-
-                if status == .closed && error == nil {
-                    DispatchQueue.main.async {
-                        webSocketIsClosed.fulfill()
-                    }
-                }
-            }
-
-            // Event: for this test, we expect to only receive transcriptions, not errors
-            let receivedFinalTranscription = expectation(description: "Received final transcription")
-            delegate.receiveEventCallback = { event, error in
-                if let error = error {
-                    XCTFail("Unexpected error receiving event: \(error)")
-                    return
-                }
-
-                guard let event = event else {
-                    XCTFail("event unexpectedly nil")
-                    return
-                }
-
-                guard let transcriptEvent = event.transcriptEvent else {
-                    XCTFail("transcriptEvent unexpectedly nil: event may be an error \(event)")
-                    return
-                }
-
-                guard let results = transcriptEvent.transcript?.results else {
-                    print("No results, waiting for next event")
-                    return
-                }
-
-                guard let firstResult = results.first else {
-                    print("firstResult nil--possibly a partial result: \(event)")
-                    return
-                }
-
-                guard let isPartial = firstResult.isPartial as? Bool else {
-                    XCTFail("isPartial unexpectedly nil, or cannot cast NSNumber to Bool")
-                    return
-                }
-
-                guard !isPartial else {
-                    print("Partial result received, waiting for next event (results: \(results))")
-                    return
-                }
-
-                print("Received final transcription event (results: \(results))")
-                DispatchQueue.main.async {
-                    receivedFinalTranscription.fulfill()
-                }
-            }
-
-            let callbackQueue = DispatchQueue(label: "testStreamingExample")
-            transcribeStreamingClient.setDelegate(delegate, callbackQueue: callbackQueue)
-
-            // Now that we have a delegate ready to receive the "open" event, we can start the transcription request
-            transcribeStreamingClient.startTranscriptionWSS(request)
-
-            wait(for: [webSocketIsConnected], timeout: AWSTranscribeStreamingSwiftTests.networkOperationTimeout)
-
-            // Now that the web socket is connected, it is safe to proceed with streaming
-
-            let headers = [
-                ":content-type": "audio/wav",
-                ":message-type": "event",
-                ":event-type": "AudioEvent"
-            ]
-
-            let chunkSize = 4096
-            let audioDataSize = audioData.count
-
-            var currentStart = 0
-            var currentEnd = min(chunkSize, audioDataSize - currentStart)
-
-            while currentStart < audioDataSize {
-                let dataChunk = audioData[currentStart ..< currentEnd]
-                transcribeStreamingClient.send(dataChunk, headers: headers)
-
-                currentStart = currentEnd
-                currentEnd = min(currentStart + chunkSize, audioDataSize)
-            }
-
-            print("Sending end frame")
-            self.transcribeStreamingClient.sendEndFrame()
-
-            print("Waiting for final transcription event")
-            wait(for: [receivedFinalTranscription], timeout: AWSTranscribeStreamingSwiftTests.networkOperationTimeout)
-
-            print("Ending transcription")
-            transcribeStreamingClient.endTranscription()
-
-            print("Waiting for websocket to close")
-            wait(for: [webSocketIsClosed], timeout: AWSTranscribeStreamingSwiftTests.networkOperationTimeout)
-        }
-    }
+    func testStreamingExampleEnglish() throws {
+        try transcribeAudio(languageCode: .enUS, fileName: "hello_world")
+    }
+
+    func testStreamingExampleDeutsche() throws {
+        try transcribeAudio(languageCode: .deDE, fileName: "guten_tag")
+    }
+
+    func testStreamingExampleEspañol() throws {
+        try transcribeAudio(languageCode: .esES, fileName: "hola_mundo")
+    }
+
+    private func transcribeAudio(languageCode: AWSTranscribeStreamingLanguageCode, fileName: String) throws {
+        let bundle = Bundle(for: AWSTranscribeStreamingSwiftTests.self)
+        guard let audioPath = bundle.path(forResource: fileName, ofType: "wav") else {
+            XCTFail("Can't find audio path: \(fileName).wav")
+            return
+        }
+
+        let audioURL = URL(fileURLWithPath: audioPath)
+        let audioData = try Data(contentsOf: audioURL)
+
+        guard let request = AWSTranscribeStreamingStartStreamTranscriptionRequest() else {
+            XCTFail("request unexpectedly nil")
+            return
+        }
+
+        request.languageCode = languageCode
+        request.mediaEncoding = .pcm
+        request.mediaSampleRateHertz = 24000
+
+        // Set up delegate and its expectations
+        let delegate = MockTranscribeStreamingClientDelegate()
+
+        // Connection open/close
+        let webSocketIsConnected = expectation(description: "Web socket is connected")
+        let webSocketIsClosed = expectation(description: "Web socket is closed")
+        delegate.connectionStatusCallback = { status, error in
+            if status == .connected {
+                DispatchQueue.main.async {
+                    webSocketIsConnected.fulfill()
+                }
+            }
+
+            if status == .closed && error == nil {
+                DispatchQueue.main.async {
+                    webSocketIsClosed.fulfill()
+                }
+            }
+        }
+
+        // Event: for this test, we expect to only receive transcriptions, not errors
+        let receivedFinalTranscription = expectation(description: "Received final transcription")
+        delegate.receiveEventCallback = { event, error in
+            if let error = error {
+                XCTFail("Unexpected error receiving event: \(error)")
+                return
+            }
+
+            guard let event = event else {
+                XCTFail("event unexpectedly nil")
+                return
+            }
+
+            guard let transcriptEvent = event.transcriptEvent else {
+                XCTFail("transcriptEvent unexpectedly nil: event may be an error \(event)")
+                return
+            }
+
+            guard let results = transcriptEvent.transcript?.results else {
+                print("No results, waiting for next event")
+                return
+            }
+
+            guard let firstResult = results.first else {
+                print("firstResult nil--possibly a partial result: \(event)")
+                return
+            }
+
+            guard let isPartial = firstResult.isPartial as? Bool else {
+                XCTFail("isPartial unexpectedly nil, or cannot cast NSNumber to Bool")
+                return
+            }
+
+            guard !isPartial else {
+                print("Partial result received, waiting for next event (results: \(results))")
+                return
+            }
+
+            print("Received final transcription event (results: \(results))")
+            DispatchQueue.main.async {
+                receivedFinalTranscription.fulfill()
+            }
+        }
+
+        let callbackQueue = DispatchQueue(label: "testStreamingExample")
+        transcribeStreamingClient.setDelegate(delegate, callbackQueue: callbackQueue)
+
+        // Now that we have a delegate ready to receive the "open" event, we can start the transcription request
+        transcribeStreamingClient.startTranscriptionWSS(request)
+
+        wait(for: [webSocketIsConnected], timeout: AWSTranscribeStreamingSwiftTests.networkOperationTimeout)
+
+        // Now that the web socket is connected, it is safe to proceed with streaming
+
+        let headers = [
+            ":content-type": "audio/wav",
+            ":message-type": "event",
+            ":event-type": "AudioEvent"
+        ]
+
+        let chunkSize = 4096
+        let audioDataSize = audioData.count
+
+        var currentStart = 0
+        var currentEnd = min(chunkSize, audioDataSize - currentStart)
+
+        while currentStart < audioDataSize {
+            let dataChunk = audioData[currentStart ..< currentEnd]
+            transcribeStreamingClient.send(dataChunk, headers: headers)
+
+            currentStart = currentEnd
+            currentEnd = min(currentStart + chunkSize, audioDataSize)
+        }
+
+        print("Sending end frame")
+        self.transcribeStreamingClient.sendEndFrame()
+
+        print("Waiting for final transcription event")
+        wait(for: [receivedFinalTranscription], timeout: AWSTranscribeStreamingSwiftTests.networkOperationTimeout)
+
+        print("Ending transcription")
+        transcribeStreamingClient.endTranscription()
+
+        print("Waiting for websocket to close")
+        wait(for: [webSocketIsClosed], timeout: AWSTranscribeStreamingSwiftTests.networkOperationTimeout)
+    }
 }
diff --git a/AWSiOSSDKv2.xcodeproj/project.pbxproj b/AWSiOSSDKv2.xcodeproj/project.pbxproj
index d5a4a2c636d..36db0f580bd 100644
--- a/AWSiOSSDKv2.xcodeproj/project.pbxproj
+++ b/AWSiOSSDKv2.xcodeproj/project.pbxproj
@@ -598,6 +598,7 @@
 		5C1590172755727C00F88085 /* AWSCore.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = CE0D416D1C6A66E5006B91B5 /* AWSCore.framework */; };
 		5C1978DD2702364800F9C11E /* AWSLocationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5C1978DC2702364800F9C11E /* AWSLocationTests.swift */; };
 		5C71F33F295672B8001183A4 /* guten_tag.wav in Resources */ = {isa = PBXBuildFile; fileRef = 5C71F33E295672B8001183A4 /* guten_tag.wav */; };
+		680BA6182D1626CF00D69A6C /* hola_mundo.wav in Resources */ = {isa = PBXBuildFile; fileRef = 680BA6172D1626CF00D69A6C /* hola_mundo.wav */; };
 		685AA2112CDA7843008EFC7B /* AWSMQTTTimerRing.h in Headers */ = {isa = PBXBuildFile; fileRef = 685AA20F2CDA7843008EFC7B /* AWSMQTTTimerRing.h */; };
 		685AA2122CDA7843008EFC7B /* AWSMQTTTimerRing.m in Sources */ = {isa = PBXBuildFile; fileRef = 685AA2102CDA7843008EFC7B /* AWSMQTTTimerRing.m */; };
 		687952932B8FE2C5001E8990 /* AWSDDLog+Optional.swift in Sources */ = {isa = PBXBuildFile; fileRef = 687952922B8FE2C5001E8990 /* AWSDDLog+Optional.swift */; };
@@ -3217,6 +3218,7 @@
 		5C1978DB2702364800F9C11E /* AWSLocationTests-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "AWSLocationTests-Bridging-Header.h"; sourceTree = "<group>"; };
 		5C1978DC2702364800F9C11E /* AWSLocationTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AWSLocationTests.swift; sourceTree = "<group>"; };
 		5C71F33E295672B8001183A4 /* guten_tag.wav */ = {isa = PBXFileReference; lastKnownFileType = audio.wav; path = guten_tag.wav; sourceTree = "<group>"; };
+		680BA6172D1626CF00D69A6C /* hola_mundo.wav */ = {isa = PBXFileReference; lastKnownFileType = audio.wav; path = hola_mundo.wav; sourceTree = "<group>"; };
 		685AA20F2CDA7843008EFC7B /* AWSMQTTTimerRing.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AWSMQTTTimerRing.h; sourceTree = "<group>"; };
 		685AA2102CDA7843008EFC7B /* AWSMQTTTimerRing.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AWSMQTTTimerRing.m; sourceTree = "<group>"; };
 		687952922B8FE2C5001E8990 /* AWSDDLog+Optional.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AWSDDLog+Optional.swift"; sourceTree = "<group>"; };
@@ -5409,6 +5411,7 @@
 				FA53331F22D4065800BD88AF /* AWSTranscribeStreamingTests-Bridging-Header.h */,
 				5C71F33E295672B8001183A4 /* guten_tag.wav */,
 				FABD9ED322D6661200BD4441 /* hello_world.wav */,
+				680BA6172D1626CF00D69A6C /* hola_mundo.wav */,
 				178A805122B097B900B167D6 /* Info.plist */,
 			);
 			path = AWSTranscribeStreamingTests;
@@ -11487,6 +11490,7 @@
 			isa = PBXResourcesBuildPhase;
 			buildActionMask = 2147483647;
 			files = (
+				680BA6182D1626CF00D69A6C /* hola_mundo.wav in Resources */,
 				FABD9ED422D6661300BD4441 /* hello_world.wav in Resources */,
 				5C71F33F295672B8001183A4 /* guten_tag.wav in Resources */,
 			);