From 38e42a192afa29707b8b89dcbba4a941b6dc6552 Mon Sep 17 00:00:00 2001 From: Nilesh Chavan Date: Fri, 14 Nov 2025 12:07:03 +0530 Subject: [PATCH] fix(UNT-T39987): recorder state update when a call is received on iOS --- ios/AudioRecorder.swift | 55 +++++++++++++++++++++++++--- ios/EventEmitter.swift | 2 +- ios/Utils.swift | 10 ++++- src/components/Waveform/Waveform.tsx | 12 ++++++ src/constants/index.ts | 6 +++ src/hooks/useAudioPlayer.tsx | 9 +++++ 6 files changed, 86 insertions(+), 8 deletions(-) diff --git a/ios/AudioRecorder.swift b/ios/AudioRecorder.swift index d571ca0..5f5f207 100644 --- a/ios/AudioRecorder.swift +++ b/ios/AudioRecorder.swift @@ -16,7 +16,8 @@ public class AudioRecorder: NSObject, AVAudioRecorderDelegate{ var audioUrl: URL? var recordedDuration: CMTime = CMTime.zero private var timer: Timer? - var updateFrequency = UpdateFrequency.medium + var updateFrequency = UpdateFrequency.medium + private var isRecordingActive = false private func createAudioRecordPath(fileNameFormat: String?) -> URL?
{ let format = DateFormatter() @@ -65,11 +66,20 @@ public class AudioRecorder: NSObject, AVAudioRecorderDelegate{ reject(Constants.audioWaveforms, "Failed to initialise file URL", nil) return } + + NotificationCenter.default.addObserver( + self, + selector: #selector(handleInterruption), + name: AVAudioSession.interruptionNotification, + object: AVAudioSession.sharedInstance() + ) + audioRecorder = try AVAudioRecorder(url: newPath, settings: settings as [String : Any]) audioRecorder?.delegate = self audioRecorder?.isMeteringEnabled = true audioRecorder?.record() - startListening() + isRecordingActive = true + startListening() resolve(true) } catch let error as NSError { print(error.localizedDescription) @@ -97,7 +107,9 @@ public class AudioRecorder: NSObject, AVAudioRecorderDelegate{ } public func stopRecording(_ resolve: @escaping RCTPromiseResolveBlock, rejecter reject: @escaping RCTPromiseRejectBlock) -> Void { - stopListening() + stopListening() + isRecordingActive = false + NotificationCenter.default.removeObserver(self, name: AVAudioSession.interruptionNotification, object: nil) audioRecorder?.stop() if(audioUrl != nil) { let asset = AVURLAsset(url: audioUrl!) 
@@ -122,14 +134,26 @@ public class AudioRecorder: NSObject, AVAudioRecorderDelegate{ } public func pauseRecording(_ resolve: RCTPromiseResolveBlock) -> Void { - audioRecorder?.pause() + pauseActiveRecording() resolve(true) } public func resumeRecording(_ resolve: RCTPromiseResolveBlock) -> Void { - audioRecorder?.record() + resumeActiveRecording() resolve(true) } + + private func pauseActiveRecording() { + audioRecorder?.pause() + isRecordingActive = false + stopListening() + } + + private func resumeActiveRecording() { + audioRecorder?.record() + isRecordingActive = true + startListening() + } func getDecibelLevel() -> Float { audioRecorder?.updateMeters() @@ -203,5 +227,35 @@ public class AudioRecorder: NSObject, AVAudioRecorderDelegate{ return Int(kAudioFormatMPEG4AAC) } } + + @objc private func handleInterruption(notification: Notification) { + guard let userInfo = notification.userInfo, + let typeValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt, + let type = AVAudioSession.InterruptionType(rawValue: typeValue) else { + return + } + + switch type { + case .began: + if isRecordingActive { + pauseActiveRecording() + EventEmitter.sharedInstance.dispatch( + name: Constants.onDidFinishRecordingAudio, + body: ["finishType": RecorderFinishType.pause.rawValue] + ) + } + case .ended: + if let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt, + AVAudioSession.InterruptionOptions(rawValue: optionsValue).contains(.shouldResume) { + resumeActiveRecording() + EventEmitter.sharedInstance.dispatch( + name: Constants.onDidFinishRecordingAudio, + body: ["finishType": RecorderFinishType.resume.rawValue] + ) + } + @unknown default: + break + } + } } diff --git a/ios/EventEmitter.swift b/ios/EventEmitter.swift index 5d0d289..9309a28 100644 --- a/ios/EventEmitter.swift +++ b/ios/EventEmitter.swift @@ -19,7 +19,7 @@ class EventEmitter { /// All Events which must be support by React Native. 
lazy var allEvents: [String] = { - var allEventNames: [String] = ["onDidFinishPlayingAudio", "onCurrentDuration", "onCurrentExtractedWaveformData", "onCurrentRecordingWaveformData"] + var allEventNames: [String] = ["onDidFinishPlayingAudio", "onCurrentDuration", "onCurrentExtractedWaveformData", "onCurrentRecordingWaveformData", "onDidFinishRecordingAudio"] // Append all events here diff --git a/ios/Utils.swift b/ios/Utils.swift index 9a10019..2ff54d3 100644 --- a/ios/Utils.swift +++ b/ios/Utils.swift @@ -55,7 +55,7 @@ struct Constants { static let preparePlayer = "preparePlayer" static let onCurrentDuration = "onCurrentDuration" static let currentDuration = "currentDuration" - static let currentDecibel = "currentDecibel" + static let currentDecibel = "currentDecibel" static let playerKey = "playerKey" static let stopAllPlayers = "stopAllPlayers" static let stopAllWaveFormExtractors = "stopAllWaveFormExtractors" @@ -66,7 +66,8 @@ struct Constants { static let extractWaveformData = "extractWaveformData" static let noOfSamples = "noOfSamples" static let onCurrentExtractedWaveformData = "onCurrentExtractedWaveformData" - static let onCurrentRecordingWaveformData = "onCurrentRecordingWaveformData" + static let onCurrentRecordingWaveformData = "onCurrentRecordingWaveformData" + static let onDidFinishRecordingAudio = "onDidFinishRecordingAudio" static let waveformData = "waveformData" static let onExtractionProgressUpdate = "onExtractionProgressUpdate" static let useLegacyNormalization = "useLegacyNormalization" @@ -80,6 +81,11 @@ enum FinishMode : Int{ case stop = 2 } +enum RecorderFinishType : Int{ + case pause = 0 + case resume = 1 +} + //Note: If you are making change here, please make sure to make change in Android and React Native side as well other wise there will be mismatch in value //Use same values in Android and React native side as well enum UpdateFrequency : Double { diff --git a/src/components/Waveform/Waveform.tsx b/src/components/Waveform/Waveform.tsx 
index cef932b..2d81098 100644 --- a/src/components/Waveform/Waveform.tsx +++ b/src/components/Waveform/Waveform.tsx @@ -24,6 +24,7 @@ import { playbackSpeedThreshold, PlayerState, RecorderState, + RecorderFinishType, UpdateFrequency, } from '../../constants'; import { @@ -96,6 +97,7 @@ export const Waveform = forwardRef((props, ref) => { onCurrentDuration, onDidFinishPlayingAudio, onCurrentRecordingWaveformData, + onDidFinishRecordingAudio, setPlaybackSpeed, markPlayerAsUnmounted, } = useAudioPlayer(); @@ -524,10 +526,20 @@ export const Waveform = forwardRef((props, ref) => { } } ); + + const traceRecordingFinish = onDidFinishRecordingAudio(data => { + if (data.finishType === RecorderFinishType.pause) { + setRecorderState(RecorderState.paused); + } else if (data.finishType === RecorderFinishType.resume) { + setRecorderState(RecorderState.recording); + } + }); + return () => { tracePlayerState.remove(); tracePlaybackValue.remove(); traceRecorderWaveformValue.remove(); + traceRecordingFinish.remove(); markPlayerAsUnmounted(); }; // eslint-disable-next-line react-hooks/exhaustive-deps diff --git a/src/constants/index.ts b/src/constants/index.ts index 7955112..56b95ef 100644 --- a/src/constants/index.ts +++ b/src/constants/index.ts @@ -8,6 +8,7 @@ export enum NativeEvents { onCurrentDuration = 'onCurrentDuration', onCurrentExtractedWaveformData = 'onCurrentExtractedWaveformData', onCurrentRecordingWaveformData = 'onCurrentRecordingWaveformData', + onDidFinishRecordingAudio = 'onDidFinishRecordingAudio', } export enum PermissionStatus { @@ -34,6 +35,11 @@ export enum RecorderState { stopped = 'stopped', } +export enum RecorderFinishType { + pause = 0, + resume = 1, +} + //Note: If you are making change here, please make sure to make change in iOS and Android side as well other wise there will be mismatch in value //Use same values in iOS and Android side as well export enum UpdateFrequency { diff --git a/src/hooks/useAudioPlayer.tsx b/src/hooks/useAudioPlayer.tsx index 
c231c65..9a9140f 100644 --- a/src/hooks/useAudioPlayer.tsx +++ b/src/hooks/useAudioPlayer.tsx @@ -79,6 +79,14 @@ export const useAudioPlayer = () => { result => callback(result) ); + const onDidFinishRecordingAudio = ( + callback: (result: { finishType: number }) => void + ) => + audioPlayerEmitter.addListener( + NativeEvents.onDidFinishRecordingAudio, + result => callback(result) + ); + const setPlaybackSpeed = (args: ISetPlaybackSpeed) => AudioWaveform.setPlaybackSpeed(args); @@ -100,6 +108,7 @@ export const useAudioPlayer = () => { onCurrentExtractedWaveformData, getDuration, onCurrentRecordingWaveformData, + onDidFinishRecordingAudio, setPlaybackSpeed, markPlayerAsUnmounted, stopAllWaveFormExtractors,