diff --git a/CHANGELOG.md b/CHANGELOG.md index 76a18cab..44807f51 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,7 @@ +## 1.0.6 +- upgrade Flutter version +- minor internal restructure + ## 1.0.5 - Updated gradle file diff --git a/android/src/main/kotlin/com/simform/audio_waveforms/AudioRecorder.kt b/android/src/main/kotlin/com/simform/audio_waveforms/AudioRecorder.kt index 7420e09d..e33f8de9 100644 --- a/android/src/main/kotlin/com/simform/audio_waveforms/AudioRecorder.kt +++ b/android/src/main/kotlin/com/simform/audio_waveforms/AudioRecorder.kt @@ -66,18 +66,18 @@ class AudioRecorder : PluginRegistry.RequestPermissionsResultListener { fun stopRecording(result: MethodChannel.Result, recorder: MediaRecorder?, path: String) { try { - val audioInfoArrayList = ArrayList<String?>() - + val hashMap : HashMap<String, Any?> = HashMap() try { recorder?.stop() val duration = getDuration(path) - audioInfoArrayList.add(path) - audioInfoArrayList.add(duration) + + hashMap[Constants.resultFilePath] = path + hashMap[Constants.resultDuration] = duration } catch (e: RuntimeException) { // Stop was called immediately after start which causes stop() call to fail. 
- audioInfoArrayList.add(null) - audioInfoArrayList.add("-1") + hashMap[Constants.resultFilePath] = null + hashMap[Constants.resultDuration] = "-1" } recorder?.apply { @@ -85,7 +85,7 @@ class AudioRecorder : PluginRegistry.RequestPermissionsResultListener { release() } - result.success(audioInfoArrayList) + result.success(hashMap) } catch (e: IllegalStateException) { Log.e(LOG_TAG, "Failed to stop recording") } diff --git a/android/src/main/kotlin/com/simform/audio_waveforms/Utils.kt b/android/src/main/kotlin/com/simform/audio_waveforms/Utils.kt index ac7a483f..8c6507ef 100644 --- a/android/src/main/kotlin/com/simform/audio_waveforms/Utils.kt +++ b/android/src/main/kotlin/com/simform/audio_waveforms/Utils.kt @@ -66,6 +66,9 @@ object Constants { const val waveformData = "waveformData" const val useLegacyNormalization = "useLegacyNormalization" const val updateFrequency = "updateFrequency" + + const val resultFilePath = "resultFilePath" + const val resultDuration = "resultDuration" } enum class FinishMode(val value:Int) { diff --git a/example/ios/Flutter/AppFrameworkInfo.plist b/example/ios/Flutter/AppFrameworkInfo.plist index 9625e105..7c569640 100644 --- a/example/ios/Flutter/AppFrameworkInfo.plist +++ b/example/ios/Flutter/AppFrameworkInfo.plist @@ -21,6 +21,6 @@ CFBundleVersion 1.0 MinimumOSVersion - 11.0 + 12.0 diff --git a/ios/Classes/AudioRecorder.swift b/ios/Classes/AudioRecorder.swift index e7578ea5..77bd6d4b 100644 --- a/ios/Classes/AudioRecorder.swift +++ b/ios/Classes/AudioRecorder.swift @@ -62,26 +62,34 @@ public class AudioRecorder: NSObject, AVAudioRecorderDelegate{ audioRecorder?.stop() if(audioUrl != nil) { let asset = AVURLAsset(url: audioUrl!) 
+ if #available(iOS 15.0, *) { Task { do { recordedDuration = try await asset.load(.duration) - result([path,Int(recordedDuration.seconds * 1000).description]) + sendResult(result, duration: Int(recordedDuration.seconds * 1000).description) } catch let err { debugPrint(err.localizedDescription) - result([path,CMTime.zero.seconds.description]) + sendResult(result, duration: CMTime.zero.seconds.description) } } } else { recordedDuration = asset.duration - result([path,Int(recordedDuration.seconds * 1000).description]) + sendResult(result, duration: Int(recordedDuration.seconds * 1000).description) } } else { - result([path,CMTime.zero.seconds.description]) + sendResult(result, duration: CMTime.zero.seconds.description) } audioRecorder = nil } + private func sendResult(_ result: @escaping FlutterResult, duration:String){ + var params = [String:String?]() + params[Constants.resultFilePath] = path + params[Constants.resultDuration] = duration + result(params) + } + public func pauseRecording(_ result: @escaping FlutterResult) { audioRecorder?.pause() result(false) diff --git a/ios/Classes/Utils.swift b/ios/Classes/Utils.swift index 47b5368d..f9e467f4 100644 --- a/ios/Classes/Utils.swift +++ b/ios/Classes/Utils.swift @@ -60,6 +60,8 @@ struct Constants { static let onExtractionProgressUpdate = "onExtractionProgressUpdate" static let useLegacyNormalization = "useLegacyNormalization" static let updateFrequency = "updateFrequency" + static let resultFilePath = "resultFilePath" + static let resultDuration = "resultDuration" } enum FinishMode : Int{ diff --git a/lib/src/base/audio_waveforms_interface.dart b/lib/src/base/audio_waveforms_interface.dart index b06a25c3..94429ace 100644 --- a/lib/src/base/audio_waveforms_interface.dart +++ b/lib/src/base/audio_waveforms_interface.dart @@ -63,10 +63,10 @@ class AudioWaveformsInterface { } ///platform call to stop recording - Future<List<String?>?> stop() async { - final audioInfo = + Future<Map<String, dynamic>> stop() async { + Map<Object?, Object?> audioInfo = await 
_methodChannel.invokeMethod(Constants.stopRecording); - return List.from(audioInfo ?? []); + return audioInfo.cast<String, dynamic>(); } ///platform call to resume recording. diff --git a/lib/src/base/constants.dart b/lib/src/base/constants.dart index 57e0d86a..cfc3bd13 100644 --- a/lib/src/base/constants.dart +++ b/lib/src/base/constants.dart @@ -47,4 +47,6 @@ class Constants { "onCurrentExtractedWaveformData"; static const String useLegacyNormalization = "useLegacyNormalization"; static const String updateFrequency = "updateFrequency"; + static const String resultFilePath = "resultFilePath"; + static const String resultDuration = "resultDuration"; } diff --git a/lib/src/controllers/recorder_controller.dart b/lib/src/controllers/recorder_controller.dart index 0b6d8ba6..4aa98985 100644 --- a/lib/src/controllers/recorder_controller.dart +++ b/lib/src/controllers/recorder_controller.dart @@ -2,6 +2,7 @@ import 'dart:async'; import 'dart:io' show Platform; import 'dart:math' show max; +import 'package:audio_waveforms/src/base/constants.dart'; import 'package:flutter/material.dart'; import '/src/base/utils.dart'; @@ -286,24 +287,20 @@ class RecorderController extends ChangeNotifier { Future<String?> stop([bool callReset = true]) async { if (_recorderState.isRecording || _recorderState.isPaused) { final audioInfo = await AudioWaveformsInterface.instance.stop(); - if (audioInfo != null) { - _isRecording = false; - _timer?.cancel(); - _recorderTimer?.cancel(); - if (audioInfo[1] != null) { - var duration = int.tryParse(audioInfo[1]!); - if (duration != null) { - recordedDuration = Duration(milliseconds: duration); - _recordedFileDurationController.add(recordedDuration); - } + _isRecording = false; + _timer?.cancel(); + _recorderTimer?.cancel(); + if (audioInfo[Constants.resultDuration] != null) { + var duration = int.tryParse(audioInfo[Constants.resultDuration]!); + if (duration != null) { + recordedDuration = Duration(milliseconds: duration); + _recordedFileDurationController.add(recordedDuration); } - 
elapsedDuration = Duration.zero; - _setRecorderState(RecorderState.stopped); - if (callReset) reset(); - return audioInfo[0]; - } else { - throw "Failed stop recording"; } + elapsedDuration = Duration.zero; + _setRecorderState(RecorderState.stopped); + if (callReset) reset(); + return audioInfo[Constants.resultFilePath]; } notifyListeners(); diff --git a/pubspec.yaml b/pubspec.yaml index b4d7e28f..f0a0391c 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -1,6 +1,6 @@ name: audio_waveforms description: A Flutter package that allow you to generate waveform while recording audio or from audio file. -version: 1.0.5 +version: 1.0.6 homepage: https://github.com/SimformSolutionsPvtLtd/audio_waveforms issue_tracker: https://github.com/SimformSolutionsPvtLtd/audio_waveforms/issues