Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Update return type of result when the user stops the audio player #283

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
## 1.0.6
- Upgraded Flutter version
- Minor internal restructure
- Changed native `stopRecording` result from a positional list to a map keyed by `resultFilePath`/`resultDuration`

## 1.0.5

- Updated gradle file
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -66,26 +66,26 @@ class AudioRecorder : PluginRegistry.RequestPermissionsResultListener {

/**
 * Stops the active [recorder], releases it, and completes [result] with a map
 * keyed by [Constants.resultFilePath] / [Constants.resultDuration] so the Dart
 * side can read named fields instead of list positions.
 *
 * On a failed stop (stop() called immediately after start()) the map carries a
 * null path and duration "-1" rather than raising an error to Dart.
 */
fun stopRecording(result: MethodChannel.Result, recorder: MediaRecorder?, path: String) {
    try {
        val hashMap: HashMap<String, Any?> = HashMap()
        try {
            recorder?.stop()
            val duration = getDuration(path)
            hashMap[Constants.resultFilePath] = path
            hashMap[Constants.resultDuration] = duration
        } catch (e: RuntimeException) {
            // Stop was called immediately after start which causes stop() call to fail.
            hashMap[Constants.resultFilePath] = null
            hashMap[Constants.resultDuration] = "-1"
        }
        // Always reset & release the recorder, even after a failed stop,
        // so the MediaRecorder resources are not leaked.
        recorder?.apply {
            reset()
            release()
        }
        // Single completion of the result with the keyed payload (the diff
        // previously showed both the old list and the new map being sent).
        result.success(hashMap)
    } catch (e: IllegalStateException) {
        Log.e(LOG_TAG, "Failed to stop recording")
    }
}
Expand Down
3 changes: 3 additions & 0 deletions android/src/main/kotlin/com/simform/audio_waveforms/Utils.kt
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,9 @@ object Constants {
const val waveformData = "waveformData"
const val useLegacyNormalization = "useLegacyNormalization"
const val updateFrequency = "updateFrequency"

const val resultFilePath = "resultFilePath"
const val resultDuration = "resultDuration"
}

enum class FinishMode(val value:Int) {
Expand Down
2 changes: 1 addition & 1 deletion example/ios/Flutter/AppFrameworkInfo.plist
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,6 @@
<key>CFBundleVersion</key>
<string>1.0</string>
<key>MinimumOSVersion</key>
<string>11.0</string>
<string>12.0</string>
</dict>
</plist>
16 changes: 12 additions & 4 deletions ios/Classes/AudioRecorder.swift
Original file line number Diff line number Diff line change
Expand Up @@ -62,26 +62,34 @@ public class AudioRecorder: NSObject, AVAudioRecorderDelegate{
audioRecorder?.stop()
if(audioUrl != nil) {
let asset = AVURLAsset(url: audioUrl!)

if #available(iOS 15.0, *) {
Task {
do {
recordedDuration = try await asset.load(.duration)
result([path,Int(recordedDuration.seconds * 1000).description])
sendResult(result, duration: (recordedDuration.seconds * 1000).description)
} catch let err {
debugPrint(err.localizedDescription)
result([path,CMTime.zero.seconds.description])
sendResult(result, duration: CMTime.zero.seconds.description)
}
}
} else {
recordedDuration = asset.duration
result([path,Int(recordedDuration.seconds * 1000).description])
sendResult(result, duration: (recordedDuration.seconds * 1000).description)
}
} else {
result([path,CMTime.zero.seconds.description])
sendResult(result, duration: CMTime.zero.seconds.description)
}
audioRecorder = nil
}

/// Completes the Flutter `result` callback with a map payload keyed by
/// `Constants.resultFilePath` / `Constants.resultDuration` (replaces the
/// older positional `[path, duration]` list payload).
/// - Parameters:
///   - result: Flutter result callback to complete exactly once.
///   - duration: Recorded duration, already stringified by the caller.
/// NOTE(review): with a `[String: String?]` value type, `params[key] = path`
/// stores `.some(path)` even when `path` is nil, so the key is always present
/// with a null value — confirm the Dart side tolerates a null file path.
private func sendResult(_ result: @escaping FlutterResult, duration:String){
    var params = [String:String?]()
    // `path` is the enclosing recorder's stored file path.
    params[Constants.resultFilePath] = path
    params[Constants.resultDuration] = duration
    result(params)
}

public func pauseRecording(_ result: @escaping FlutterResult) {
audioRecorder?.pause()
result(false)
Expand Down
2 changes: 2 additions & 0 deletions ios/Classes/Utils.swift
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,8 @@ struct Constants {
static let onExtractionProgressUpdate = "onExtractionProgressUpdate"
static let useLegacyNormalization = "useLegacyNormalization"
static let updateFrequency = "updateFrequency"
static let resultFilePath = "resultFilePath"
static let resultDuration = "resultDuration"
}

enum FinishMode : Int{
Expand Down
6 changes: 3 additions & 3 deletions lib/src/base/audio_waveforms_interface.dart
Original file line number Diff line number Diff line change
Expand Up @@ -63,10 +63,10 @@ class AudioWaveformsInterface {
}

/// Platform call to stop recording.
///
/// Returns the map produced by the native side, keyed by
/// [Constants.resultFilePath] and [Constants.resultDuration].
/// (The diff previously showed both the old `List<String?>?` and the new
/// `Map` versions interleaved; this is the intended new version.)
Future<Map<String, dynamic>> stop() async {
  // invokeMethod returns Map<Object?, Object?>; cast keys/values for callers.
  Map<Object?, Object?> audioInfo =
      await _methodChannel.invokeMethod(Constants.stopRecording);
  return audioInfo.cast<String, dynamic>();
}

///platform call to resume recording.
Expand Down
2 changes: 2 additions & 0 deletions lib/src/base/constants.dart
Original file line number Diff line number Diff line change
Expand Up @@ -47,4 +47,6 @@ class Constants {
"onCurrentExtractedWaveformData";
static const String useLegacyNormalization = "useLegacyNormalization";
static const String updateFrequency = "updateFrequency";
static const String resultFilePath = "resultFilePath";
static const String resultDuration = "resultDuration";
}
29 changes: 13 additions & 16 deletions lib/src/controllers/recorder_controller.dart
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import 'dart:async';
import 'dart:io' show Platform;
import 'dart:math' show max;

import 'package:audio_waveforms/src/base/constants.dart';
import 'package:flutter/material.dart';

import '/src/base/utils.dart';
Expand Down Expand Up @@ -286,24 +287,20 @@ class RecorderController extends ChangeNotifier {
Future<String?> stop([bool callReset = true]) async {
if (_recorderState.isRecording || _recorderState.isPaused) {
final audioInfo = await AudioWaveformsInterface.instance.stop();
if (audioInfo != null) {
_isRecording = false;
_timer?.cancel();
_recorderTimer?.cancel();
if (audioInfo[1] != null) {
var duration = int.tryParse(audioInfo[1]!);
if (duration != null) {
recordedDuration = Duration(milliseconds: duration);
_recordedFileDurationController.add(recordedDuration);
}
_isRecording = false;
_timer?.cancel();
_recorderTimer?.cancel();
if (audioInfo[Constants.resultDuration] != null) {
var duration = int.tryParse(audioInfo[Constants.resultDuration]);
if (duration != null) {
recordedDuration = Duration(milliseconds: duration);
_recordedFileDurationController.add(recordedDuration);
}
elapsedDuration = Duration.zero;
_setRecorderState(RecorderState.stopped);
if (callReset) reset();
return audioInfo[0];
} else {
throw "Failed stop recording";
}
elapsedDuration = Duration.zero;
_setRecorderState(RecorderState.stopped);
if (callReset) reset();
return audioInfo[Constants.resultFilePath];
}

notifyListeners();
Expand Down
2 changes: 1 addition & 1 deletion pubspec.yaml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
name: audio_waveforms
description: A Flutter package that allows you to generate waveforms while recording audio or from an audio file.
version: 1.0.5
version: 1.0.6
homepage: https://github.com/SimformSolutionsPvtLtd/audio_waveforms
issue_tracker: https://github.com/SimformSolutionsPvtLtd/audio_waveforms/issues

Expand Down