diff --git a/CHANGELOG.md b/CHANGELOG.md index 5394255..e9ad4d7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,7 +1,7 @@ -## 1.0.0 (Unreleased) +## 1.0.0 - Reworked waveforms from audio file - - Breaking: removed `readingComplete` PlayerState and `visualizerHeight`. With this, added `extractWaveforms` function to extract waveforms. + - **Breaking**: removed `readingComplete` PlayerState and `visualizerHeight`. With this, added `extractWaveforms` function to extract waveforms. - Added `WaveformType` enum for selecting longer or shorter type of waveform. - Added `onCurrentExtractedWaveformData` and `onExtractionProgress` to monitor progress and currently extracted waveform data. - improved drawing of waveforms. @@ -10,7 +10,9 @@ - Fixed [#101](https://github.com/SimformSolutionsPvtLtd/audio_waveforms/issues/101) - Fixed setting volume for android throws error - Fixed zero duration would cause waveforms not to expand - Fixed `postFrameCallback` error for flutter version below 3.0.0 +- **BREAKING**: Replaced `normalizationFactor` with `scaleFactor`and with this fixed [#43](https://github.com/SimformSolutionsPvtLtd/audio_waveforms/issues/43) - Updated default values for bitRate and sampleRate +- Updated example app ## 0.1.5+1 diff --git a/android/src/main/kotlin/com/simform/audio_waveforms/AudioRecorder.kt b/android/src/main/kotlin/com/simform/audio_waveforms/AudioRecorder.kt index 9798ebc..dc5972e 100644 --- a/android/src/main/kotlin/com/simform/audio_waveforms/AudioRecorder.kt +++ b/android/src/main/kotlin/com/simform/audio_waveforms/AudioRecorder.kt @@ -12,7 +12,6 @@ import io.flutter.plugin.common.MethodChannel import io.flutter.plugin.common.PluginRegistry import java.io.IOException import java.lang.IllegalStateException -import kotlin.math.log10 private const val LOG_TAG = "AudioWaveforms" private const val RECORD_AUDIO_REQUEST_CODE = 1001 @@ -20,12 +19,7 @@ private const val RECORD_AUDIO_REQUEST_CODE = 1001 class AudioRecorder : 
PluginRegistry.RequestPermissionsResultListener { private var permissions = arrayOf(Manifest.permission.RECORD_AUDIO) fun getDecibel(result: MethodChannel.Result, recorder: MediaRecorder?) { - val db = 20 * log10((recorder?.maxAmplitude?.toDouble() ?: 0.0 / 32768.0)) - if (db == Double.NEGATIVE_INFINITY) { - Log.d(LOG_TAG, "Microphone might be turned off") - } else { - result.success(db) - } + result.success(recorder?.maxAmplitude?.toDouble() ?: 0.0) } @RequiresApi(Build.VERSION_CODES.LOLLIPOP) diff --git a/example/lib/main.dart b/example/lib/main.dart index f11a60e..26394b5 100644 --- a/example/lib/main.dart +++ b/example/lib/main.dart @@ -65,8 +65,8 @@ class _HomeState extends State with WidgetsBindingObserver { ..androidEncoder = AndroidEncoder.aac ..androidOutputFormat = AndroidOutputFormat.mpeg4 ..iosEncoder = IosEncoder.kAudioFormatMPEG4AAC - ..sampleRate = 16000 - ..bitRate = 64000; + ..sampleRate = 44100 + ..bitRate = 48000; playerController1 = PlayerController() ..addListener(() { if (mounted) setState(() {}); @@ -310,7 +310,7 @@ class _HomeState extends State with WidgetsBindingObserver { await playerController5.preparePlayer(path: path); } } else { - await recorderController.record(path); + await recorderController.record(path: path); } setState(() { isRecording = !isRecording; diff --git a/ios/Classes/AudioRecorder.swift b/ios/Classes/AudioRecorder.swift index b27fde9..8d0f892 100644 --- a/ios/Classes/AudioRecorder.swift +++ b/ios/Classes/AudioRecorder.swift @@ -9,9 +9,9 @@ public class AudioRecorder: NSObject, AVAudioRecorderDelegate{ public func startRecording(_ result: @escaping FlutterResult,_ path: String?,_ encoder : Int?,_ sampleRate : Int?,_ bitRate : Int?,_ fileNameFormat: String){ let settings = [ - AVEncoderBitRateKey: bitRate ?? 64000, + AVEncoderBitRateKey: bitRate ?? 48000, AVFormatIDKey: getEncoder(encoder ?? 0), - AVSampleRateKey: sampleRate ?? 16000, + AVSampleRateKey: sampleRate ?? 
44100, AVNumberOfChannelsKey: 1, AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue ] @@ -62,8 +62,9 @@ public class AudioRecorder: NSObject, AVAudioRecorderDelegate{ public func getDecibel(_ result: @escaping FlutterResult) { audioRecorder?.updateMeters() - let amp = audioRecorder?.averagePower(forChannel: 0) ?? 0.0 - result(amp) + let amp = audioRecorder?.peakPower(forChannel: 0) ?? 0.0 + let linear = pow(10, amp / 20); + result(linear) } public func checkHasPermission(_ result: @escaping FlutterResult){ diff --git a/lib/src/audio_waveforms.dart b/lib/src/audio_waveforms.dart index e7fdcac..742202b 100644 --- a/lib/src/audio_waveforms.dart +++ b/lib/src/audio_waveforms.dart @@ -113,6 +113,7 @@ class _AudioWaveformsState extends State { widget.recorderController.setScrolledPositionDuration, shouldCalculateScrolledPosition: widget.shouldCalculateScrolledPosition, + scaleFactor: widget.waveStyle.scaleFactor, ), ), ), diff --git a/lib/src/base/utils.dart b/lib/src/base/utils.dart index 51c438d..726080f 100644 --- a/lib/src/base/utils.dart +++ b/lib/src/base/utils.dart @@ -166,3 +166,13 @@ extension WaveformTypeExtension on WaveformType { /// Check WaveformType is equals to long or not. bool get isLong => this == WaveformType.long; } + +extension RecorderStateExtension on RecorderState { + bool get isRecording => this == RecorderState.recording; + + bool get isInitialized => this == RecorderState.initialized; + + bool get isPaused => this == RecorderState.paused; + + bool get isStopped => this == RecorderState.stopped; +} diff --git a/lib/src/base/wave_style.dart b/lib/src/base/wave_style.dart index 6d4d580..d442891 100644 --- a/lib/src/base/wave_style.dart +++ b/lib/src/base/wave_style.dart @@ -82,8 +82,8 @@ class WaveStyle { /// ```dart final Shader? gradient; - /// This is applied to each wave while generating. - /// Use this to [scale] the waves. Defaults to 1.0. + /// Default normalised amplitude/power we have are between 0.0 and 1.0. 
+ /// So scale them, [scaleFactor] can be used. Defaults to 20.0. final double scaleFactor; /// A model class to provide style to the waveforms. @@ -112,7 +112,7 @@ class WaveStyle { this.durationTextPadding = 20.0, this.durationLinesColor = Colors.blueAccent, this.gradient, - this.scaleFactor = 1.0, + this.scaleFactor = 20.0, }) : assert(waveThickness < spacing, "waveThickness can't be greater than spacing"); } diff --git a/lib/src/controllers/recorder_controller.dart b/lib/src/controllers/recorder_controller.dart index 0f0cd5f..c140b17 100644 --- a/lib/src/controllers/recorder_controller.dart +++ b/lib/src/controllers/recorder_controller.dart @@ -1,5 +1,6 @@ import 'dart:async'; import 'dart:io' show Platform; +import 'dart:math' show max; import 'package:flutter/material.dart'; @@ -10,23 +11,26 @@ class RecorderController extends ChangeNotifier { final List _waveData = []; /// At which rate waveform needs to be updated - late Duration updateFrequency = const Duration(milliseconds: 100); + Duration updateFrequency = const Duration(milliseconds: 100); - late AndroidEncoder androidEncoder = AndroidEncoder.aac; + AndroidEncoder androidEncoder = AndroidEncoder.aac; - late AndroidOutputFormat androidOutputFormat = AndroidOutputFormat.mpeg4; + AndroidOutputFormat androidOutputFormat = AndroidOutputFormat.mpeg4; - late IosEncoder iosEncoder = IosEncoder.kAudioFormatMPEG4AAC; + IosEncoder iosEncoder = IosEncoder.kAudioFormatMPEG4AAC; - late int sampleRate = 44100; + int sampleRate = 44100; - late int bitRate = 48000; + int bitRate = 48000; - ///Db we get from native is too high so in Android it the value is subtracted - ///and in IOS value added - late double normalizationFactor = Platform.isAndroid ? 60 : 40; + /// Current maximum peak power for ios and peak amplitude android. + double _maxPeak = Platform.isIOS ? 1 : 32786.0; - ///Current list of decibels(different values for each platform) + /// Current list of scaled waves. 
For iOS, this list contains normalised + peak power and for Android, this list contains normalised peak + amplitude. + /// + /// Values are between 0.0 and 1.0.
this.bitRate, path: path, ); if (_isRecording) { @@ -137,13 +155,20 @@ class RecorderController extends ChangeNotifier { } /// Initialises recorder for android platform. - Future _initRecorder(String? path) async { + Future _initRecorder({ + String? path, + AndroidEncoder? androidEncoder, + AndroidOutputFormat? androidOutputFormat, + int? sampleRate, + int? bitRate, + }) async { final initialized = await AudioWaveformsInterface.instance.initRecorder( path: path, - encoder: androidEncoder.index, - outputFormat: androidOutputFormat.index, - sampleRate: sampleRate, - bitRate: bitRate, + encoder: androidEncoder?.index ?? this.androidEncoder.index, + outputFormat: + androidOutputFormat?.index ?? this.androidOutputFormat.index, + sampleRate: sampleRate ?? this.sampleRate, + bitRate: bitRate ?? this.bitRate, ); if (initialized) { _recorderState = RecorderState.initialized; @@ -172,7 +197,7 @@ class RecorderController extends ChangeNotifier { /// Pauses the current recording. Call [record] to resume recording. Future pause() async { - if (_recorderState == RecorderState.recording) { + if (_recorderState.isRecording) { _isRecording = (await AudioWaveformsInterface.instance.pause()) ?? true; if (_isRecording) { throw "Failed to pause recording"; @@ -195,8 +220,7 @@ class RecorderController extends ChangeNotifier { /// manually else it will start showing waveforms from same place where it /// left of for previous recording. 
Future stop([bool callReset = true]) async { - if (_recorderState == RecorderState.recording || - _recorderState == RecorderState.paused) { + if (_recorderState.isRecording || _recorderState.isPaused) { final path = await AudioWaveformsInterface.instance.stop(); if (path != null) { @@ -251,19 +275,12 @@ class RecorderController extends ChangeNotifier { ); } - /// Normalises the decibel - void _normalise(double db) { - if (Platform.isAndroid) { - waveData.add(db - normalizationFactor); - } else { - if (db == 0.0) { - waveData.add(0); - } else if (db + normalizationFactor < 1) { - waveData.add(0); - } else { - waveData.add(db + normalizationFactor); - } - } + /// Normalises the peak power for ios and peak amplitude for android + void _normalise(double peak) { + final absDb = peak.abs(); + _maxPeak = max(absDb, _maxPeak); + final scaledWave = (absDb / _maxPeak); + _waveData.add(scaledWave); notifyListeners(); } diff --git a/lib/src/painters/recorder_wave_painter.dart b/lib/src/painters/recorder_wave_painter.dart index 8bda079..de128c8 100644 --- a/lib/src/painters/recorder_wave_painter.dart +++ b/lib/src/painters/recorder_wave_painter.dart @@ -48,6 +48,7 @@ class RecorderWavePainter extends CustomPainter { final VoidCallback revertClearlabelCall; final Function(int) setCurrentPositionDuration; final bool shouldCalculateScrolledPosition; + final double scaleFactor; RecorderWavePainter({ required this.waveData, @@ -80,6 +81,7 @@ class RecorderWavePainter extends CustomPainter { required this.revertClearlabelCall, required this.setCurrentPositionDuration, required this.shouldCalculateScrolledPosition, + required this.scaleFactor, }) : _wavePaint = Paint() ..color = waveColor ..strokeWidth = waveThickness @@ -201,7 +203,7 @@ class RecorderWavePainter extends CustomPainter { dragOffset.dx + (spacing * i) - initialPosition, - -waveData[i] + size.height - bottomPadding), + -(waveData[i] * scaleFactor) + size.height - bottomPadding), _wavePaint); } @@ -218,7 +220,7 @@ class 
RecorderWavePainter extends CustomPainter { dragOffset.dx + (spacing * i) - initialPosition, - waveData[i] + size.height - bottomPadding), + (waveData[i] * scaleFactor) + size.height - bottomPadding), _wavePaint); } diff --git a/pubspec.yaml b/pubspec.yaml index 8281355..c92fed2 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -1,6 +1,6 @@ name: audio_waveforms description: A Flutter package that allow you to generate waveform while recording audio or from audio file. -version: 0.1.6 +version: 1.0.0 homepage: https://github.com/SimformSolutionsPvtLtd/audio_waveforms issue_tracker: https://github.com/SimformSolutionsPvtLtd/audio_waveforms/issues