✨ Update recorder normalization and updated default values for sample rate and bit rate.
ujas-m-simformsolutions authored and Ujas-Majithiya committed Dec 20, 2022
1 parent 3a04b21 commit 8e5ba30
Showing 10 changed files with 90 additions and 63 deletions.
6 changes: 4 additions & 2 deletions CHANGELOG.md
@@ -1,7 +1,7 @@
## 1.0.0 (Unreleased)
## 1.0.0

- Reworked waveforms from audio file
- Breaking: removed `readingComplete` PlayerState and `visualizerHeight`. With this, added `extractWaveforms` function to extract waveforms.
- **Breaking**: removed `readingComplete` PlayerState and `visualizerHeight`. With this, added `extractWaveforms` function to extract waveforms.
- Added `WaveformType` enum for selecting longer or shorter type of waveform.
- Added `onCurrentExtractedWaveformData` and `onExtractionProgress` to monitor progress and currently extracted waveform data.
- Improved drawing of waveforms.
@@ -10,7 +10,9 @@
- Fixed [#101](https://github.com/SimformSolutionsPvtLtd/audio_waveforms/issues/101) - Fixed setting volume on Android throwing an error
- Fixed zero duration causing waveforms not to expand
- Fixed `postFrameCallback` error for flutter version below 3.0.0
- **BREAKING**: Replaced `normalizationFactor` with `scaleFactor`, and with this fixed [#43](https://github.com/SimformSolutionsPvtLtd/audio_waveforms/issues/43)
- Updated default values for bitRate and sampleRate
- Updated example app

## 0.1.5+1

@@ -12,20 +12,14 @@ import io.flutter.plugin.common.MethodChannel
import io.flutter.plugin.common.PluginRegistry
import java.io.IOException
import java.lang.IllegalStateException
import kotlin.math.log10

private const val LOG_TAG = "AudioWaveforms"
private const val RECORD_AUDIO_REQUEST_CODE = 1001

class AudioRecorder : PluginRegistry.RequestPermissionsResultListener {
private var permissions = arrayOf(Manifest.permission.RECORD_AUDIO)
fun getDecibel(result: MethodChannel.Result, recorder: MediaRecorder?) {
val db = 20 * log10((recorder?.maxAmplitude?.toDouble() ?: 0.0 / 32768.0))
if (db == Double.NEGATIVE_INFINITY) {
Log.d(LOG_TAG, "Microphone might be turned off")
} else {
result.success(db)
}
result.success(recorder?.maxAmplitude?.toDouble() ?: 0.0)
}

@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
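
A note on the change above: the removed Kotlin path converted `maxAmplitude` to decibels with `20 * log10(...)`, which is negative infinity whenever the amplitude is 0 (hence the "Microphone might be turned off" branch), and, as written, its `/ 32768.0` appears to apply to the elvis fallback rather than to the amplitude, so the reported value was higher than a true dBFS reading. After this commit `getDecibel` forwards the raw `maxAmplitude` (0 to 32767 for 16-bit PCM) and normalisation happens in the Dart controller; a sketch of that logic follows the `recorder_controller.dart` diff below.
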
6 changes: 3 additions & 3 deletions example/lib/main.dart
@@ -65,8 +65,8 @@ class _HomeState extends State<Home> with WidgetsBindingObserver {
..androidEncoder = AndroidEncoder.aac
..androidOutputFormat = AndroidOutputFormat.mpeg4
..iosEncoder = IosEncoder.kAudioFormatMPEG4AAC
..sampleRate = 16000
..bitRate = 64000;
..sampleRate = 44100
..bitRate = 48000;
playerController1 = PlayerController()
..addListener(() {
if (mounted) setState(() {});
@@ -310,7 +310,7 @@ class _HomeState extends State<Home> with WidgetsBindingObserver {
await playerController5.preparePlayer(path: path);
}
} else {
await recorderController.record(path);
await recorderController.record(path: path);
}
setState(() {
isRecording = !isRecording;
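
The example now uses the new defaults (44.1 kHz sample rate, 48 kbps bit rate) and the named `path` parameter introduced in `RecorderController.record`. A minimal usage sketch combining both changes; the output path is illustrative, everything else appears in this commit:

    final recorderController = RecorderController()
      ..androidEncoder = AndroidEncoder.aac
      ..androidOutputFormat = AndroidOutputFormat.mpeg4
      ..iosEncoder = IosEncoder.kAudioFormatMPEG4AAC
      ..sampleRate = 44100 // new default
      ..bitRate = 48000;   // new default
    // Inside an async function; `path` must now be passed as a named argument.
    await recorderController.record(path: '/storage/emulated/0/recording.m4a');
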
9 changes: 5 additions & 4 deletions ios/Classes/AudioRecorder.swift
@@ -9,9 +9,9 @@ public class AudioRecorder: NSObject, AVAudioRecorderDelegate{

public func startRecording(_ result: @escaping FlutterResult,_ path: String?,_ encoder : Int?,_ sampleRate : Int?,_ bitRate : Int?,_ fileNameFormat: String){
let settings = [
AVEncoderBitRateKey: bitRate ?? 64000,
AVEncoderBitRateKey: bitRate ?? 48000,
AVFormatIDKey: getEncoder(encoder ?? 0),
AVSampleRateKey: sampleRate ?? 16000,
AVSampleRateKey: sampleRate ?? 44100,
AVNumberOfChannelsKey: 1,
AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
]
@@ -62,8 +62,9 @@ public class AudioRecorder: NSObject, AVAudioRecorderDelegate{

public func getDecibel(_ result: @escaping FlutterResult) {
audioRecorder?.updateMeters()
let amp = audioRecorder?.averagePower(forChannel: 0) ?? 0.0
result(amp)
let amp = audioRecorder?.peakPower(forChannel: 0) ?? 0.0
let linear = pow(10, amp / 20);
result(linear)
}

public func checkHasPermission(_ result: @escaping FlutterResult){
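
The iOS side now reads `peakPower(forChannel:)`, a dBFS value at or below zero, and converts it to a linear amplitude with 10^(dB / 20) before replying, so Dart receives a value between 0.0 and 1.0 rather than a negative decibel figure. A small Dart sketch of the same conversion with a few worked values (the function name is illustrative):

    import 'dart:math' show pow;

    // dBFS (<= 0) to linear amplitude, mirroring the Swift change above.
    double dbToLinear(double db) => pow(10, db / 20).toDouble();

    void main() {
      print(dbToLinear(0));   // 1.0  (full scale)
      print(dbToLinear(-20)); // 0.1
      print(dbToLinear(-60)); // 0.001
    }
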
1 change: 1 addition & 0 deletions lib/src/audio_waveforms.dart
@@ -113,6 +113,7 @@ class _AudioWaveformsState extends State<AudioWaveforms> {
widget.recorderController.setScrolledPositionDuration,
shouldCalculateScrolledPosition:
widget.shouldCalculateScrolledPosition,
scaleFactor: widget.waveStyle.scaleFactor,
),
),
),
10 changes: 10 additions & 0 deletions lib/src/base/utils.dart
@@ -166,3 +166,13 @@ extension WaveformTypeExtension on WaveformType {
/// Checks whether the WaveformType is long or not.
bool get isLong => this == WaveformType.long;
}

extension RecorderStateExtension on RecorderState {
bool get isRecording => this == RecorderState.recording;

bool get isInitialized => this == RecorderState.initialized;

bool get isPaused => this == RecorderState.paused;

bool get isStopped => this == RecorderState.stopped;
}
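
These getters let the controller code below replace direct enum comparisons such as `_recorderState == RecorderState.recording`. A self-contained sketch, assuming a `RecorderState` value obtained from the controller (the public getter itself is not part of this diff):

    String describe(RecorderState state) {
      if (state.isRecording) return 'recording';
      if (state.isPaused) return 'paused';
      if (state.isInitialized) return 'initialised';
      return 'stopped'; // state.isStopped
    }
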
6 changes: 3 additions & 3 deletions lib/src/base/wave_style.dart
@@ -82,8 +82,8 @@ class WaveStyle {
/// ```dart
final Shader? gradient;

/// This is applied to each wave while generating.
/// Use this to [scale] the waves. Defaults to 1.0.
/// Normalised amplitude/power values are between 0.0 and 1.0 by default.
/// Use [scaleFactor] to scale them. Defaults to 20.0.
final double scaleFactor;

/// A model class to provide style to the waveforms.
@@ -112,7 +112,7 @@ class WaveStyle {
this.durationTextPadding = 20.0,
this.durationLinesColor = Colors.blueAccent,
this.gradient,
this.scaleFactor = 1.0,
this.scaleFactor = 20.0,
}) : assert(waveThickness < spacing,
"waveThickness can't be greater than spacing");
}
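
Because the controller now emits amplitudes normalised to values between 0.0 and 1.0 (see `recorder_controller.dart` below), `scaleFactor` is the multiplier that turns them back into pixel heights, and its default moves from 1.0 to 20.0. A minimal sketch, where 100 is an arbitrary illustrative value:

    // Migrating from the removed `normalizationFactor`: pick a scaleFactor that
    // gives the wave height you want; a normalised amplitude of 1.0 maps to
    // `scaleFactor` logical pixels above and below the baseline in the painter.
    final style = WaveStyle(scaleFactor: 100);
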
99 changes: 58 additions & 41 deletions lib/src/controllers/recorder_controller.dart
@@ -1,5 +1,6 @@
import 'dart:async';
import 'dart:io' show Platform;
import 'dart:math' show max;

import 'package:flutter/material.dart';

@@ -10,23 +11,26 @@ class RecorderController extends ChangeNotifier {
final List<double> _waveData = [];

/// Rate at which the waveform needs to be updated
late Duration updateFrequency = const Duration(milliseconds: 100);
Duration updateFrequency = const Duration(milliseconds: 100);

late AndroidEncoder androidEncoder = AndroidEncoder.aac;
AndroidEncoder androidEncoder = AndroidEncoder.aac;

late AndroidOutputFormat androidOutputFormat = AndroidOutputFormat.mpeg4;
AndroidOutputFormat androidOutputFormat = AndroidOutputFormat.mpeg4;

late IosEncoder iosEncoder = IosEncoder.kAudioFormatMPEG4AAC;
IosEncoder iosEncoder = IosEncoder.kAudioFormatMPEG4AAC;

late int sampleRate = 44100;
int sampleRate = 44100;

late int bitRate = 48000;
int bitRate = 48000;

///Db we get from native is too high, so on Android the value is subtracted
///and on iOS the value is added
late double normalizationFactor = Platform.isAndroid ? 60 : 40;
/// Current maximum peak power for iOS and peak amplitude for Android.
double _maxPeak = Platform.isIOS ? 1 : 32786.0;

///Current list of decibels(different values for each platform)
/// Current list of scaled waves. For iOS, this list contains normalised
/// peak power and for Android, this list contains normalised peak
/// amplitude.
///
/// Values are between 0.0 to 1.0.
List<double> get waveData => _waveData;

RecorderState _recorderState = RecorderState.stopped;
@@ -92,14 +96,27 @@ class RecorderController extends ChangeNotifier {
///
/// 2. Stopped -: If a recorder is stopped from previous recording and again
/// this function is called then it will re-initialise the recorder.
Future<void> record([String? path]) async {
if (_recorderState != RecorderState.recording) {
Future<void> record({
String? path,
AndroidEncoder? androidEncoder,
AndroidOutputFormat? androidOutputFormat,
IosEncoder? iosEncoder,
int? sampleRate,
int? bitRate,
}) async {
if (!_recorderState.isRecording) {
await checkPermission();
if (_hasPermission) {
if (Platform.isAndroid && _recorderState == RecorderState.stopped) {
await _initRecorder(path);
if (Platform.isAndroid && _recorderState.isStopped) {
await _initRecorder(
path: path,
androidEncoder: androidEncoder,
androidOutputFormat: androidOutputFormat,
sampleRate: sampleRate,
bitRate: bitRate,
);
}
if (_recorderState == RecorderState.paused) {
if (_recorderState.isPaused) {
_isRecording = await AudioWaveformsInterface.instance.resume();
if (_isRecording) {
_startTimer();
@@ -113,12 +130,13 @@
if (Platform.isIOS) {
_recorderState = RecorderState.initialized;
}
if (_recorderState == RecorderState.initialized) {
if (_recorderState.isInitialized) {
_isRecording = await AudioWaveformsInterface.instance.record(
audioFormat:
Platform.isIOS ? iosEncoder.index : androidEncoder.index,
sampleRate: sampleRate,
bitRate: bitRate,
audioFormat: Platform.isIOS
? iosEncoder?.index ?? this.iosEncoder.index
: androidEncoder?.index ?? this.androidEncoder.index,
sampleRate: sampleRate ?? this.sampleRate,
bitRate: bitRate ?? this.bitRate,
path: path,
);
if (_isRecording) {
@@ -137,13 +155,20 @@
}

/// Initialises recorder for android platform.
Future<void> _initRecorder(String? path) async {
Future<void> _initRecorder({
String? path,
AndroidEncoder? androidEncoder,
AndroidOutputFormat? androidOutputFormat,
int? sampleRate,
int? bitRate,
}) async {
final initialized = await AudioWaveformsInterface.instance.initRecorder(
path: path,
encoder: androidEncoder.index,
outputFormat: androidOutputFormat.index,
sampleRate: sampleRate,
bitRate: bitRate,
encoder: androidEncoder?.index ?? this.androidEncoder.index,
outputFormat:
androidOutputFormat?.index ?? this.androidOutputFormat.index,
sampleRate: sampleRate ?? this.sampleRate,
bitRate: bitRate ?? this.bitRate,
);
if (initialized) {
_recorderState = RecorderState.initialized;
@@ -172,7 +197,7 @@

/// Pauses the current recording. Call [record] to resume recording.
Future<void> pause() async {
if (_recorderState == RecorderState.recording) {
if (_recorderState.isRecording) {
_isRecording = (await AudioWaveformsInterface.instance.pause()) ?? true;
if (_isRecording) {
throw "Failed to pause recording";
@@ -195,8 +220,7 @@
/// manually else it will start showing waveforms from same place where it
/// left off for the previous recording.
Future<String?> stop([bool callReset = true]) async {
if (_recorderState == RecorderState.recording ||
_recorderState == RecorderState.paused) {
if (_recorderState.isRecording || _recorderState.isPaused) {
final path = await AudioWaveformsInterface.instance.stop();

if (path != null) {
@@ -251,19 +275,12 @@
);
}

/// Normalises the decibel
void _normalise(double db) {
if (Platform.isAndroid) {
waveData.add(db - normalizationFactor);
} else {
if (db == 0.0) {
waveData.add(0);
} else if (db + normalizationFactor < 1) {
waveData.add(0);
} else {
waveData.add(db + normalizationFactor);
}
}
/// Normalises the peak power for iOS and peak amplitude for Android
void _normalise(double peak) {
final absDb = peak.abs();
_maxPeak = max(absDb, _maxPeak);
final scaledWave = (absDb / _maxPeak);
_waveData.add(scaledWave);
notifyListeners();
}

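
The per-platform dB offset (`normalizationFactor`, 60 on Android and 40 on iOS) is gone. Both platforms now send a raw peak (linear amplitude from `peakPower` on iOS, `maxAmplitude` on Android), and `_normalise` keeps a running maximum and stores `peak / maxPeak`, so `waveData` always holds values between 0.0 and 1.0. A standalone sketch of the same logic (the Android seed below is the diff's literal 32786.0, presumably the 16-bit full-scale value):

    import 'dart:math' show max;

    double maxPeak = 32786.0; // Android seed; the iOS seed in the diff is 1.0

    double normalise(double peak) {
      final absPeak = peak.abs();
      maxPeak = max(absPeak, maxPeak); // grow the reference if a louder peak arrives
      return absPeak / maxPeak;        // always within 0.0 and 1.0
    }

    void main() {
      for (final p in [1200.0, 8000.0, 33000.0, 500.0]) {
        print(normalise(p)); // ≈0.037, ≈0.244, 1.0, ≈0.015
      }
    }
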
6 changes: 4 additions & 2 deletions lib/src/painters/recorder_wave_painter.dart
@@ -48,6 +48,7 @@ class RecorderWavePainter extends CustomPainter {
final VoidCallback revertClearlabelCall;
final Function(int) setCurrentPositionDuration;
final bool shouldCalculateScrolledPosition;
final double scaleFactor;

RecorderWavePainter({
required this.waveData,
@@ -80,6 +81,7 @@
required this.revertClearlabelCall,
required this.setCurrentPositionDuration,
required this.shouldCalculateScrolledPosition,
required this.scaleFactor,
}) : _wavePaint = Paint()
..color = waveColor
..strokeWidth = waveThickness
@@ -201,7 +203,7 @@
dragOffset.dx +
(spacing * i) -
initialPosition,
-waveData[i] + size.height - bottomPadding),
-(waveData[i] * scaleFactor) + size.height - bottomPadding),
_wavePaint);
}

@@ -218,7 +220,7 @@
dragOffset.dx +
(spacing * i) -
initialPosition,
waveData[i] + size.height - bottomPadding),
(waveData[i] * scaleFactor) + size.height - bottomPadding),
_wavePaint);
}

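
In the painter, each bar now extends `waveData[i] * scaleFactor` logical pixels above and below the `size.height - bottomPadding` baseline instead of using the raw value directly. With the new defaults (normalised values between 0.0 and 1.0 and a `scaleFactor` of 20.0), a sample of 0.5 draws 10 px up and 10 px down, giving a 20 px tall bar.
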
2 changes: 1 addition & 1 deletion pubspec.yaml
@@ -1,6 +1,6 @@
name: audio_waveforms
description: A Flutter package that allows you to generate waveforms while recording audio or from an audio file.
version: 0.1.6
version: 1.0.0
homepage: https://github.com/SimformSolutionsPvtLtd/audio_waveforms
issue_tracker: https://github.com/SimformSolutionsPvtLtd/audio_waveforms/issues

