The audiowave package is not displaying in the recorded audio. I am seeking assistance to ensure that the audiowave updates and changes dynamically while playing the audio. Does anyone know how to use flutter audiowaveform package correctly. when I try to use some other packages which is referred on youtube they are deprecated. Can anyone help with this issue?
audioplayer.dart
import 'dart:async';
import 'dart:ui' as ui;
import 'package:audio_wave/audio_wave.dart';
import 'package:audioplayers/audioplayers.dart';
import 'package:flutter/material.dart';
import 'package:provider/provider.dart';
import '../../../controller/audioprovider.dart';
import 'package:audio_waveforms/audio_waveforms.dart';
import '../../../model/chat/chatdetaildata.dart';
import 'package:flutter_audio_waveforms/flutter_audio_waveforms.dart';
class AudioPlayerWidget extends StatefulWidget {
final ChatMessage chatMessage;
AudioPlayerWidget(this.chatMessage);
@override
_AudioPlayerWidgetState createState() => _AudioPlayerWidgetState();
}
class _AudioPlayerWidgetState extends State<AudioPlayerWidget> {
final player = AudioPlayer();
bool isPlaying = false;
bool isAnimation = true;
Duration duration = Duration.zero;
Duration position = Duration.zero;
StreamSubscription? completionSubscription;
late Timer _timer;
@override
void initState() {
super.initState();
// Set up the timer to update the position every second while audio is playing
_timer = Timer.periodic(Duration(seconds: 1), (_) {
setState(() {
// Update the position only if the audio is currently playing
if (isPlaying) {
position = position + Duration(seconds: 1);
}
});
});
}
@override
void dispose() {
_timer.cancel();
player.dispose();
super.dispose();
}
Future<void> playAudio() async {
await player.play(UrlSource(widget.chatMessage.audioPath!));
setState(() {
isPlaying = true;
isAnimation = false;
});
completionSubscription = player.onPlayerComplete.listen((_) {
// Audio playback completed
setState(() {
isPlaying = false;
isAnimation = true;
position = Duration.zero;
});
});
}
Future<void> pauseAudio() async {
await player.pause();
setState(() {
isPlaying = false;
isAnimation = true;
});
completionSubscription?.cancel();
}
@override
Widget build(BuildContext context) {
final audioProvider = Provider.of<AudioProvider>(context, listen: true);
final formattedTime = audioProvider.getFormattedTime(widget.chatMessage.audioPath!);
final remainingTimeInSeconds = audioProvider.getSecondsFromFormattedTime(formattedTime) - position.inSeconds;
final remainingTime = audioProvider.getFormattedDuration(remainingTimeInSeconds);
// final remainingTime = audioProvider.getFormattedDuration(
// remainingTimeInSeconds >= 0 ? remainingTimeInSeconds : 0);
PlayerController controller = PlayerController();
// final formattedTime = audioProvider.getFormattedTime();
// print(formattedTime);
// print(audioProvider.hashCode);
// print(audioProvider.hashCode);
return GestureDetector(
onTap: () {
if (isPlaying) {
pauseAudio();
} else {
playAudio();
}
},
child: Align(
alignment: Alignment.centerRight, // Align to the right (sender's side)
child: Padding(
padding: const EdgeInsets.only(top: 8), // Add padding to the top
child: Container(
width: 170,
height: 45,
child: Stack(
children: [
Container(
width: 160,
height: 60,
decoration: BoxDecoration(
color: Colors.blue,
borderRadius: BorderRadius.circular(20), // Add border radius
),
child: Row(
children: [
Align(
alignment: Alignment.centerLeft,
child: IconButton(
icon: Icon(
isPlaying ? Icons.pause : Icons.play_arrow,
color: Colors.white,
),
onPressed: () {
if (isPlaying) {
pauseAudio();
} else {
playAudio();
}
},
),
),
Expanded(
child: AudioFileWaveforms(
backgroundColor: Colors.white,
size: Size(MediaQuery.of(context).size.width*0.2, 10.0),
playerController: controller,
playerWaveStyle: PlayerWaveStyle(
scaleFactor: 0.8,
fixedWaveColor: Colors.white30,
liveWaveColor: Colors.white,
waveCap: StrokeCap.butt,
),
),
),
],
),
),
// Expanded(
Padding(
padding: const EdgeInsets.only(top: 33.0, right: 18),
child: Row(
children: [
SizedBox(width: 20,),
Row(
mainAxisAlignment: MainAxisAlignment.start,
children: [
// final formattedTime = audioProvider.getFormattedTime();
// print(formattedTime);
Text(remainingTime, style: const TextStyle(fontSize: 8))
],
),
SizedBox(width: 90,),
Row(
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: [
// final formattedTime = audioProvider.getFormattedTime();
// print(formattedTime);
Text(formattedTime, style: const TextStyle(fontSize: 8))
],
),
],
),
),
],
),
),
),
),
);
}
}
audioprovider.dart
import 'dart:async';
import 'package:audioplayers/audioplayers.dart';
import 'package:flutter/foundation.dart';
import 'package:chatapp/model/chat/chatdetaildata.dart';
import 'package:flutter/material.dart';
import 'package:path_provider/path_provider.dart';
import 'package:record/record.dart';
import 'package:uuid/uuid.dart';
import '../model/audio/recorded_audio_data.dart';
class AudioProvider with ChangeNotifier {
List<ChatMessage> audioMessages = [];
final record = Record();
final textEditingController = TextEditingController();
String _formattedTime = "01:00"; // Default value
String get formattedTime => _formattedTime;
Timer? _durationTimer;
bool _isRecording = false;
String _recordedFilePath = '';
DateTime? _startTime; // To store the end time of recording
DateTime? _endTime; // To store the end time of recording
bool get isRecording => _isRecording;
String? get recordedFilePath => _recordedFilePath;
int _recordingDuration = 0;
int get recordingDuration => _recordingDuration;
Map<String, String> _formattedTimes = {};
// List<RecordedAudio> _recordedAudios = [];
// List<RecordedAudio> get recordedAudios => _recordedAudios;
Future<void> startRecording() async {
try {
if (await record.hasPermission()) {
final directory = await getApplicationDocumentsDirectory();
final uniqueFileName = Uuid().v4(); // Generate a unique file name using uuid
_recordedFilePath = '${directory.path}/$uniqueFileName.m4a';
await record.start(
path: _recordedFilePath!,
encoder: AudioEncoder.aacLc,
bitRate: 128000,
samplingRate: 44100,
);
_isRecording = true;
_startTime = DateTime.now();
print('Start recording');
notifyListeners();
_startRecordingDurationTimer();
}
} catch (e) {
print('Error: $e');
}
}
void _startRecordingDurationTimer() {
_durationTimer?.cancel();
_recordingDuration = 0; // Reset recording duration at the start of recording
_durationTimer = Timer.periodic(Duration(seconds: 1), (_) {
_recordingDuration++;
// Update the formatted time whenever the recording duration changes
final formattedTime = getFormattedDuration(_recordingDuration);
if (_recordedFilePath != null) {
setFormattedTime(formattedTime, _recordedFilePath!);
}
notifyListeners();
});
}