Merge branch 'krille/recording-dialog' into 'main'

feat: Recording dialog displaying amplitude

See merge request famedly/fluffychat!660
This commit is contained in:
Krille Fear 2022-01-01 14:36:51 +00:00
commit e75436771f
4 changed files with 149 additions and 74 deletions

View File

@ -422,6 +422,10 @@ class ChatController extends State<Chat> {
'duration': result.duration, 'duration': result.duration,
}, },
'org.matrix.msc3245.voice': {}, 'org.matrix.msc3245.voice': {},
'org.matrix.msc1767.audio': {
'duration': result.duration,
'waveform': result.waveform,
},
}), }),
); );
setState(() { setState(() {

View File

@ -1,3 +1,5 @@
//@dart=2.12
import 'dart:async'; import 'dart:async';
import 'dart:io'; import 'dart:io';
@ -8,6 +10,7 @@ import 'package:flutter_gen/gen_l10n/l10n.dart';
import 'package:matrix/matrix.dart'; import 'package:matrix/matrix.dart';
import 'package:path_provider/path_provider.dart'; import 'package:path_provider/path_provider.dart';
import 'package:fluffychat/config/app_config.dart';
import 'package:fluffychat/utils/sentry_controller.dart'; import 'package:fluffychat/utils/sentry_controller.dart';
import '../../../utils/matrix_sdk_extensions.dart/event_extension.dart'; import '../../../utils/matrix_sdk_extensions.dart/event_extension.dart';
@ -15,9 +18,9 @@ class AudioPlayerWidget extends StatefulWidget {
final Color color; final Color color;
final Event event; final Event event;
static String currentId; static String? currentId;
const AudioPlayerWidget(this.event, {this.color = Colors.black, Key key}) const AudioPlayerWidget(this.event, {this.color = Colors.black, Key? key})
: super(key: key); : super(key: key);
@override @override
@ -30,16 +33,16 @@ class _AudioPlayerState extends State<AudioPlayerWidget> {
AudioPlayerStatus status = AudioPlayerStatus.notDownloaded; AudioPlayerStatus status = AudioPlayerStatus.notDownloaded;
final AudioPlayer audioPlayer = AudioPlayer(); final AudioPlayer audioPlayer = AudioPlayer();
StreamSubscription onAudioPositionChanged; StreamSubscription? onAudioPositionChanged;
StreamSubscription onDurationChanged; StreamSubscription? onDurationChanged;
StreamSubscription onPlayerStateChanged; StreamSubscription? onPlayerStateChanged;
StreamSubscription onPlayerError; StreamSubscription? onPlayerError;
String statusText; String? statusText;
double currentPosition = 0; int currentPosition = 0;
double maxPosition = 0; double maxPosition = 0;
File audioFile; File? audioFile;
@override @override
void dispose() { void dispose() {
@ -60,6 +63,7 @@ class _AudioPlayerState extends State<AudioPlayerWidget> {
try { try {
final matrixFile = final matrixFile =
await widget.event.downloadAndDecryptAttachmentCached(); await widget.event.downloadAndDecryptAttachmentCached();
if (matrixFile == null) throw ('Download failed');
final tempDir = await getTemporaryDirectory(); final tempDir = await getTemporaryDirectory();
final fileName = final fileName =
widget.event.content.tryGet<String>('filename') ?? matrixFile.name; widget.event.content.tryGet<String>('filename') ?? matrixFile.name;
@ -86,7 +90,7 @@ class _AudioPlayerState extends State<AudioPlayerWidget> {
if (AudioPlayerWidget.currentId != null) { if (AudioPlayerWidget.currentId != null) {
if (audioPlayer.state != PlayerState.STOPPED) { if (audioPlayer.state != PlayerState.STOPPED) {
await audioPlayer.stop(); await audioPlayer.stop();
setState(() => null); setState(() {});
} }
} }
AudioPlayerWidget.currentId = widget.event.eventId; AudioPlayerWidget.currentId = widget.event.eventId;
@ -105,30 +109,31 @@ class _AudioPlayerState extends State<AudioPlayerWidget> {
setState(() { setState(() {
statusText = statusText =
'${state.inMinutes.toString().padLeft(2, '0')}:${(state.inSeconds % 60).toString().padLeft(2, '0')}'; '${state.inMinutes.toString().padLeft(2, '0')}:${(state.inSeconds % 60).toString().padLeft(2, '0')}';
currentPosition = state.inMilliseconds.toDouble(); currentPosition =
((state.inMilliseconds.toDouble() / maxPosition) * 100).round();
}); });
}); });
onDurationChanged ??= audioPlayer.onDurationChanged.listen((max) => onDurationChanged ??= audioPlayer.onDurationChanged.listen((max) =>
setState(() => maxPosition = max.inMilliseconds.toDouble())); setState(() => maxPosition = max.inMilliseconds.toDouble()));
onPlayerStateChanged ??= audioPlayer.onPlayerStateChanged onPlayerStateChanged ??=
.listen((_) => setState(() => null)); audioPlayer.onPlayerStateChanged.listen((_) => setState(() {}));
onPlayerError ??= audioPlayer.onPlayerError.listen((e) { onPlayerError ??= audioPlayer.onPlayerError.listen((e) {
ScaffoldMessenger.of(context).showSnackBar( ScaffoldMessenger.of(context).showSnackBar(
SnackBar( SnackBar(
content: Text(L10n.of(context).oopsSomethingWentWrong), content: Text(L10n.of(context)!.oopsSomethingWentWrong),
), ),
); );
SentryController.captureException(e, StackTrace.current); SentryController.captureException(e, StackTrace.current);
}); });
await audioPlayer.play(audioFile.path); await audioPlayer.play(audioFile!.path);
break; break;
} }
} }
static const double buttonSize = 36; static const double buttonSize = 36;
String get _durationString { String? get _durationString {
final durationInt = widget.event.content final durationInt = widget.event.content
.tryGetMap<String, dynamic>('info') .tryGetMap<String, dynamic>('info')
?.tryGet<int>('duration'); ?.tryGet<int>('duration');
@ -137,9 +142,30 @@ class _AudioPlayerState extends State<AudioPlayerWidget> {
return '${duration.inMinutes.toString().padLeft(2, '0')}:${(duration.inSeconds % 60).toString().padLeft(2, '0')}'; return '${duration.inMinutes.toString().padLeft(2, '0')}:${(duration.inSeconds % 60).toString().padLeft(2, '0')}';
} }
List<int> get waveform {
final eventWaveForm = widget.event.content
.tryGetMap<String, dynamic>('org.matrix.msc1767.audio')
?.tryGetList<int>('waveform');
if (eventWaveForm == null) {
return List<int>.filled(100, 500);
}
while (eventWaveForm.length < 100) {
for (var i = 0; i < eventWaveForm.length; i = i + 2) {
eventWaveForm.insert(i, eventWaveForm[i]);
}
}
var i = 0;
final step = (eventWaveForm.length / 100).round();
while (eventWaveForm.length > 100) {
eventWaveForm.removeAt(i);
i = (i + step) % 100;
}
return eventWaveForm;
}
@override @override
Widget build(BuildContext context) { Widget build(BuildContext context) {
statusText ??= _durationString ?? '00:00'; final statusText = this.statusText ??= _durationString ?? '00:00';
return Padding( return Padding(
padding: const EdgeInsets.symmetric(horizontal: 6.0), padding: const EdgeInsets.symmetric(horizontal: 6.0),
child: Row( child: Row(
@ -172,23 +198,42 @@ class _AudioPlayerState extends State<AudioPlayerWidget> {
}, },
), ),
), ),
const SizedBox(width: 8),
Expanded( Expanded(
child: Slider( child: Row(
activeColor: Theme.of(context).colorScheme.secondaryVariant, children: [
inactiveColor: widget.color.withAlpha(64), for (var i = 0; i < 100; i++)
value: currentPosition, Expanded(
onChanged: (double position) => child: InkWell(
audioPlayer.seek(Duration(milliseconds: position.toInt())), onTap: () => audioPlayer.seek(Duration(
max: status == AudioPlayerStatus.downloaded ? maxPosition : 0, milliseconds: (maxPosition / 100).round() * i)),
min: 0, child: Opacity(
opacity: currentPosition > i ? 1 : 0.5,
child: Container(
margin: const EdgeInsets.only(left: 2),
decoration: BoxDecoration(
color: Theme.of(context).colorScheme.primary,
borderRadius:
BorderRadius.circular(AppConfig.borderRadius),
),
height: 64 * (waveform[i] / 1024)),
), ),
), ),
Text( )
],
),
),
const SizedBox(width: 8),
Container(
alignment: Alignment.centerRight,
width: 42,
child: Text(
statusText, statusText,
style: TextStyle( style: TextStyle(
color: widget.color, color: widget.color,
), ),
), ),
),
], ],
), ),
); );

View File

@ -1,3 +1,5 @@
//@dart=2.12
import 'dart:async'; import 'dart:async';
import 'package:flutter/cupertino.dart'; import 'package:flutter/cupertino.dart';
@ -8,13 +10,14 @@ import 'package:path_provider/path_provider.dart';
import 'package:record/record.dart'; import 'package:record/record.dart';
import 'package:wakelock/wakelock.dart'; import 'package:wakelock/wakelock.dart';
import 'package:fluffychat/config/app_config.dart';
import 'package:fluffychat/utils/platform_infos.dart'; import 'package:fluffychat/utils/platform_infos.dart';
import 'package:fluffychat/utils/sentry_controller.dart'; import 'package:fluffychat/utils/sentry_controller.dart';
class RecordingDialog extends StatefulWidget { class RecordingDialog extends StatefulWidget {
static const String recordingFileType = 'm4a'; static const String recordingFileType = 'm4a';
const RecordingDialog({ const RecordingDialog({
Key key, Key? key,
}) : super(key: key); }) : super(key: key);
@override @override
@ -22,13 +25,13 @@ class RecordingDialog extends StatefulWidget {
} }
class _RecordingDialogState extends State<RecordingDialog> { class _RecordingDialogState extends State<RecordingDialog> {
Timer _recorderSubscription; Timer? _recorderSubscription;
Duration _duration = Duration.zero; Duration _duration = Duration.zero;
bool error = false; bool error = false;
String _recordedPath; String? _recordedPath;
final _audioRecorder = Record(); final _audioRecorder = Record();
Amplitude _amplitude; final List<double> amplitudeTimeline = [];
static const int bitRate = 64000; static const int bitRate = 64000;
static const double samplingRate = 22050.0; static const double samplingRate = 22050.0;
@ -55,7 +58,10 @@ class _RecordingDialogState extends State<RecordingDialog> {
_recorderSubscription?.cancel(); _recorderSubscription?.cancel();
_recorderSubscription = _recorderSubscription =
Timer.periodic(const Duration(milliseconds: 100), (_) async { Timer.periodic(const Duration(milliseconds: 100), (_) async {
_amplitude = await _audioRecorder.getAmplitude(); final amplitude = await _audioRecorder.getAmplitude();
var value = 100 + amplitude.current * 2;
value = value < 1 ? 1 : value;
amplitudeTimeline.add(value);
setState(() { setState(() {
_duration += const Duration(milliseconds: 100); _duration += const Duration(milliseconds: 100);
}); });
@ -83,52 +89,64 @@ class _RecordingDialogState extends State<RecordingDialog> {
void _stopAndSend() async { void _stopAndSend() async {
_recorderSubscription?.cancel(); _recorderSubscription?.cancel();
await _audioRecorder.stop(); await _audioRecorder.stop();
Navigator.of(context, rootNavigator: false) final path = _recordedPath;
.pop<RecordingResult>(RecordingResult( if (path == null) throw ('Recording failed!');
path: _recordedPath, final step = amplitudeTimeline.length < 100
? 1
: (amplitudeTimeline.length / 100).round();
final waveform = <int>[];
for (var i = 0; i < amplitudeTimeline.length; i += step) {
waveform.add((amplitudeTimeline[i] / 100 * 1024).round());
}
Navigator.of(context, rootNavigator: false).pop<RecordingResult>(
RecordingResult(
path: path,
duration: _duration.inMilliseconds, duration: _duration.inMilliseconds,
)); waveform: waveform,
),
);
} }
@override @override
Widget build(BuildContext context) { Widget build(BuildContext context) {
const maxDecibalWidth = 64.0; const maxDecibalWidth = 64.0;
final decibalWidth =
((_amplitude == null || _amplitude.current == double.negativeInfinity
? 0
: 1 / _amplitude.current / _amplitude.max)
.abs() +
2) *
(maxDecibalWidth / 4).toDouble();
final time = final time =
'${_duration.inMinutes.toString().padLeft(2, '0')}:${(_duration.inSeconds % 60).toString().padLeft(2, '0')}'; '${_duration.inMinutes.toString().padLeft(2, '0')}:${(_duration.inSeconds % 60).toString().padLeft(2, '0')}';
final content = error final content = error
? Text(L10n.of(context).oopsSomethingWentWrong) ? Text(L10n.of(context)!.oopsSomethingWentWrong)
: Row( : Row(
children: <Widget>[ children: [
Container( Container(
width: maxDecibalWidth, width: 16,
height: maxDecibalWidth, height: 16,
alignment: Alignment.center,
child: AnimatedContainer(
duration: const Duration(milliseconds: 100),
width: decibalWidth,
height: decibalWidth,
decoration: BoxDecoration( decoration: BoxDecoration(
borderRadius: BorderRadius.circular(32),
color: Colors.red, color: Colors.red,
borderRadius: BorderRadius.circular(decibalWidth),
), ),
), ),
),
const SizedBox(width: 8),
Expanded( Expanded(
child: Text( child: Center(
'${L10n.of(context).recording}: $time', child: Row(
style: const TextStyle( mainAxisSize: MainAxisSize.min,
fontSize: 18, mainAxisAlignment: MainAxisAlignment.end,
children: amplitudeTimeline.reversed
.take(26)
.toList()
.reversed
.map((amplitude) => Container(
margin: const EdgeInsets.only(left: 2),
width: 4,
decoration: BoxDecoration(
color: Theme.of(context).colorScheme.primary,
borderRadius:
BorderRadius.circular(AppConfig.borderRadius),
),
height: maxDecibalWidth * (amplitude / 100)))
.toList(),
), ),
), ),
), ),
Text(time),
], ],
); );
if (PlatformInfos.isCupertinoStyle) { if (PlatformInfos.isCupertinoStyle) {
@ -138,17 +156,20 @@ class _RecordingDialogState extends State<RecordingDialog> {
CupertinoDialogAction( CupertinoDialogAction(
onPressed: () => Navigator.of(context, rootNavigator: false).pop(), onPressed: () => Navigator.of(context, rootNavigator: false).pop(),
child: Text( child: Text(
L10n.of(context).cancel.toUpperCase(), L10n.of(context)!.cancel.toUpperCase(),
style: TextStyle( style: TextStyle(
color: color: Theme.of(context)
Theme.of(context).textTheme.bodyText2.color.withAlpha(150), .textTheme
.bodyText2
?.color
?.withAlpha(150),
), ),
), ),
), ),
if (error != true) if (error != true)
CupertinoDialogAction( CupertinoDialogAction(
onPressed: _stopAndSend, onPressed: _stopAndSend,
child: Text(L10n.of(context).send.toUpperCase()), child: Text(L10n.of(context)!.send.toUpperCase()),
), ),
], ],
); );
@ -159,9 +180,10 @@ class _RecordingDialogState extends State<RecordingDialog> {
TextButton( TextButton(
onPressed: () => Navigator.of(context, rootNavigator: false).pop(), onPressed: () => Navigator.of(context, rootNavigator: false).pop(),
child: Text( child: Text(
L10n.of(context).cancel.toUpperCase(), L10n.of(context)!.cancel.toUpperCase(),
style: TextStyle( style: TextStyle(
color: Theme.of(context).textTheme.bodyText2.color.withAlpha(150), color:
Theme.of(context).textTheme.bodyText2?.color?.withAlpha(150),
), ),
), ),
), ),
@ -171,7 +193,7 @@ class _RecordingDialogState extends State<RecordingDialog> {
child: Row( child: Row(
mainAxisSize: MainAxisSize.min, mainAxisSize: MainAxisSize.min,
children: <Widget>[ children: <Widget>[
Text(L10n.of(context).send.toUpperCase()), Text(L10n.of(context)!.send.toUpperCase()),
const SizedBox(width: 4), const SizedBox(width: 4),
const Icon(Icons.send_outlined, size: 15), const Icon(Icons.send_outlined, size: 15),
], ],
@ -185,20 +207,24 @@ class _RecordingDialogState extends State<RecordingDialog> {
class RecordingResult { class RecordingResult {
final String path; final String path;
final int duration; final int duration;
final List<int> waveform;
const RecordingResult({ const RecordingResult({
@required this.path, required this.path,
@required this.duration, required this.duration,
required this.waveform,
}); });
factory RecordingResult.fromJson(Map<String, dynamic> json) => factory RecordingResult.fromJson(Map<String, dynamic> json) =>
RecordingResult( RecordingResult(
path: json['path'], path: json['path'],
duration: json['duration'], duration: json['duration'],
waveform: List<int>.from(json['waveform']),
); );
Map<String, dynamic> toJson() => { Map<String, dynamic> toJson() => {
'path': path, 'path': path,
'duration': duration, 'duration': duration,
'waveform': waveform,
}; };
} }

View File

@ -61,7 +61,7 @@ dependencies:
qr_code_scanner: ^0.6.1 qr_code_scanner: ^0.6.1
qr_flutter: ^4.0.0 qr_flutter: ^4.0.0
receive_sharing_intent: ^1.4.5 receive_sharing_intent: ^1.4.5
record: ^3.0.0 record: ^3.0.2
salomon_bottom_bar: ^3.1.0 salomon_bottom_bar: ^3.1.0
scroll_to_index: ^2.1.0 scroll_to_index: ^2.1.0
sentry: ^6.0.1 sentry: ^6.0.1