fluffychat/lib/pages/chat/recording_dialog.dart

import 'dart:async';
import 'package:flutter/cupertino.dart';
import 'package:flutter/material.dart';

import 'package:flutter_gen/gen_l10n/l10n.dart';
import 'package:path_provider/path_provider.dart';
import 'package:record/record.dart';
import 'package:wakelock/wakelock.dart';

import 'package:fluffychat/utils/platform_infos.dart';
import 'package:fluffychat/utils/sentry_controller.dart';
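
/// Dialog that records audio from the microphone while it is open and pops
/// with a [RecordingResult] when the user taps send (or with `null` on
/// cancel).
///
/// Usage sketch (the exact call site in the app may differ; this assumes the
/// dialog is opened with plain [showDialog]):
///
/// ```dart
/// final result = await showDialog<RecordingResult>(
///   context: context,
///   builder: (context) => const RecordingDialog(),
/// );
/// ```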
class RecordingDialog extends StatefulWidget {
  static const String recordingFileType = 'm4a';

  const RecordingDialog({
    Key key,
  }) : super(key: key);

  @override
  _RecordingDialogState createState() => _RecordingDialogState();
}

class _RecordingDialogState extends State<RecordingDialog> {
  /// Ticks every 100 ms to refresh the elapsed time and amplitude reading.
  Timer _recorderSubscription;
  Duration _duration = Duration.zero;

  bool error = false;

  /// Path of the temporary file the recording is written to.
  String _recordedPath;
  final _audioRecorder = Record();

  /// Latest amplitude reading, used to animate the red circle in [build].
  Amplitude _amplitude;
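
  /// Creates a temporary output file, checks the microphone permission and
  /// starts the recorder; a periodic timer then updates [_duration] and
  /// [_amplitude] every 100 ms. Any failure sets [error] instead of throwing.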
  Future<void> startRecording() async {
    try {
      final tempDir = await getTemporaryDirectory();
      _recordedPath =
          '${tempDir.path}/recording${DateTime.now().microsecondsSinceEpoch}.${RecordingDialog.recordingFileType}';

      final result = await _audioRecorder.hasPermission();
      if (result != true) {
        setState(() => error = true);
        return;
      }
      await Wakelock.enable();
      await _audioRecorder.start(
          path: _recordedPath, encoder: AudioEncoder.AAC);
      setState(() => _duration = Duration.zero);
      _recorderSubscription?.cancel();
      _recorderSubscription =
          Timer.periodic(const Duration(milliseconds: 100), (_) async {
        _amplitude = await _audioRecorder.getAmplitude();
        setState(() {
          _duration += const Duration(milliseconds: 100);
        });
      });
    } catch (e, s) {
      SentryController.captureException(e, s);
      setState(() => error = true);
    }
  }

  @override
  void initState() {
    super.initState();
    startRecording();
  }

  @override
  void dispose() {
    Wakelock.disable();
    _recorderSubscription?.cancel();
    _audioRecorder.stop();
    super.dispose();
  }
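
  /// Stops the recorder and closes the dialog, returning the recorded file
  /// path and its duration in milliseconds to the caller.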
  void _stopAndSend() async {
    _recorderSubscription?.cancel();
    await _audioRecorder.stop();
    Navigator.of(context, rootNavigator: false)
        .pop<RecordingResult>(RecordingResult(
      path: _recordedPath,
      duration: _duration.inMilliseconds,
    ));
  }
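
  /// Builds a platform-adaptive dialog (Cupertino or Material) showing a
  /// pulsing red circle driven by the microphone amplitude, the elapsed
  /// recording time, and cancel/send actions.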
  @override
  Widget build(BuildContext context) {
    const maxDecibalWidth = 64.0;
    // Map the latest amplitude reading onto the diameter of the animated
    // circle; without a reading (or on silence) it falls back to the
    // baseline size.
    final decibalWidth =
        ((_amplitude == null || _amplitude.current == double.negativeInfinity
                        ? 0
                        : 1 / _amplitude.current / _amplitude.max)
                    .abs() +
                2) *
            (maxDecibalWidth / 4).toDouble();
    final time =
        '${_duration.inMinutes.toString().padLeft(2, '0')}:${(_duration.inSeconds % 60).toString().padLeft(2, '0')}';
    final content = error
        ? Text(L10n.of(context).oopsSomethingWentWrong)
        : Row(
            children: <Widget>[
              Container(
                width: maxDecibalWidth,
                height: maxDecibalWidth,
                alignment: Alignment.center,
                child: AnimatedContainer(
                  duration: const Duration(milliseconds: 100),
                  width: decibalWidth,
                  height: decibalWidth,
                  decoration: BoxDecoration(
                    color: Colors.red,
                    borderRadius: BorderRadius.circular(decibalWidth),
                  ),
                ),
              ),
              const SizedBox(width: 8),
              Expanded(
                child: Text(
                  '${L10n.of(context).recording}: $time',
                  style: const TextStyle(
                    fontSize: 18,
                  ),
                ),
              ),
            ],
          );
    if (PlatformInfos.isCupertinoStyle) {
      return CupertinoAlertDialog(
        content: content,
        actions: [
          CupertinoDialogAction(
            onPressed: () => Navigator.of(context, rootNavigator: false).pop(),
            child: Text(
              L10n.of(context).cancel.toUpperCase(),
              style: TextStyle(
                color:
                    Theme.of(context).textTheme.bodyText2.color.withAlpha(150),
              ),
            ),
          ),
          if (error != true)
            CupertinoDialogAction(
              onPressed: _stopAndSend,
              child: Text(L10n.of(context).send.toUpperCase()),
            ),
        ],
      );
    }
    return AlertDialog(
      content: content,
      actions: [
        TextButton(
          onPressed: () => Navigator.of(context, rootNavigator: false).pop(),
          child: Text(
            L10n.of(context).cancel.toUpperCase(),
            style: TextStyle(
              color: Theme.of(context).textTheme.bodyText2.color.withAlpha(150),
            ),
          ),
        ),
        if (error != true)
          TextButton(
            onPressed: _stopAndSend,
            child: Row(
              mainAxisSize: MainAxisSize.min,
              children: <Widget>[
                Text(L10n.of(context).send.toUpperCase()),
                const SizedBox(width: 4),
                const Icon(Icons.send_outlined, size: 15),
              ],
            ),
          ),
      ],
    );
  }
}
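
/// Value returned by [RecordingDialog]: where the recording was written and
/// how long it is, in milliseconds.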
class RecordingResult {
  final String path;
  final int duration;

  const RecordingResult({
    @required this.path,
    @required this.duration,
  });

  factory RecordingResult.fromJson(Map<String, dynamic> json) =>
      RecordingResult(
        path: json['path'],
        duration: json['duration'],
      );

  Map<String, dynamic> toJson() => {
        'path': path,
        'duration': duration,
      };
}