// fluffychat/lib/components/dialogs/recording_dialog.dart
import 'dart:async';
import 'dart:io';
import 'dart:math';

import 'package:flutter/material.dart';
import 'package:flutter_gen/gen_l10n/l10n.dart';
import 'package:flutter_sound_lite/flutter_sound.dart';
import 'package:path_provider/path_provider.dart';
/// Modal dialog that records audio from the microphone.
///
/// Pops with the recorded file path as a `String` when the user taps
/// "send", or with `null` when cancelled.
class RecordingDialog extends StatefulWidget {
  /// Localization bundle providing the dialog's button/label texts.
  final L10n l10n;

  const RecordingDialog({
    Key key,
    @required this.l10n,
  }) : super(key: key);

  @override
  _RecordingDialogState createState() {
    return _RecordingDialogState();
  }
}
class _RecordingDialogState extends State<RecordingDialog> {
  /// Recorder backed by flutter_sound; session is opened in [startRecording].
  final FlutterSoundRecorder flutterSound = FlutterSoundRecorder();

  /// Elapsed recording time, formatted as mm:ss for display.
  String time = '00:00:00';

  StreamSubscription _recorderSubscription;

  /// Whether opening/starting the recorder failed; triggers auto-dismiss.
  bool error = false;

  /// Guards against scheduling more than one dismiss [Timer] from build().
  bool _dismissScheduled = false;

  /// Absolute path of the temporary file the recorder writes to.
  String _recordedPath;

  /// Latest input loudness reported by the recorder, in decibels.
  double _decibels = 0;

  /// Opens an audio session, starts recording to a temp file and subscribes
  /// to progress events that drive the level indicator and the timer text.
  void startRecording() async {
    try {
      await flutterSound.openAudioSession();
      await flutterSound
          .setSubscriptionDuration(Duration(milliseconds: 100));
      final codec = Codec.aacADTS;
      final tempDir = await getTemporaryDirectory();
      _recordedPath = '${tempDir.path}/recording${ext[codec.index]}';
      // Delete any stale file left over from a previous recording.
      final outputFile = File(_recordedPath);
      if (outputFile.existsSync()) {
        await outputFile.delete();
      }
      await flutterSound.startRecorder(codec: codec, toFile: _recordedPath);
      _recorderSubscription = flutterSound.onProgress.listen((e) {
        // Progress events may arrive after the dialog is closed.
        if (!mounted) return;
        setState(() {
          // decibels can be null on the very first events; keep last value.
          _decibels = e.decibels ?? _decibels;
          time =
              '${e.duration.inMinutes.toString().padLeft(2, '0')}:${(e.duration.inSeconds % 60).toString().padLeft(2, '0')}';
        });
      });
    } catch (e) {
      // BUGFIX: the original set `error = true` without setState(), so the
      // widget never rebuilt and the error branch in build() never ran —
      // the dialog would hang open on a recorder failure.
      if (mounted) {
        setState(() => error = true);
      } else {
        error = true;
      }
    }
  }

  @override
  void initState() {
    super.initState();
    startRecording();
  }

  @override
  void dispose() {
    // Fire-and-forget teardown: dispose() cannot await, and the recorder
    // API tolerates stop/close calls during teardown.
    if (flutterSound.isRecording) flutterSound.stopRecorder();
    _recorderSubscription?.cancel();
    flutterSound.closeAudioSession();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    if (error && !_dismissScheduled) {
      // Auto-close shortly after a recorder failure. The flag ensures only
      // one pop is scheduled even if build() runs again meanwhile (the
      // original could schedule a Timer — and pop — once per rebuild).
      _dismissScheduled = true;
      Timer(Duration(seconds: 1), () {
        if (mounted) Navigator.of(context).pop();
      });
    }
    const maxDecibelWidth = 64.0;
    final decibelWidth = min(_decibels / 2, maxDecibelWidth).toDouble();
    return AlertDialog(
      content: Row(
        children: <Widget>[
          // Pulsing red circle whose size follows the input loudness.
          Container(
            width: maxDecibelWidth,
            height: maxDecibelWidth,
            alignment: Alignment.center,
            child: AnimatedContainer(
              duration: Duration(milliseconds: 50),
              width: decibelWidth,
              height: decibelWidth,
              decoration: BoxDecoration(
                color: Colors.red,
                borderRadius: BorderRadius.circular(decibelWidth),
              ),
            ),
          ),
          SizedBox(width: 8),
          Expanded(
            child: Text(
              '${widget.l10n.recording}: $time',
              style: TextStyle(
                fontSize: 18,
              ),
            ),
          ),
        ],
      ),
      actions: <Widget>[
        FlatButton(
          child: Text(
            widget.l10n.cancel.toUpperCase(),
            style: TextStyle(
              color: Theme.of(context).textTheme.bodyText2.color.withAlpha(150),
            ),
          ),
          // Pops null; dispose() takes care of stopping the recorder.
          onPressed: () => Navigator.of(context).pop(),
        ),
        FlatButton(
          child: Row(
            children: <Widget>[
              Text(widget.l10n.send.toUpperCase()),
              SizedBox(width: 4),
              Icon(Icons.send_outlined, size: 15),
            ],
          ),
          onPressed: () async {
            // Stop cleanly before handing the file path back to the caller.
            await _recorderSubscription?.cancel();
            await flutterSound.stopRecorder();
            Navigator.of(context).pop<String>(_recordedPath);
          },
        ),
      ],
    );
  }
}