coming along, next is getAudioLocal

This commit is contained in:
William Jordan-Cooley 2024-01-24 12:28:38 -05:00
parent 850e78c4b8
commit 847ad9f8c1
8 changed files with 2195 additions and 338 deletions

View file

@ -1,15 +1,16 @@
import 'dart:async';
import 'dart:developer';
import 'dart:io';
import 'package:fluffychat/pangea/controllers/text_to_speech_controller.dart';
import 'package:fluffychat/utils/error_reporter.dart';
import 'package:fluffychat/utils/localized_exception_extension.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:just_audio/just_audio.dart';
import 'package:matrix/matrix.dart';
import 'package:path_provider/path_provider.dart';
import 'package:fluffychat/utils/error_reporter.dart';
import 'package:fluffychat/utils/localized_exception_extension.dart';
import '../../../utils/matrix_sdk_extensions/event_extension.dart';
class AudioPlayerWidget extends StatefulWidget {
@ -80,6 +81,7 @@ class AudioPlayerState extends State<AudioPlayerWidget> {
});
_playAction();
} catch (e, s) {
debugger();
Logs().v('Could not download audio file', e, s);
ScaffoldMessenger.of(context).showSnackBar(
SnackBar(
@ -132,7 +134,26 @@ class AudioPlayerState extends State<AudioPlayerWidget> {
if (audioFile != null) {
audioPlayer.setFilePath(audioFile.path);
} else {
await audioPlayer.setAudioSource(MatrixFileAudioSource(matrixFile!));
final data = matrixFile!.bytes;
final mimeType = matrixFile!.mimeType;
//shouldn't have to be setting this here
//TODO: figure out why this is necessary
matrixFile = MatrixAudioFile(
bytes: matrixFile!.bytes,
name: matrixFile!.name,
mimeType: "audio/ogg",
);
debugPrint("audioType is $mimeType");
if (!TextToSpeechController.isOggFile(matrixFile!.bytes)) {
debugger(when: kDebugMode);
} else {
debugPrint("still an ogg file!");
}
try {
await audioPlayer.setAudioSource(MatrixFileAudioSource(matrixFile!));
} catch (e, s) {
debugger(when: kDebugMode);
}
}
audioPlayer.play().onError(
ErrorReporter(context, 'Unable to play audio message')

View file

@ -1,5 +1,6 @@
import 'dart:async';
import 'dart:convert';
import 'dart:typed_data';
import 'package:fluffychat/pangea/config/environment.dart';
import 'package:fluffychat/pangea/constants/model_keys.dart';
@ -122,4 +123,36 @@ class TextToSpeechController {
return TextToSpeechResponse.fromJson(json);
}
// if (json["wave_form"] == null) {
// json["wave_form"] = getWaveForm();
// }
// return TextToSpeechResponse(
// audioContent: String.fromCharCodes(base64Decode(json["audio_content"])),
// mediaType: json["media_type"],
// durationMillis: durationMillis(json["duration_millis"]),
// waveform: getWaveForm(json["audio_content"]),
// );
// }
// static List<int> getWaveForm(audioContent) {
// return [];
// }
// static int durationMillis(audioContent) {
// return 0;
// }
/// Whether [bytes] begins with the OGG container capture pattern.
///
/// An OGG page starts with the four ASCII bytes "OggS". Returns false
/// when fewer than four bytes are available, since the header cannot
/// possibly be present.
static bool isOggFile(Uint8List bytes) {
  // 'O', 'g', 'g', 'S' — the OGG page capture pattern (magic number).
  const oggMagic = [0x4F, 0x67, 0x67, 0x53];
  // Too short to contain the four-byte header at all.
  if (bytes.length < oggMagic.length) {
    return false;
  }
  for (var i = 0; i < oggMagic.length; i++) {
    if (bytes[i] != oggMagic[i]) {
      return false;
    }
  }
  return true;
}
}

View file

@ -824,28 +824,28 @@ extension PangeaRoom on Room {
Future<void> setClassPowerlLevels() async {
try {
if (ownPowerLevel < ClassDefaultValues.powerLevelOfAdmin) {
return;
}
final currentPower = getState(EventTypes.RoomPowerLevels);
final Map<String, dynamic>? currentPowerContent =
currentPower!.content["events"] as Map<String, dynamic>?;
final spaceChildPower = currentPowerContent?[EventTypes.spaceChild];
final studentAnalyticsPower =
currentPowerContent?[PangeaEventTypes.studentAnalyticsSummary];
// if (ownPowerLevel < ClassDefaultValues.powerLevelOfAdmin) {
// return;
// }
// final currentPower = getState(EventTypes.RoomPowerLevels);
// final Map<String, dynamic>? currentPowerContent =
// currentPower!.content["events"] as Map<String, dynamic>?;
// final spaceChildPower = currentPowerContent?[EventTypes.spaceChild];
// final studentAnalyticsPower =
// currentPowerContent?[PangeaEventTypes.studentAnalyticsSummary];
if (spaceChildPower == null || studentAnalyticsPower == null) {
currentPowerContent!["events"][EventTypes.spaceChild] = 0;
currentPowerContent["events"]
[PangeaEventTypes.studentAnalyticsSummary] = 0;
// if (spaceChildPower == null || studentAnalyticsPower == null) {
// currentPowerContent!["events"][EventTypes.spaceChild] = 0;
// currentPowerContent["events"]
// [PangeaEventTypes.studentAnalyticsSummary] = 0;
await client.setRoomStateWithKey(
id,
EventTypes.RoomPowerLevels,
currentPower.stateKey ?? "",
currentPowerContent,
);
}
// await client.setRoomStateWithKey(
// id,
// EventTypes.RoomPowerLevels,
// currentPower.stateKey ?? "",
// currentPowerContent,
// );
// }
} catch (err, s) {
debugger(when: kDebugMode);
ErrorHandler.logError(e: err, s: s, data: toJson());

View file

@ -110,6 +110,12 @@ class PangeaMessageEvent {
final audioBytes = base64.decode(response.audioContent);
if (!TextToSpeechController.isOggFile(audioBytes)) {
throw Exception("File is not a valid OGG format");
} else {
debugPrint("File is a valid OGG format");
}
// from text, trim whitespace, remove special characters, and limit to 20 characters
// final fileName =
// text.trim().replaceAll(RegExp('[^A-Za-z0-9]'), '').substring(0, 20);
@ -118,8 +124,13 @@ class PangeaMessageEvent {
final file = MatrixAudioFile(
bytes: audioBytes,
name: fileName,
mimeType: response.mediaType,
);
if (file.mimeType != "audio/ogg") {
throw Exception("Unexpected mime type: ${file.mimeType}");
}
return room.sendFileEvent(
file,
inReplyTo: _event,
@ -131,10 +142,9 @@ class PangeaMessageEvent {
'org.matrix.msc3245.voice': {},
'org.matrix.msc1767.audio': {
'duration': response.durationMillis,
'waveform': null,
// 'waveform': response.waveform,
'waveform': response.waveform,
},
'transcription': {
ModelKey.transcription: {
ModelKey.text: text,
ModelKey.langCode: langCode,
},
@ -170,28 +180,36 @@ class PangeaMessageEvent {
return allAudio.firstWhereOrNull(
(element) {
// Safely access the transcription map
final transcription =
element.content.tryGet<Map<String, String>>(ModelKey.transcription);
if (transcription == null) {
// If transcription is null, this element does not match.
return false;
}
final transcription = element.content.tryGet(ModelKey.transcription);
// Safely get language code and text from the transcription
final elementLangCode = transcription.tryGet(ModelKey.langCode);
final elementText = transcription.tryGet(ModelKey.text);
return transcription != null;
// if (transcription == null) {
// // If transcription is null, this element does not match.
// return false;
// }
// Check if both language code and text match
return elementLangCode == langCode && elementText == text;
// // Safely get language code and text from the transcription
// final elementLangCode = transcription.tryGet(ModelKey.langCode);
// final elementText = transcription.tryGet(ModelKey.text);
// // Check if both language code and text match
// return elementLangCode == langCode && elementText == text;
},
);
}
// get audio events that are related to this event
Set<Event> get allAudio => _latestEdit.aggregatedEvents(
Set<Event> get allAudio => _latestEdit
.aggregatedEvents(
timeline,
EventTypes.Message,
);
RelationshipTypes.reply,
)
.where((element) {
return element.content.tryGet<Map<String, dynamic>>(
ModelKey.transcription,
) !=
null;
}).toSet();
List<RepresentationEvent>? _representations;
List<RepresentationEvent> get representations {

View file

@ -10,18 +10,21 @@ class PangeaMessageActions extends StatelessWidget {
@override
Widget build(BuildContext context) {
return Row(
children: <Widget>[
LanguageToggleSwitch(controller: chatController),
TextToSpeechButton(
controller: chatController,
),
// IconButton(
// icon: Icon(Icons.mic),
// onPressed: chatController.onMicTap,
// ),
// Add more IconButton widgets here
],
);
return chatController.selectedEvents.length == 1
? Row(
children: <Widget>[
LanguageToggleSwitch(controller: chatController),
TextToSpeechButton(
controller: chatController,
selectedEvent: chatController.selectedEvents.first,
),
// IconButton(
// icon: Icon(Icons.mic),
// onPressed: chatController.onMicTap,
// ),
// Add more IconButton widgets here
],
)
: const SizedBox();
}
}

View file

@ -1,7 +1,8 @@
import 'dart:developer';
import 'package:fluffychat/config/app_config.dart';
import 'package:fluffychat/pages/chat/chat.dart';
import 'package:fluffychat/pangea/constants/language_keys.dart';
import 'package:fluffychat/pages/chat/events/audio_player.dart';
import 'package:fluffychat/pangea/models/pangea_message_event.dart';
import 'package:fluffychat/pangea/utils/error_handler.dart';
import 'package:fluffychat/widgets/matrix.dart';
@ -13,10 +14,12 @@ import 'package:matrix/matrix.dart';
class TextToSpeechButton extends StatefulWidget {
final ChatController controller;
final Event selectedEvent;
const TextToSpeechButton({
super.key,
required this.controller,
required this.selectedEvent,
});
@override
@ -25,6 +28,8 @@ class TextToSpeechButton extends StatefulWidget {
class _TextToSpeechButtonState extends State<TextToSpeechButton> {
final AudioPlayer _audioPlayer = AudioPlayer();
late PangeaMessageEvent _pangeaMessageEvent;
bool _isLoading = false;
@override
void dispose() {
@ -32,97 +37,103 @@ class _TextToSpeechButtonState extends State<TextToSpeechButton> {
super.dispose();
}
void _playSpeech() {
@override
void initState() {
super.initState();
_pangeaMessageEvent = PangeaMessageEvent(
event: widget.selectedEvent,
timeline: widget.controller.timeline!,
ownMessage:
widget.selectedEvent.senderId == Matrix.of(context).client.userID,
selected: true,
);
}
Event? get localAudioEvent =>
langCode != null && text != null && text!.isNotEmpty
? _pangeaMessageEvent.getAudioLocal(langCode!, text!)
: null;
String? get langCode =>
widget.controller.choreographer.messageOptions.selectedDisplayLang
?.langCode ??
widget.controller.choreographer.l2LangCode;
String? get text => langCode != null
? _pangeaMessageEvent.representationByLanguage(langCode!)?.text
: null;
Future<void> _getAudio() async {
try {
final String langCode = widget.controller.choreographer.messageOptions
.selectedDisplayLang?.langCode ??
widget.controller.choreographer.l2LangCode ??
'en';
final Event event = widget.controller.selectedEvents.first;
if (!mounted) return;
if (text == null || text!.isEmpty) return;
if (langCode == null || langCode!.isEmpty) return;
PangeaMessageEvent(
event: event,
timeline: widget.controller.timeline!,
ownMessage: event.senderId == Matrix.of(context).client.userID,
selected: true,
).getAudioGlobal(langCode);
// final String? text = PangeaMessageEvent(
// event: event,
// timeline: widget.controller.timeline!,
// ownMessage: event.senderId == Matrix.of(context).client.userID,
// selected: true,
// ).representationByLanguage(langCode)?.text;
// if (text == null || text.isEmpty) {
// throw Exception("text is null or empty in text_to_speech_button.dart");
// }
// final TextToSpeechRequest params = TextToSpeechRequest(
// text: text,
// langCode: widget.controller.choreographer.messageOptions
// .selectedDisplayLang?.langCode ??
// widget.controller.choreographer.l2LangCode ??
// LanguageKeys.unknownLanguage,
// );
// final TextToSpeechResponse response = await TextToSpeechService.get(
// accessToken:
// await MatrixState.pangeaController.userController.accessToken,
// params: params,
// );
// if (response.mediaType != 'audio/ogg') {
// throw Exception('Unexpected media type: ${response.mediaType}');
// }
// // Decode the base64 audio content to bytes
// final audioBytes = base64.decode(response.audioContent);
// final encoding = Uri.dataFromBytes(audioBytes);
// final uri = AudioSource.uri(encoding);
// // gets here without problems
// await _audioPlayer.setAudioSource(uri);
// await _audioPlayer.play();
// final audioBytes = base64.decode(response.audioContent);
// final tempDir = await getTemporaryDirectory();
// final file = File('${tempDir.path}/speech.ogg');
// await file.writeAsBytes(audioBytes);
// await _audioPlayer.setFilePath(file.path);
// await _audioPlayer.play();
setState(() => _isLoading = true);
await _pangeaMessageEvent.getAudioGlobal(langCode!);
setState(() => _isLoading = false);
} catch (e) {
setState(() => _isLoading = false);
debugger(when: kDebugMode);
ScaffoldMessenger.of(context).showSnackBar(
SnackBar(
content: Text(
L10n.of(context)!.errorGettingAudio,
),
content: Text(L10n.of(context)!.errorGettingAudio),
),
);
ErrorHandler.logError(
e: Exception(),
s: StackTrace.current,
m: 'text is null or empty in text_to_speech_button.dart',
data: {
'event': widget.controller.selectedEvents.first,
'langCode': widget.controller.choreographer.messageOptions
.selectedDisplayLang?.langCode ??
widget.controller.choreographer.l2LangCode ??
LanguageKeys.unknownLanguage,
},
data: {'selectedEvent': widget.selectedEvent, 'langCode': langCode},
);
}
}
@override
Widget build(BuildContext context) {
return ElevatedButton(
onPressed: _playSpeech,
child: const Text('Convert to Speech'),
if (_isLoading) {
return const Center(child: CircularProgressIndicator());
}
final playButton = InkWell(
borderRadius: BorderRadius.circular(64),
onTap: text == null || text!.isEmpty ? null : _getAudio,
child: Material(
color: AppConfig.primaryColor.withAlpha(64),
borderRadius: BorderRadius.circular(64),
child: const Icon(
// Change the icon based on some condition. If you have an audio player state, use it here.
Icons.play_arrow_outlined,
color: AppConfig.primaryColor,
),
),
);
return localAudioEvent == null
? Opacity(
opacity: text == null || text!.isEmpty ? 0.5 : 1,
child: SizedBox(
width: 44, // Match the size of the button in AudioPlayerState
height: 36,
child: Padding(
//only left side of the button is padded to match the padding of the AudioPlayerState
padding: const EdgeInsets.only(left: 8),
child: playButton,
),
),
)
: Container(
constraints: const BoxConstraints(
maxWidth: 250,
),
child: Column(
children: [
AudioPlayerWidget(
localAudioEvent!,
color: Theme.of(context).colorScheme.onPrimaryContainer,
),
],
),
);
}
}

View file

@ -1,3 +1,4 @@
import 'package:fluffychat/pangea/constants/model_keys.dart';
import 'package:matrix/matrix.dart';
import '../../config/app_config.dart';
@ -18,6 +19,9 @@ extension IsStateExtension on Event {
(!AppConfig.hideUnknownEvents || isEventTypeKnown) &&
// remove state events that we don't want to render
(isState || !AppConfig.hideAllStateEvents) &&
// #Pangea
content.tryGet(ModelKey.transcription) == null &&
// Pangea#
// hide unimportant state events
(!AppConfig.hideUnimportantStateEvents ||
!isState ||

File diff suppressed because it is too large Load diff