Merge pull request #151 from pangeachat/speech-to-text

[WIP] speech to text fully drafted
This commit is contained in:
wcjord 2024-05-08 17:25:26 -04:00 committed by GitHub
commit 29a3946e73
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
49 changed files with 2510 additions and 1154 deletions

View file

@ -2431,9 +2431,6 @@
"seconds": {}
}
},
"pleaseEnterANumber": "Please enter a number greater than 0",
"archiveRoomDescription": "The chat will be moved to the archive. Other users will be able to see that you have left the chat.",
"roomUpgradeDescription": "The chat will then be recreated with the new room version. All participants will be notified that they need to switch to the new chat. You can find out more about room versions at https://spec.matrix.org/latest/rooms/",
"allCorrect": "That's how I would say it! Nice!",
"newWayAllGood": "That's not how I would have said it but it looks good!",
"othersAreBetter": "Hm, there might be a better way to say that.",
@ -2584,15 +2581,7 @@
"placeholders": {}
},
"copyClassLink": "Copy invite link",
"@copyClassLink": {
"type": "text",
"placeholders": {}
},
"copyClassLinkDesc": "Clicking this link will take students to the app, direct them to make an account and they will automatically join this space.",
"@copyClassLinkDesc": {
"type": "text",
"placeholders": {}
},
"copyClassCode": "Copy invite code",
"inviteStudentByUserName": "Invite student by username",
"@inviteStudentByUserName": {
@ -2764,11 +2753,6 @@
"type": "text",
"placeholders": {}
},
"errorPleaseRefresh": "We're looking into it! Please reload and try again.",
"@errorPleaseRefresh": {
"type": "text",
"placeholders": {}
},
"joinWithClassCode": "Join class or exchange",
"@joinWithClassCode": {
"type": "text",
@ -2989,26 +2973,6 @@
"type": "text",
"placeholders": {}
},
"error502504Title": "Wow, there are a lot of students online!",
"@error502504Title": {
"type": "text",
"placeholders": {}
},
"error502504Desc": "Translation and grammar tools may be slow or unavailable while the Pangea bots catch up.",
"@error502504Desc": {
"type": "text",
"placeholders": {}
},
"error404Title": "Translation error!",
"@error404Title": {
"type": "text",
"placeholders": {}
},
"error404Desc": "Pangea Bot isn't sure how to translate that...",
"@error404Desc": {
"type": "text",
"placeholders": {}
},
"errorDisableIT": "Translation assistance is turned off.",
"errorDisableIGC": "Grammar assistance is turned off.",
"errorDisableLanguageAssistance": "Translation assistance and grammar assistance are turned off.",
@ -3111,11 +3075,6 @@
"type": "text",
"placeholders": {}
},
"classDescription": "Space Description",
"@classDescription": {
"type": "text",
"placeholders": {}
},
"inviteStudentByUserNameDesc": "If your student already has an account, you can search for them.",
"@inviteStudentByUserNameDesc": {
"type": "text",
@ -3132,7 +3091,6 @@
"clickMessageTitle": "Need help?",
"clickMessageBody": "Click messages to access definitions, translations, and audio!",
"understandingMessagesTitle": "Definitions and translations!",
"addToClass": "Add this chat to ",
"understandingMessagesBody": "Click underlined words for definitions. Translate with message options (upper right).",
"allDone": "All done!",
"vocab": "Vocabulary",
@ -3665,7 +3623,6 @@
"user": {}
}
},
"decline": "Decline",
"declinedInvitation": "Declined invitation",
"acceptedInvitation": "Accepted invitation",
"youreInvited": "📩 You're invited!",
@ -3744,7 +3701,6 @@
},
"acceptSelection": "Accept Correction",
"acceptSelectionAnyway": "Use this anyway",
"replace": "Make correction",
"makingActivity": "Making activity",
"why": "Why?",
"definition": "Definition",
@ -3767,12 +3723,6 @@
}
},
"noTeachersFound": "No teachers found to report to",
"pushNotificationsNotAvailable": "Push notifications not available",
"learnMore": "Learn more",
"banUserDescription": "The user will be banned from the chat and will not be able to enter the chat again until they are unbanned.",
"unbanUserDescription": "The user will be able to enter the chat again if they try.",
"kickUserDescription": "The user is kicked out of the chat but not banned. In public chats, the user can rejoin at any time.",
"makeAdminDescription": "Once you make this user admin, you may not be able to undo this as they will then have the same permissions as you.",
"pleaseEnterANumber": "Please enter a number greater than 0",
"archiveRoomDescription": "The chat will be moved to the archive. Other users will be able to see that you have left the chat.",
"roomUpgradeDescription": "The chat will then be recreated with the new room version. All participants will be notified that they need to switch to the new chat. You can find out more about room versions at https://spec.matrix.org/latest/rooms/",
@ -3792,10 +3742,6 @@
}
},
"searchChatsRooms": "Search for #chats, @users...",
"groupName": "Group name",
"createGroupAndInviteUsers": "Create a group and invite users",
"groupCanBeFoundViaSearch": "Group can be found via search",
"inNoSpaces": "You are not a member of any classes or exchanges",
"createClass": "Create class",
"createExchange": "Create exchange",
"viewArchive": "View Archive",
@ -3902,7 +3848,6 @@
"enableModerationDesc": "Enable automatic moderation to review messages before they are sent",
"conversationLanguageLevel": "What is the language level of this conversation?",
"showDefinition": "Show Definition",
"acceptedKeyVerification": "{sender} accepted key verification",
"sendReadReceipts": "Send read receipts",
"sendTypingNotificationsDescription": "Other participants in a chat can see when you are typing a new message.",
"sendReadReceiptsDescription": "Other participants in a chat can see when you have read a message.",
@ -3967,6 +3912,7 @@
"more": "More",
"translationTooltip": "Translate",
"audioTooltip": "Play Audio",
"speechToTextTooltip": "Transcript",
"certifyAge": "I certify that I am over {age} years of age",
"@certifyAge": {
"type": "text",
@ -3994,5 +3940,9 @@
}
},
"messageAnalytics": "Message Analytics",
"words": "Words",
"score": "Score",
"accuracy": "Accuracy",
"points": "Points",
"noPaymentInfo": "No payment info necessary!"
}

View file

@ -4572,6 +4572,7 @@
"more": "Más",
"translationTooltip": "Traducir",
"audioTooltip": "Reproducir audio",
"speechToTextTooltip": "Transcripción",
"yourBirthdayPleaseShort": "Seleccione su grupo de edad",
"certifyAge": "Certifico que soy mayor de {age} años",
"@certifyAge": {
@ -4587,4 +4588,4 @@
"joinToView": "Únete a esta sala para ver los detalles",
"autoPlayTitle": "Reproducción automática de mensajes",
"autoPlayDesc": "Cuando está activado, el audio de texto a voz de los mensajes se reproducirá automáticamente cuando se seleccione."
}
}

View file

@ -18,11 +18,12 @@ import 'package:fluffychat/pangea/choreographer/controllers/choreographer.dart';
import 'package:fluffychat/pangea/controllers/pangea_controller.dart';
import 'package:fluffychat/pangea/enum/use_type.dart';
import 'package:fluffychat/pangea/extensions/pangea_room_extension.dart';
import 'package:fluffychat/pangea/matrix_event_wrappers/pangea_message_event.dart';
import 'package:fluffychat/pangea/models/choreo_record.dart';
import 'package:fluffychat/pangea/models/class_model.dart';
import 'package:fluffychat/pangea/models/message_data_models.dart';
import 'package:fluffychat/pangea/models/pangea_message_event.dart';
import 'package:fluffychat/pangea/models/representation_content_model.dart';
import 'package:fluffychat/pangea/models/student_analytics_summary_model.dart';
import 'package:fluffychat/pangea/models/tokens_event_content_model.dart';
import 'package:fluffychat/pangea/utils/error_handler.dart';
import 'package:fluffychat/pangea/utils/firebase_analytics.dart';
import 'package:fluffychat/pangea/utils/report_message.dart';

View file

@ -1,8 +1,8 @@
import 'package:fluffychat/config/themes.dart';
import 'package:fluffychat/pages/chat/chat.dart';
import 'package:fluffychat/pangea/enum/use_type.dart';
import 'package:fluffychat/pangea/matrix_event_wrappers/pangea_message_event.dart';
import 'package:fluffychat/pangea/models/language_model.dart';
import 'package:fluffychat/pangea/models/pangea_message_event.dart';
import 'package:fluffychat/pangea/widgets/chat/message_toolbar.dart';
import 'package:fluffychat/utils/date_time_extension.dart';
import 'package:fluffychat/utils/string_color.dart';
@ -168,7 +168,8 @@ class Message extends StatelessWidget {
ToolbarDisplayController? toolbarController;
if (event.type == EventTypes.Message &&
event.messageType == MessageTypes.Text ||
event.messageType == MessageTypes.Notice) {
event.messageType == MessageTypes.Notice ||
event.messageType == MessageTypes.Audio) {
toolbarController = controller.getToolbarDisplayController(
event.eventId,
nextEvent: nextEvent,

View file

@ -1,7 +1,8 @@
import 'dart:math';
import 'package:fluffychat/pages/chat/events/video_player.dart';
import 'package:fluffychat/pangea/models/pangea_message_event.dart';
import 'package:fluffychat/pangea/enum/message_mode_enum.dart';
import 'package:fluffychat/pangea/matrix_event_wrappers/pangea_message_event.dart';
import 'package:fluffychat/pangea/widgets/chat/message_context_menu.dart';
import 'package:fluffychat/pangea/widgets/chat/message_toolbar.dart';
import 'package:fluffychat/pangea/widgets/igc/pangea_rich_text.dart';
@ -372,7 +373,7 @@ class MessageContent extends StatelessWidget {
),
onListen: () => toolbarController?.showToolbar(
context,
mode: MessageMode.play,
mode: MessageMode.textToSpeech,
),
),
enableInteractiveSelection:

View file

@ -12,7 +12,7 @@ import 'package:wakelock_plus/wakelock_plus.dart';
import 'events/audio_player.dart';
class RecordingDialog extends StatefulWidget {
static const String recordingFileType = 'm4a';
static const String recordingFileType = 'wav';
const RecordingDialog({
super.key,
});
@ -49,6 +49,8 @@ class RecordingDialogState extends State<RecordingDialog> {
path: _recordedPath,
bitRate: bitRate,
samplingRate: samplingRate,
encoder: AudioEncoder.wav,
numChannels: 1,
);
setState(() => _duration = Duration.zero);
_recorderSubscription?.cancel();

View file

@ -13,7 +13,8 @@ import 'package:fluffychat/pangea/enum/edit_type.dart';
import 'package:fluffychat/pangea/extensions/pangea_room_extension.dart';
import 'package:fluffychat/pangea/models/class_model.dart';
import 'package:fluffychat/pangea/models/it_step.dart';
import 'package:fluffychat/pangea/models/message_data_models.dart';
import 'package:fluffychat/pangea/models/representation_content_model.dart';
import 'package:fluffychat/pangea/models/tokens_event_content_model.dart';
import 'package:fluffychat/pangea/utils/any_state_holder.dart';
import 'package:fluffychat/pangea/utils/error_handler.dart';
import 'package:fluffychat/pangea/utils/overlay.dart';

View file

@ -2,6 +2,8 @@ class PangeaEventTypes {
static const classSettings = "pangea.class";
static const pangeaExchange = "p.exchange";
static const transcript = "pangea.transcript";
static const rules = "p.rules";
static const studentAnalyticsSummary = "pangea.usranalytics";
@ -18,5 +20,7 @@ class PangeaEventTypes {
static const botOptions = "pangea.bot_options";
static const userAge = "pangea.user_age";
static const String report = 'm.report';
static const textToSpeechRule = "p.rule.text_to_speech";
}

View file

@ -1,3 +0,0 @@
/// Legacy holder for custom Matrix message types.
/// Removed in this commit; the 'm.report' constant now lives in
/// PangeaEventTypes.report — keep callers pointed there.
class PangeaMessageTypes {
  // Custom msgtype marking a message as a moderation report.
  static String report = 'm.report';
}

View file

@ -13,8 +13,8 @@ import 'package:matrix/matrix.dart';
import '../constants/class_default_values.dart';
import '../extensions/client_extension.dart';
import '../extensions/pangea_room_extension.dart';
import '../matrix_event_wrappers/construct_analytics_event.dart';
import '../models/chart_analytics_model.dart';
import '../models/construct_analytics_event.dart';
import '../models/student_analytics_event.dart';
import 'base_controller.dart';
import 'pangea_controller.dart';

View file

@ -2,7 +2,8 @@ import 'package:collection/collection.dart';
import 'package:fluffychat/pangea/controllers/base_controller.dart';
import 'package:fluffychat/pangea/controllers/pangea_controller.dart';
import 'package:fluffychat/pangea/extensions/pangea_room_extension.dart';
import 'package:fluffychat/pangea/models/message_data_models.dart';
import 'package:fluffychat/pangea/models/representation_content_model.dart';
import 'package:fluffychat/pangea/models/tokens_event_content_model.dart';
import 'package:fluffychat/pangea/repo/tokens_repo.dart';
import 'package:flutter/material.dart';
import 'package:matrix/matrix.dart';

View file

@ -11,6 +11,7 @@ import 'package:fluffychat/pangea/controllers/local_settings.dart';
import 'package:fluffychat/pangea/controllers/message_data_controller.dart';
import 'package:fluffychat/pangea/controllers/my_analytics_controller.dart';
import 'package:fluffychat/pangea/controllers/permissions_controller.dart';
import 'package:fluffychat/pangea/controllers/speech_to_text_controller.dart';
import 'package:fluffychat/pangea/controllers/subscription_controller.dart';
import 'package:fluffychat/pangea/controllers/text_to_speech_controller.dart';
import 'package:fluffychat/pangea/controllers/user_controller.dart';
@ -48,6 +49,7 @@ class PangeaController {
late InstructionsController instructions;
late SubscriptionController subscriptionController;
late TextToSpeechController textToSpeech;
late SpeechToTextController speechToText;
///store Services
late PLocalStore pStoreService;
@ -94,6 +96,7 @@ class PangeaController {
subscriptionController = SubscriptionController(this);
itFeedback = ITFeedbackController(this);
textToSpeech = TextToSpeechController(this);
speechToText = SpeechToTextController(this);
PAuthGaurd.pController = this;
}

View file

@ -0,0 +1,122 @@
import 'dart:async';
import 'dart:convert';
import 'package:fluffychat/pangea/constants/pangea_event_types.dart';
import 'package:fluffychat/pangea/controllers/pangea_controller.dart';
import 'package:fluffychat/pangea/extensions/pangea_room_extension.dart';
import 'package:fluffychat/pangea/models/representation_content_model.dart';
import 'package:fluffychat/pangea/models/speech_to_text_models.dart';
import 'package:fluffychat/pangea/utils/error_handler.dart';
import 'package:flutter/foundation.dart';
import 'package:http/http.dart';
import '../config/environment.dart';
import '../network/requests.dart';
import '../network/urls.dart';
// Assuming SpeechToTextRequestModel, SpeechToTextModel and related models are already defined as in your provided code.
/// Private cache entry for [SpeechToTextController]: holds the (possibly
/// still in-flight) future of one transcription request so duplicate
/// requests can share a single network call.
class _SpeechToTextCacheItem {
  // Pending or completed transcription result.
  Future<SpeechToTextModel> data;

  _SpeechToTextCacheItem({required this.data});
}
/// Fetches speech-to-text transcriptions from the Choreo API, deduplicating
/// identical requests through a periodically cleared in-memory cache.
class SpeechToTextController {
  /// Completed or in-flight transcriptions, keyed by the request model's
  /// hashCode so identical requests share one network call.
  /// NOTE(review): a hashCode collision between two distinct requests would
  /// return the wrong transcript — consider a stronger cache key; confirm
  /// SpeechToTextRequestModel's hashCode implementation.
  static final Map<int, _SpeechToTextCacheItem> _cache = {};
  late final PangeaController _pangeaController;
  Timer? _cacheClearTimer;

  SpeechToTextController(this._pangeaController) {
    _initializeCacheClearing();
  }

  /// Starts a periodic timer that wipes the whole cache every 2 minutes.
  void _initializeCacheClearing() {
    const duration = Duration(minutes: 2);
    _cacheClearTimer = Timer.periodic(duration, (Timer t) => _clearCache());
  }

  void _clearCache() {
    _cache.clear();
  }

  /// Cancels the cache-clearing timer; call when this controller is retired
  /// to avoid a leaked periodic timer.
  void dispose() {
    _cacheClearTimer?.cancel();
  }

  /// Returns the transcription for [requestModel], serving repeated requests
  /// from the cache while the entry is alive.
  Future<SpeechToTextModel> get(
    SpeechToTextRequestModel requestModel,
  ) async {
    final int cacheKey = requestModel.hashCode;
    final _SpeechToTextCacheItem? cached = _cache[cacheKey];
    if (cached != null) return cached.data;

    final Future<SpeechToTextModel> response = _fetchResponse(
      accessToken: await _pangeaController.userController.accessToken,
      requestModel: requestModel,
    );
    _cache[cacheKey] = _SpeechToTextCacheItem(data: response);
    // Fix: evict failed requests from the cache right away. Previously a
    // failed future stayed cached, replaying the same error to every caller
    // for up to the full 2-minute cache-clear interval instead of allowing
    // an immediate retry.
    unawaited(
      response.then<void>(
        (_) {},
        onError: (Object _) => _cache.remove(cacheKey),
      ),
    );
    return response;
  }

  /// Persists [response] as a representation event attached to the original
  /// audio message, so the transcript is available to other clients.
  Future<void> saveSpeechToTextAsRepresentationEvent(
    SpeechToTextModel response,
    SpeechToTextRequestModel requestModel,
  ) {
    if (requestModel.audioEvent == null) {
      // Transcription requested before the audio message was sent; there is
      // no parent event to attach a representation to yet.
      debugPrint(
        'Audio event is null, case of giving speech to text before message sent, currently not implemented',
      );
      return Future.value(null);
    }
    debugPrint('Saving transcript as matrix event');
    // NOTE(review): the send future is intentionally not awaited, so the
    // 'saved' log below can print before the send actually completes.
    requestModel.audioEvent?.room.sendPangeaEvent(
      content: PangeaRepresentation(
        langCode: response.langCode,
        text: response.transcript.text,
        originalSent: false,
        originalWritten: false,
        speechToText: response,
      ).toJson(),
      parentEventId: requestModel.audioEvent!.eventId,
      type: PangeaEventTypes.representation,
    );
    debugPrint('Transcript saved as matrix event');
    return Future.value(null);
  }

  /// POSTs [requestModel] to the speech-to-text endpoint and decodes the
  /// response. On success the transcript is also saved as a representation
  /// event (fire-and-forget, errors logged). Throws on non-200 status.
  Future<SpeechToTextModel> _fetchResponse({
    required String accessToken,
    required SpeechToTextRequestModel requestModel,
  }) async {
    final Requests request = Requests(
      choreoApiKey: Environment.choreoApiKey,
      accessToken: accessToken,
    );
    final Response res = await request.post(
      url: PApiUrls.speechToText,
      body: requestModel.toJson(),
    );
    if (res.statusCode == 200) {
      final Map<String, dynamic> json = jsonDecode(utf8.decode(res.bodyBytes));
      final response = SpeechToTextModel.fromJson(json);
      saveSpeechToTextAsRepresentationEvent(response, requestModel).onError(
        (error, stackTrace) => ErrorHandler.logError(e: error, s: stackTrace),
      );
      return response;
    } else {
      debugPrint('Error converting speech to text: ${res.body}');
      throw Exception('Failed to convert speech to text');
    }
  }
}

View file

@ -126,25 +126,6 @@ class TextToSpeechController {
return TextToSpeechResponse.fromJson(json);
}
// if (json["wave_form"] == null) {
// json["wave_form"] = getWaveForm();
// }
// return TextToSpeechResponse(
// audioContent: String.fromCharCodes(base64Decode(json["audio_content"])),
// mediaType: json["media_type"],
// durationMillis: durationMillis(json["duration_millis"]),
// waveform: getWaveForm(json["audio_content"]),
// );
// }
// static List<int> getWaveForm(audioContent) {
// return [];
// }
// static int durationMillis(audioContent) {
// return 0;
// }
static bool isOggFile(Uint8List bytes) {
// Check if the file has enough bytes for the header

View file

@ -0,0 +1,60 @@
/// Maps an audio MIME type to the encoding value expected by the
/// speech-to-text API. Unknown types fall back to
/// [AudioEncodingEnum.encodingUnspecified].
AudioEncodingEnum mimeTypeToAudioEncoding(String mimeType) {
  switch (mimeType) {
    case 'audio/mpeg':
      return AudioEncodingEnum.mp3;
    case 'audio/mp4':
      return AudioEncodingEnum.mp4;
    case 'audio/ogg':
      return AudioEncodingEnum.oggOpus;
    // Fix: accept both the registered and the legacy 'x-' MIME names —
    // recorders and browsers report either form, and previously the
    // registered names fell through to encodingUnspecified.
    case 'audio/flac':
    case 'audio/x-flac':
      return AudioEncodingEnum.flac;
    case 'audio/wav':
    case 'audio/wave':
    case 'audio/x-wav':
      return AudioEncodingEnum.linear16;
    case 'audio/webm':
      return AudioEncodingEnum.webmOpus;
    default:
      return AudioEncodingEnum.encodingUnspecified;
  }
}

/// Audio encodings accepted by the speech-to-text backend.
/// The string values in [AudioEncodingExtension.value] match Google Cloud
/// Speech-to-Text's AudioEncoding names — TODO confirm against the backend.
enum AudioEncodingEnum {
  encodingUnspecified,
  linear16, // uncompressed 16-bit little-endian PCM (WAV)
  flac,
  mulaw,
  amr,
  amrWb,
  oggOpus,
  speexWithHeaderByte,
  mp3,
  mp4,
  webmOpus,
}

/// Utility extension mapping each enum value to the string constant used by
/// the API.
extension AudioEncodingExtension on AudioEncodingEnum {
  String get value {
    switch (this) {
      case AudioEncodingEnum.linear16:
        return 'LINEAR16';
      case AudioEncodingEnum.flac:
        return 'FLAC';
      case AudioEncodingEnum.mulaw:
        return 'MULAW';
      case AudioEncodingEnum.amr:
        return 'AMR';
      case AudioEncodingEnum.amrWb:
        return 'AMR_WB';
      case AudioEncodingEnum.oggOpus:
        return 'OGG_OPUS';
      case AudioEncodingEnum.speexWithHeaderByte:
        return 'SPEEX_WITH_HEADER_BYTE';
      case AudioEncodingEnum.mp3:
        return 'MP3';
      case AudioEncodingEnum.mp4:
        return 'MP4';
      case AudioEncodingEnum.webmOpus:
        return 'WEBM_OPUS';
      default:
        return 'ENCODING_UNSPECIFIED';
    }
  }
}

View file

@ -0,0 +1,55 @@
import 'package:flutter/material.dart';
import 'package:flutter_gen/gen_l10n/l10n.dart';
import 'package:material_symbols_icons/symbols.dart';
/// The assistance modes a user can open from the message toolbar.
enum MessageMode { translation, definition, speechToText, textToSpeech }

/// UI metadata (icon, title, tooltip) for each [MessageMode].
extension MessageModeExtension on MessageMode {
  /// Toolbar icon for this mode.
  IconData get icon {
    switch (this) {
      case MessageMode.translation:
        return Icons.g_translate;
      case MessageMode.textToSpeech:
        return Symbols.text_to_speech;
      case MessageMode.speechToText:
        return Symbols.speech_to_text;
      // TODO: change icon for audio messages
      case MessageMode.definition:
        return Icons.book;
      default:
        // Unreachable while all enum values are covered above; kept as a
        // safety net for future enum additions.
        return Icons.error; // Icon to indicate an error or unsupported mode
    }
  }

  /// Localized heading shown when this mode's panel is open.
  String title(BuildContext context) {
    switch (this) {
      case MessageMode.translation:
        return L10n.of(context)!.translations;
      case MessageMode.textToSpeech:
        // NOTE(review): reuses the generic 'messageAudio' string — confirm
        // this is the intended title for text-to-speech.
        return L10n.of(context)!.messageAudio;
      case MessageMode.speechToText:
        // Reuses the tooltip string ("Transcript") as the panel title.
        return L10n.of(context)!.speechToTextTooltip;
      case MessageMode.definition:
        return L10n.of(context)!.definitions;
      default:
        return L10n.of(context)!
            .oopsSomethingWentWrong; // Title to indicate an error or unsupported mode
    }
  }

  /// Localized tooltip for this mode's toolbar button.
  String tooltip(BuildContext context) {
    switch (this) {
      case MessageMode.translation:
        return L10n.of(context)!.translationTooltip;
      case MessageMode.textToSpeech:
        return L10n.of(context)!.audioTooltip;
      case MessageMode.speechToText:
        return L10n.of(context)!.speechToTextTooltip;
      case MessageMode.definition:
        return L10n.of(context)!.define;
      default:
        return L10n.of(context)!
            .oopsSomethingWentWrong; // Tooltip to indicate an error or unsupported mode
    }
  }
}

View file

@ -1,12 +1,11 @@
import 'dart:developer';
import 'package:flutter/foundation.dart';
import 'package:matrix/matrix.dart';
import 'package:fluffychat/pangea/constants/pangea_event_types.dart';
import 'package:fluffychat/pangea/models/choreo_record.dart';
import 'package:fluffychat/pangea/models/message_data_models.dart';
import 'package:fluffychat/pangea/models/representation_content_model.dart';
import 'package:fluffychat/pangea/models/tokens_event_content_model.dart';
import 'package:flutter/foundation.dart';
import 'package:matrix/matrix.dart';
extension PangeaEvent on Event {
V getPangeaContent<V>() {

View file

@ -4,9 +4,10 @@ import 'dart:developer';
import 'package:fluffychat/pangea/constants/class_default_values.dart';
import 'package:fluffychat/pangea/constants/model_keys.dart';
import 'package:fluffychat/pangea/constants/pangea_room_types.dart';
import 'package:fluffychat/pangea/matrix_event_wrappers/pangea_message_event.dart';
import 'package:fluffychat/pangea/models/bot_options_model.dart';
import 'package:fluffychat/pangea/models/class_model.dart';
import 'package:fluffychat/pangea/models/pangea_message_event.dart';
import 'package:fluffychat/pangea/models/tokens_event_content_model.dart';
import 'package:fluffychat/pangea/utils/bot_name.dart';
import 'package:fluffychat/pangea/utils/error_handler.dart';
import 'package:flutter/foundation.dart';
@ -22,10 +23,10 @@ import '../../config/app_config.dart';
import '../constants/pangea_event_types.dart';
import '../enum/construct_type_enum.dart';
import '../enum/use_type.dart';
import '../matrix_event_wrappers/construct_analytics_event.dart';
import '../models/choreo_record.dart';
import '../models/construct_analytics_event.dart';
import '../models/constructs_analytics_model.dart';
import '../models/message_data_models.dart';
import '../models/representation_content_model.dart';
import '../models/student_analytics_event.dart';
import '../models/student_analytics_summary_model.dart';
import '../utils/p_store.dart';

View file

@ -1,11 +1,10 @@
import 'package:flutter/foundation.dart';
import 'package:matrix/matrix.dart';
import 'package:fluffychat/pangea/extensions/pangea_event_extension.dart';
import 'package:fluffychat/pangea/utils/error_handler.dart';
import 'package:flutter/foundation.dart';
import 'package:matrix/matrix.dart';
import '../constants/pangea_event_types.dart';
import 'choreo_record.dart';
import '../models/choreo_record.dart';
class ChoreoEvent {
Event event;

View file

@ -3,12 +3,15 @@ import 'dart:convert';
import 'package:collection/collection.dart';
import 'package:fluffychat/pangea/constants/model_keys.dart';
import 'package:fluffychat/pangea/controllers/text_to_speech_controller.dart';
import 'package:fluffychat/pangea/enum/audio_encoding_enum.dart';
import 'package:fluffychat/pangea/extensions/pangea_room_extension.dart';
import 'package:fluffychat/pangea/matrix_event_wrappers/pangea_representation_event.dart';
import 'package:fluffychat/pangea/models/choreo_record.dart';
import 'package:fluffychat/pangea/models/class_model.dart';
import 'package:fluffychat/pangea/models/message_data_models.dart';
import 'package:fluffychat/pangea/models/pangea_match_model.dart';
import 'package:fluffychat/pangea/models/pangea_representation_event.dart';
import 'package:fluffychat/pangea/models/representation_content_model.dart';
import 'package:fluffychat/pangea/models/speech_to_text_models.dart';
import 'package:fluffychat/pangea/models/tokens_event_content_model.dart';
import 'package:fluffychat/pangea/utils/bot_name.dart';
import 'package:fluffychat/pangea/widgets/chat/message_audio_card.dart';
import 'package:flutter/material.dart';
@ -56,6 +59,8 @@ class PangeaMessageEvent {
Room get room => _event.room;
bool get isAudioMessage => _event.messageType == MessageTypes.Audio;
Event? _latestEditCache;
Event get _latestEdit => _latestEditCache ??= _event
.aggregatedEvents(
@ -153,7 +158,7 @@ class PangeaMessageEvent {
},
);
debugPrint("eventId in getAudioGlobal $eventId");
debugPrint("eventId in getTextToSpeechGlobal $eventId");
final Event? audioEvent =
eventId != null ? await room.getEventById(eventId) : null;
@ -167,10 +172,10 @@ class PangeaMessageEvent {
//get audio for text and language
//if no audio exists, create it
//if audio exists, return it
Future<Event?> getAudioGlobal(String langCode) async {
Future<Event?> getTextToSpeechGlobal(String langCode) async {
final String text = representationByLanguage(langCode)?.text ?? body;
final local = getAudioLocal(langCode, text);
final local = getTextToSpeechLocal(langCode, text);
if (local != null) return Future.value(local);
@ -228,16 +233,16 @@ class PangeaMessageEvent {
// .timeout(
// Durations.long4,
// onTimeout: () {
// debugPrint("timeout in getAudioGlobal");
// debugPrint("timeout in getTextToSpeechGlobal");
// return null;
// },
// );
debugPrint("eventId in getAudioGlobal $eventId");
debugPrint("eventId in getTextToSpeechGlobal $eventId");
return eventId != null ? room.getEventById(eventId) : null;
}
Event? getAudioLocal(String langCode, String text) {
Event? getTextToSpeechLocal(String langCode, String text) {
return allAudio.firstWhereOrNull(
(element) {
// Safely access the transcription map
@ -272,6 +277,78 @@ class PangeaMessageEvent {
null;
}).toSet();
Future<SpeechToTextModel?> getSpeechToText(
String l1Code,
String l2Code,
) async {
if (!isAudioMessage) {
ErrorHandler.logError(
e: 'Calling getSpeechToText on non-audio message',
s: StackTrace.current,
data: {
"content": _event.content,
"eventId": _event.eventId,
"roomId": _event.roomId,
"userId": _event.room.client.userID,
"account_data": _event.room.client.accountData,
},
);
return null;
}
final SpeechToTextModel? speechToTextLocal = representations
.firstWhereOrNull(
(element) => element.content.speechToText != null,
)
?.content
.speechToText;
if (speechToTextLocal != null) return speechToTextLocal;
final matrixFile = await _event.downloadAndDecryptAttachment();
// Pangea#
// File? file;
// TODO: Test on mobile and see if we need this case; doesn't seem so.
// if (!kIsWeb) {
// final tempDir = await getTemporaryDirectory();
// final fileName = Uri.encodeComponent(
// // #Pangea
// // widget.event.attachmentOrThumbnailMxcUrl()!.pathSegments.last,
// widget.messageEvent.event
// .attachmentOrThumbnailMxcUrl()!
// .pathSegments
// .last,
// // Pangea#
// );
// file = File('${tempDir.path}/${fileName}_${matrixFile.name}');
// await file.writeAsBytes(matrixFile.bytes);
// }
// audioFile = file;
debugPrint("mimeType ${matrixFile.mimeType}");
debugPrint("encoding ${mimeTypeToAudioEncoding(matrixFile.mimeType)}");
final SpeechToTextModel response =
await MatrixState.pangeaController.speechToText.get(
SpeechToTextRequestModel(
audioContent: matrixFile.bytes,
audioEvent: _event,
config: SpeechToTextAudioConfigModel(
encoding: mimeTypeToAudioEncoding(matrixFile.mimeType),
//this is the default in the RecordConfig in record package
//TODO: check if this is the correct value and make it a constant somewhere
sampleRateHertz: 22050,
userL1: l1Code,
userL2: l2Code,
),
),
);
return response;
}
List<RepresentationEvent>? _representations;
List<RepresentationEvent> get representations {
if (_representations != null) return _representations!;
@ -444,6 +521,8 @@ class PangeaMessageEvent {
),
);
},
).onError(
(error, stackTrace) => ErrorHandler.logError(e: error, s: stackTrace),
);
return pangeaRep;
@ -469,6 +548,7 @@ class PangeaMessageEvent {
_event.room.isSpaceAdmin &&
_event.senderId != BotName.byEnvironment &&
!room.isUserSpaceAdmin(_event.senderId) &&
_event.messageType != PangeaEventTypes.report &&
_event.messageType == MessageTypes.Text;
String get messageDisplayLangCode {

View file

@ -1,8 +1,10 @@
import 'dart:developer';
import 'package:fluffychat/pangea/extensions/pangea_event_extension.dart';
import 'package:fluffychat/pangea/models/pangea_choreo_event.dart';
import 'package:fluffychat/pangea/matrix_event_wrappers/pangea_choreo_event.dart';
import 'package:fluffychat/pangea/models/pangea_token_model.dart';
import 'package:fluffychat/pangea/models/speech_to_text_models.dart';
import 'package:fluffychat/pangea/models/tokens_event_content_model.dart';
import 'package:fluffychat/pangea/repo/tokens_repo.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
@ -13,9 +15,9 @@ import 'package:sentry_flutter/sentry_flutter.dart';
import '../../widgets/matrix.dart';
import '../constants/language_keys.dart';
import '../constants/pangea_event_types.dart';
import '../models/choreo_record.dart';
import '../models/representation_content_model.dart';
import '../utils/error_handler.dart';
import 'choreo_record.dart';
import 'message_data_models.dart';
import 'pangea_tokens_event.dart';
class RepresentationEvent {
@ -25,12 +27,15 @@ class RepresentationEvent {
ChoreoRecord? _choreo;
Timeline timeline;
SpeechToTextModel? _speechToTextResponse;
RepresentationEvent({
required this.timeline,
Event? event,
PangeaRepresentation? content,
PangeaMessageTokens? tokens,
ChoreoRecord? choreo,
SpeechToTextModel? speechToTextResponse,
}) {
if (event != null && event.type != PangeaEventTypes.representation) {
throw Exception(
@ -41,10 +46,14 @@ class RepresentationEvent {
_content = content;
_tokens = tokens;
_choreo = choreo;
_speechToTextResponse = speechToTextResponse;
}
Event? get event => _event;
// Note: in the case where the event is the originalSent or originalWritten event,
// the content will be set on initialization by the PangeaMessageEvent
// Otherwise, the content will be fetched from the event where it is stored in content[type]
PangeaRepresentation get content {
if (_content != null) return _content!;
_content = _event?.getPangeaContent<PangeaRepresentation>();

View file

@ -1,9 +1,9 @@
import 'package:fluffychat/pangea/extensions/pangea_event_extension.dart';
import 'package:fluffychat/pangea/models/tokens_event_content_model.dart';
import 'package:fluffychat/pangea/utils/error_handler.dart';
import 'package:matrix/matrix.dart';
import 'package:fluffychat/pangea/extensions/pangea_event_extension.dart';
import 'package:fluffychat/pangea/utils/error_handler.dart';
import '../constants/pangea_event_types.dart';
import 'message_data_models.dart';
class TokensEvent {
Event event;

View file

@ -1,9 +1,8 @@
import 'dart:convert';
import 'package:fluffychat/pangea/constants/language_keys.dart';
import 'package:fluffychat/pangea/models/speech_to_text_models.dart';
import 'package:fluffychat/pangea/utils/error_handler.dart';
import 'package:matrix/matrix.dart';
import 'package:fluffychat/pangea/models/pangea_token_model.dart';
/// this class is contained within a [RepresentationEvent]
/// this event is the child of a [EventTypes.Message]
/// the event has two potential children events -
@ -22,6 +21,9 @@ class PangeaRepresentation {
bool originalSent;
bool originalWritten;
// a representation can be create via speech to text on the original message
SpeechToTextModel? speechToText;
// how do we know which representation was sent by author?
// RepresentationEvent.text == PangeaMessageEvent.event.body
// use: to know whether directUse
@ -49,20 +51,33 @@ class PangeaRepresentation {
required this.text,
required this.originalSent,
required this.originalWritten,
this.speechToText,
});
factory PangeaRepresentation.fromJson(Map<String, dynamic> json) =>
PangeaRepresentation(
langCode: json[_langCodeKey],
text: json[_textKey],
originalSent: json[_originalSentKey] ?? false,
originalWritten: json[_originalWrittenKey] ?? false,
factory PangeaRepresentation.fromJson(Map<String, dynamic> json) {
if (json[_langCodeKey] == LanguageKeys.unknownLanguage) {
ErrorHandler.logError(
e: Exception("Language code cannot be 'unk'"),
s: StackTrace.current,
data: {"rep_content": json},
);
}
return PangeaRepresentation(
langCode: json[_langCodeKey],
text: json[_textKey],
originalSent: json[_originalSentKey] ?? false,
originalWritten: json[_originalWrittenKey] ?? false,
speechToText: json[_speechToTextKey] == null
? null
: SpeechToTextModel.fromJson(json[_speechToTextKey]),
);
}
static const _textKey = "txt";
static const _langCodeKey = "lang";
static const _originalSentKey = "snt";
static const _originalWrittenKey = "wrttn";
static const _speechToTextKey = "stt";
Map<String, dynamic> toJson() {
final data = <String, dynamic>{};
@ -70,35 +85,9 @@ class PangeaRepresentation {
data[_langCodeKey] = langCode;
if (originalSent) data[_originalSentKey] = originalSent;
if (originalWritten) data[_originalWrittenKey] = originalWritten;
return data;
}
}
/// this class lives within a [PangeaTokensEvent]
/// it always has a [RepresentationEvent] parent
/// These live as separate event so that anyone can add and edit tokens to
/// representation
class PangeaMessageTokens {
List<PangeaToken> tokens;
PangeaMessageTokens({
required this.tokens,
});
factory PangeaMessageTokens.fromJson(Map<String, dynamic> json) {
return PangeaMessageTokens(
tokens: (jsonDecode(json[_tokensKey] ?? "[]") as Iterable)
.map((e) => PangeaToken.fromJson(e))
.toList()
.cast<PangeaToken>(),
);
}
static const _tokensKey = "tkns";
Map<String, dynamic> toJson() {
final data = <String, dynamic>{};
data[_tokensKey] = jsonEncode(tokens.map((e) => e.toJson()).toList());
if (speechToText != null) {
data[_speechToTextKey] = speechToText!.toJson();
}
return data;
}
}

View file

@ -0,0 +1,218 @@
import 'dart:convert';
import 'package:fluffychat/pangea/enum/audio_encoding_enum.dart';
import 'package:fluffychat/pangea/models/pangea_token_model.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:matrix/matrix.dart';
/// Audio configuration for a speech-to-text request.
///
/// Mirrors the request schema of the Choreo `/speech_to_text` endpoint
/// (see the snake_case keys emitted by [toJson]).
class SpeechToTextAudioConfigModel {
  /// Encoding of the submitted audio; serialized via [AudioEncodingEnum.value].
  final AudioEncodingEnum encoding;

  /// Sample rate of the audio in hertz; defaults to 16000.
  final int sampleRateHertz;

  /// Whether the service should return per-word confidence scores.
  final bool enableWordConfidence;

  /// Whether the service should insert punctuation automatically.
  final bool enableAutomaticPunctuation;

  /// The user's base language code (L1).
  final String userL1;

  /// The user's target language code (L2).
  final String userL2;

  SpeechToTextAudioConfigModel({
    required this.encoding,
    required this.userL1,
    required this.userL2,
    this.sampleRateHertz = 16000,
    this.enableWordConfidence = true,
    this.enableAutomaticPunctuation = true,
  });

  /// Serializes this config with the snake_case keys the API expects.
  Map<String, dynamic> toJson() => {
        "encoding": encoding.value,
        "sample_rate_hertz": sampleRateHertz,
        "user_l1": userL1,
        "user_l2": userL2,
        "enable_word_confidence": enableWordConfidence,
        "enable_automatic_punctuation": enableAutomaticPunctuation,
      };
}
/// A speech-to-text request: raw audio bytes plus their [config].
class SpeechToTextRequestModel {
  /// Raw audio bytes to transcribe; base64-encoded in [toJson].
  final Uint8List audioContent;

  /// Audio/locale configuration sent alongside the audio.
  final SpeechToTextAudioConfigModel config;

  /// The Matrix audio event this request originates from, if any.
  /// Not serialized and not part of [==]/[hashCode].
  final Event? audioEvent;

  SpeechToTextRequestModel({
    required this.audioContent,
    required this.config,
    this.audioEvent,
  });

  /// Serializes to the API's JSON shape; audio is base64-encoded.
  Map<String, dynamic> toJson() => {
        "audio_content": base64Encode(audioContent),
        "config": config.toJson(),
      };

  // Equality compares the full byte content and the config.
  // NOTE(review): SpeechToTextAudioConfigModel does not override `==`,
  // so `config == other.config` is identity comparison — two requests
  // with equal-valued but distinct config instances are unequal. Confirm
  // this is intended (e.g. for local-cache lookups).
  @override
  bool operator ==(Object other) {
    if (identical(this, other)) return true;
    if (other is! SpeechToTextRequestModel) return false;
    return listEquals(audioContent, other.audioContent) &&
        config == other.config;
  }

  // Hashes only the first 10 audio bytes for speed. Consistent with `==`:
  // fully-equal byte arrays share the same prefix, hence the same hash.
  @override
  int get hashCode {
    final bytesSample =
        audioContent.length > 10 ? audioContent.sublist(0, 10) : audioContent;
    return Object.hashAll([
      Object.hashAll(bytesSample),
      config.hashCode,
    ]);
  }
}
/// A single recognized token with optional timing and confidence data
/// returned by the speech-to-text service.
class STTToken {
  /// The underlying Pangea token (text, offset, length).
  final PangeaToken token;

  /// Time offset of the token from the start of the audio, if provided.
  final Duration? startTime;

  /// Time at which the token ends in the audio, if provided.
  final Duration? endTime;

  /// Recognition confidence as an integer percentage (0-100), if provided.
  final int? confidence;

  STTToken({
    required this.token,
    this.startTime,
    this.endTime,
    this.confidence,
  });

  /// Character offset of the token within the transcript text.
  int get offset => token.text.offset;

  /// Character length of the token within the transcript text.
  int get length => token.text.length;

  /// Color-codes the token by [confidence]: green above 80, amber above 50,
  /// red otherwise. Falls back to the theme's body text color (then plain
  /// black/white by brightness) when confidence is absent.
  Color color(BuildContext context) {
    if (confidence == null) {
      return Theme.of(context).textTheme.bodyMedium?.color ??
          (Theme.of(context).brightness == Brightness.dark
              ? Colors.white
              : Colors.black);
    }
    if (confidence! > 80) {
      return const Color.fromARGB(255, 0, 152, 0);
    }
    if (confidence! > 50) {
      return const Color.fromARGB(255, 184, 142, 43);
    }
    return Colors.red;
  }

  /// Builds a token from API JSON.
  ///
  /// Times arrive in (possibly fractional) seconds and are converted to whole
  /// milliseconds. The previous form `json['start_time'] * 1000.toInt()`
  /// bound `.toInt()` to the literal 1000 (a no-op), so a double input
  /// produced a double product and crashed `Duration`'s int-only
  /// `milliseconds:` parameter; the parentheses below fix the precedence.
  factory STTToken.fromJson(Map<String, dynamic> json) {
    return STTToken(
      token: PangeaToken.fromJson(json['token']),
      startTime: json['start_time'] != null
          ? Duration(milliseconds: (json['start_time'] * 1000).toInt())
          : null,
      endTime: json['end_time'] != null
          ? Duration(milliseconds: (json['end_time'] * 1000).toInt())
          : null,
      confidence: json['confidence'],
    );
  }

  /// Serializes this token to JSON.
  ///
  /// The nested token is serialized explicitly (previously the raw object
  /// was stored, relying on `jsonEncode`'s implicit `toJson` fallback).
  /// NOTE(review): emits times in milliseconds while [fromJson] expects
  /// seconds, so this is not round-trip symmetric — confirm intended schema.
  Map<String, dynamic> toJson() => {
        "token": token.toJson(),
        "start_time": startTime?.inMilliseconds,
        "end_time": endTime?.inMilliseconds,
        "confidence": confidence,
      };

  @override
  bool operator ==(Object other) {
    if (identical(this, other)) return true;
    if (other is! STTToken) return false;
    return token == other.token &&
        startTime == other.startTime &&
        endTime == other.endTime &&
        confidence == other.confidence;
  }

  @override
  int get hashCode {
    return Object.hashAll([
      token.hashCode,
      startTime.hashCode,
      endTime.hashCode,
      confidence.hashCode,
    ]);
  }
}
/// One candidate transcription of an audio message.
class Transcript {
  /// The transcribed text.
  final String text;

  /// Overall recognition confidence as an integer percentage (0-100).
  final int confidence;

  /// Per-token recognition data, positioned within [text] by offset/length.
  final List<STTToken> sttTokens;

  /// Language code of this transcript.
  final String langCode;

  Transcript({
    required this.text,
    required this.confidence,
    required this.sttTokens,
    required this.langCode,
  });

  /// Coerces the raw confidence to an int percentage.
  ///
  /// Values above 100 are assumed to be scaled by 100 and divided back down.
  /// The previous inline expression used `/`, which ALWAYS yields a double in
  /// Dart, so any confidence > 100 crashed the int-typed [confidence] field;
  /// rounding keeps the result an int for both int and double inputs.
  /// NOTE(review): a fractional 0-1 confidence would round to 0 or 1 here —
  /// confirm the API's scale.
  static int _normalizeConfidence(num raw) {
    final num scaled = raw <= 100 ? raw : raw / 100;
    return scaled.round();
  }

  /// Builds a transcript from API JSON.
  factory Transcript.fromJson(Map<String, dynamic> json) => Transcript(
        text: json['transcript'],
        confidence: _normalizeConfidence(json['confidence']),
        sttTokens: (json['stt_tokens'] as List)
            .map((e) => STTToken.fromJson(e))
            .toList(),
        langCode: json['lang_code'],
      );

  /// Serializes back to the API's JSON shape.
  Map<String, dynamic> toJson() => {
        "transcript": text,
        "confidence": confidence,
        "stt_tokens": sttTokens.map((e) => e.toJson()).toList(),
        "lang_code": langCode,
      };
}
/// One result from the speech-to-text service, holding the candidate
/// transcripts produced for a stretch of audio.
class SpeechToTextResult {
  /// Candidate transcriptions for this result.
  final List<Transcript> transcripts;

  SpeechToTextResult({required this.transcripts});

  /// Builds a result from API JSON, decoding each transcript entry.
  factory SpeechToTextResult.fromJson(Map<String, dynamic> json) {
    final rawTranscripts = json['transcripts'] as List;
    final decoded =
        rawTranscripts.map((e) => Transcript.fromJson(e)).toList();
    return SpeechToTextResult(transcripts: decoded);
  }

  /// Serializes back to the API's JSON shape.
  Map<String, dynamic> toJson() {
    return {
      "transcripts": transcripts.map((e) => e.toJson()).toList(),
    };
  }
}
/// Top-level speech-to-text response: a list of results, each holding
/// candidate transcripts.
class SpeechToTextModel {
  /// The recognition results returned by the service.
  final List<SpeechToTextResult> results;

  SpeechToTextModel({
    required this.results,
  });

  /// Convenience: the first transcript of the first result.
  /// NOTE(review): throws a StateError when [results] or its transcripts
  /// are empty — confirm the API always returns at least one of each,
  /// or guard at the call sites.
  Transcript get transcript => results.first.transcripts.first;

  /// Language code of the first transcript (same emptiness caveat as above).
  String get langCode => results.first.transcripts.first.langCode;

  /// Builds the model from API JSON.
  factory SpeechToTextModel.fromJson(Map<String, dynamic> json) =>
      SpeechToTextModel(
        results: (json['results'] as List)
            .map((e) => SpeechToTextResult.fromJson(e))
            .toList(),
      );

  /// Serializes back to the API's JSON shape.
  Map<String, dynamic> toJson() => {
        "results": results.map((e) => e.toJson()).toList(),
      };
}

View file

@ -0,0 +1,32 @@
import 'dart:convert';
import 'package:fluffychat/pangea/models/pangea_token_model.dart';
/// This class lives within a [PangeaTokensEvent];
/// it always has a [RepresentationEvent] parent.
/// Tokens live as a separate event so that anyone can add and edit
/// tokens for a representation.
class PangeaMessageTokens {
  /// The tokenization of the parent representation's text.
  List<PangeaToken> tokens;

  PangeaMessageTokens({
    required this.tokens,
  });

  /// Key under which the token list is stored, double-encoded as a JSON string.
  static const _tokensKey = "tkns";

  /// Decodes [_tokensKey] from [json]; absent data yields an empty list.
  factory PangeaMessageTokens.fromJson(Map<String, dynamic> json) {
    final decoded = jsonDecode(json[_tokensKey] ?? "[]") as Iterable;
    final tokenList = decoded
        .map((e) => PangeaToken.fromJson(e))
        .toList()
        .cast<PangeaToken>();
    return PangeaMessageTokens(tokens: tokenList);
  }

  /// Re-encodes the tokens as a JSON string under [_tokensKey].
  Map<String, dynamic> toJson() {
    return {
      _tokensKey: jsonEncode(tokens.map((e) => e.toJson()).toList()),
    };
  }
}

View file

@ -51,6 +51,7 @@ class PApiUrls {
static String subseqStep = "/it_step";
static String textToSpeech = "${Environment.choreoApi}/text_to_speech";
static String speechToText = "${Environment.choreoApi}/speech_to_text";
///-------------------------------- revenue cat --------------------------
static String rcApiV1 = "https://api.revenuecat.com/v1";

View file

@ -5,11 +5,11 @@ import 'package:fluffychat/config/app_config.dart';
import 'package:fluffychat/pangea/constants/pangea_event_types.dart';
import 'package:fluffychat/pangea/controllers/pangea_controller.dart';
import 'package:fluffychat/pangea/enum/construct_type_enum.dart';
import 'package:fluffychat/pangea/models/construct_analytics_event.dart';
import 'package:fluffychat/pangea/matrix_event_wrappers/construct_analytics_event.dart';
import 'package:fluffychat/pangea/matrix_event_wrappers/pangea_message_event.dart';
import 'package:fluffychat/pangea/matrix_event_wrappers/pangea_representation_event.dart';
import 'package:fluffychat/pangea/models/constructs_analytics_model.dart';
import 'package:fluffychat/pangea/models/pangea_match_model.dart';
import 'package:fluffychat/pangea/models/pangea_message_event.dart';
import 'package:fluffychat/pangea/models/pangea_representation_event.dart';
import 'package:fluffychat/pangea/pages/analytics/base_analytics.dart';
import 'package:fluffychat/utils/date_time_extension.dart';
import 'package:fluffychat/utils/string_color.dart';

View file

@ -1,66 +0,0 @@
// import 'dart:async';
// import 'dart:convert';
// import 'package:fluffychat/pangea/config/environment.dart';
// import 'package:fluffychat/pangea/constants/model_keys.dart';
// import 'package:fluffychat/pangea/network/urls.dart';
// import 'package:http/http.dart';
// import '../network/requests.dart';
// class TextToSpeechRequest {
// String text;
// String langCode;
// TextToSpeechRequest({required this.text, required this.langCode});
// Map<String, dynamic> toJson() => {
// ModelKey.text: text,
// ModelKey.langCode: langCode,
// };
// }
// class TextToSpeechResponse {
// String audioContent;
// String mediaType;
// int durationMillis;
// List<int> waveform;
// TextToSpeechResponse({
// required this.audioContent,
// required this.mediaType,
// required this.durationMillis,
// required this.waveform,
// });
// factory TextToSpeechResponse.fromJson(
// Map<String, dynamic> json,
// ) =>
// TextToSpeechResponse(
// audioContent: json["audio_content"],
// mediaType: json["media_type"],
// durationMillis: json["duration_millis"],
// waveform: List<int>.from(json["wave_form"]),
// );
// }
// class TextToSpeechService {
// static Future<TextToSpeechResponse> get({
// required String accessToken,
// required TextToSpeechRequest params,
// }) async {
// final Requests request = Requests(
// choreoApiKey: Environment.choreoApiKey,
// accessToken: accessToken,
// );
// final Response res = await request.post(
// url: PApiUrls.textToSpeech,
// body: params.toJson(),
// );
// final Map<String, dynamic> json = jsonDecode(res.body);
// return TextToSpeechResponse.fromJson(json);
// }
// }

View file

@ -1,7 +1,6 @@
import 'package:flutter/material.dart';
import 'package:fluffychat/config/app_config.dart';
import 'package:fluffychat/pangea/utils/error_handler.dart';
import 'package:flutter/material.dart';
class BotStyle {
static TextStyle text(
@ -25,6 +24,7 @@ class BotStyle {
? AppConfig.primaryColorLight
: AppConfig.primaryColor
: null,
inherit: true,
);
return existingStyle?.merge(botStyle) ?? botStyle;

View file

@ -16,7 +16,7 @@ import 'package:syncfusion_flutter_xlsio/xlsio.dart';
import 'package:universal_html/html.dart' as webFile;
import 'package:fluffychat/pangea/models/class_model.dart';
import 'package:fluffychat/pangea/models/pangea_message_event.dart';
import 'package:fluffychat/pangea/matrix_event_wrappers/pangea_message_event.dart';
import 'package:fluffychat/pangea/utils/error_handler.dart';
import '../models/choreo_record.dart';

View file

@ -1,8 +1,8 @@
import 'package:fluffychat/pangea/constants/language_keys.dart';
import 'package:fluffychat/pangea/constants/model_keys.dart';
import 'package:fluffychat/pangea/controllers/pangea_controller.dart';
import 'package:fluffychat/pangea/matrix_event_wrappers/pangea_message_event.dart';
import 'package:fluffychat/pangea/models/class_model.dart';
import 'package:fluffychat/pangea/models/pangea_message_event.dart';
import 'package:fluffychat/pangea/utils/error_handler.dart';
import 'package:flutter_gen/gen_l10n/l10n.dart';
import 'package:matrix/matrix.dart';

View file

@ -1,12 +1,10 @@
import 'package:flutter/material.dart';
import 'package:flutter_gen/gen_l10n/l10n.dart';
import 'package:matrix/matrix.dart';
import 'package:fluffychat/pangea/constants/pangea_message_types.dart';
import 'package:fluffychat/pangea/constants/pangea_event_types.dart';
import 'package:fluffychat/pangea/extensions/client_extension.dart';
import 'package:fluffychat/pangea/extensions/pangea_room_extension.dart';
import 'package:fluffychat/widgets/matrix.dart';
import 'package:flutter/material.dart';
import 'package:flutter_gen/gen_l10n/l10n.dart';
import 'package:matrix/matrix.dart';
Future<void> reportMessage(
BuildContext context,
@ -66,7 +64,7 @@ Future<void> reportMessage(
final String message = "$messageTitle\n\n$messageBody";
for (final Room reportDM in reportDMs) {
final event = <String, dynamic>{
'msgtype': PangeaMessageTypes.report,
'msgtype': PangeaEventTypes.report,
'body': message,
};
await reportDM.sendEvent(event);

View file

@ -1,6 +1,7 @@
import 'package:fluffychat/pages/chat/events/audio_player.dart';
import 'package:fluffychat/pangea/models/pangea_message_event.dart';
import 'package:fluffychat/pangea/matrix_event_wrappers/pangea_message_event.dart';
import 'package:fluffychat/pangea/utils/error_handler.dart';
import 'package:fluffychat/pangea/widgets/chat/toolbar_content_loading_indicator.dart';
import 'package:fluffychat/pangea/widgets/igc/card_error_widget.dart';
import 'package:flutter/material.dart';
import 'package:flutter_gen/gen_l10n/l10n.dart';
@ -33,7 +34,7 @@ class MessageAudioCardState extends State<MessageAudioCard> {
widget.messageEvent.representationByLanguage(langCode)?.text;
if (text != null) {
final Event? localEvent =
widget.messageEvent.getAudioLocal(langCode, text);
widget.messageEvent.getTextToSpeechLocal(langCode, text);
if (localEvent != null) {
localAudioEvent = localEvent;
if (mounted) setState(() => _isLoading = false);
@ -74,17 +75,9 @@ class MessageAudioCardState extends State<MessageAudioCard> {
@override
Widget build(BuildContext context) {
return Padding(
padding: const EdgeInsets.all(8),
return Container(
child: _isLoading
? SizedBox(
height: 14,
width: 14,
child: CircularProgressIndicator(
strokeWidth: 2.0,
color: Theme.of(context).colorScheme.primary,
),
)
? const ToolbarContentLoadingIndicator()
: localAudioEvent != null || audioFile != null
? Container(
constraints: const BoxConstraints(

View file

@ -0,0 +1,182 @@
import 'package:fluffychat/pangea/matrix_event_wrappers/pangea_message_event.dart';
import 'package:fluffychat/pangea/models/speech_to_text_models.dart';
import 'package:fluffychat/pangea/widgets/chat/toolbar_content_loading_indicator.dart';
import 'package:fluffychat/pangea/widgets/common/icon_number_widget.dart';
import 'package:fluffychat/pangea/widgets/igc/card_error_widget.dart';
import 'package:fluffychat/widgets/matrix.dart';
import 'package:flutter/gestures.dart';
import 'package:flutter/material.dart';
import 'package:flutter_gen/gen_l10n/l10n.dart';
import 'package:material_symbols_icons/symbols.dart';
import '../../utils/bot_style.dart';
/// Toolbar card showing a confidence-colored transcription of an
/// audio message, with per-token stats on tap.
class MessageSpeechToTextCard extends StatefulWidget {
  /// The audio message event to transcribe.
  final PangeaMessageEvent messageEvent;

  const MessageSpeechToTextCard({
    super.key,
    required this.messageEvent,
  });

  @override
  MessageSpeechToTextCardState createState() => MessageSpeechToTextCardState();
}
/// State for [MessageSpeechToTextCard]: fetches (or reuses) a transcription
/// of the audio message and renders it with per-token confidence colors.
class MessageSpeechToTextCardState extends State<MessageSpeechToTextCard> {
  /// The transcription response, once fetched.
  SpeechToTextModel? speechToTextResponse;

  /// True while the transcription request is in flight.
  bool _fetchingTranscription = true;

  /// Last fetch error, shown via [CardErrorWidget].
  /// NOTE(review): never assigned while the catch block below stays
  /// commented out, so the error widget currently receives null.
  Object? error;

  /// Token the user tapped to focus its stats; null when none is selected.
  STTToken? selectedToken;

  /// The user's active L1 (base) language code for this room.
  String? get l1Code =>
      MatrixState.pangeaController.languageController.activeL1Code(
        roomID: widget.messageEvent.room.id,
      );

  /// The user's active L2 (target) language code for this room.
  String? get l2Code =>
      MatrixState.pangeaController.languageController.activeL2Code(
        roomID: widget.messageEvent.room.id,
      );

  // look for transcription in message event
  // if not found, call API to transcribe audio
  //
  // NOTE(review): error handling is commented out (WIP), so any thrown
  // exception — including the one below — escapes this async method and
  // `setState` in the de facto "finally" may never run, leaving the
  // loading spinner up. Also, `setState` is not guarded by `mounted`.
  Future<void> getSpeechToText() async {
    // try {
    if (l1Code == null || l2Code == null) {
      throw Exception('Language selection not found');
    }
    // ??= keeps a previously fetched response instead of re-requesting.
    speechToTextResponse ??=
        await widget.messageEvent.getSpeechToText(l1Code!, l2Code!);
    debugPrint(
      'Speech to text transcript: ${speechToTextResponse?.transcript.text}',
    );
    // } catch (e, s) {
    //   debugger(when: kDebugMode);
    //   error = e;
    //   ErrorHandler.logError(
    //     e: e,
    //     s: s,
    //     data: widget.messageEvent.event.content,
    //   );
    // } finally {
    setState(() => _fetchingTranscription = false);
    // }
  }

  /// Splits the transcript into spans: untokenized stretches are plain
  /// text, each token's substring is colored by confidence and made
  /// tappable to toggle [selectedToken].
  /// Assumes sttTokens are sorted by offset and non-overlapping —
  /// TODO(review): confirm against the API contract.
  TextSpan _buildTranscriptText(BuildContext context) {
    final Transcript transcript = speechToTextResponse!.transcript;
    final List<InlineSpan> spans = [];
    final String fullText = transcript.text;
    // End index (exclusive) of the last span added so far.
    int lastEnd = 0;
    for (final token in transcript.sttTokens) {
      // debugPrint('Token confidence: ${token.confidence}');
      // debugPrint('color: ${token.color(context)}');
      if (token.offset > lastEnd) {
        // Add any plain text before the token
        spans.add(
          TextSpan(
            text: fullText.substring(lastEnd, token.offset),
          ),
        );
        // debugPrint('Pre: ${fullText.substring(lastEnd, token.offset)}');
      }
      spans.add(
        TextSpan(
          text: fullText.substring(token.offset, token.offset + token.length),
          style: BotStyle.text(
            context,
            existingStyle: TextStyle(color: token.color(context)),
            setColor: false,
          ),
          // gesture recognizer that toggles selectedToken on tap
          recognizer: TapGestureRecognizer()
            ..onTap = () {
              debugPrint('Token tapped');
              debugPrint(token.toJson().toString());
              setState(() {
                if (selectedToken == token) {
                  selectedToken = null;
                } else {
                  selectedToken = token;
                }
              });
            },
        ),
      );
      // debugPrint(
      //   'Main: ${fullText.substring(token.offset, token.offset + token.length)}',
      // );
      lastEnd = token.offset + token.length;
    }
    if (lastEnd < fullText.length) {
      // Add any remaining text after the last token
      spans.add(
        TextSpan(
          text: fullText.substring(lastEnd),
        ),
      );
      // debugPrint('Post: ${fullText.substring(lastEnd)}');
    }
    return TextSpan(children: spans);
  }

  @override
  void initState() {
    super.initState();
    // Kick off the (unawaited) fetch as soon as the card mounts.
    getSpeechToText();
  }

  @override
  Widget build(BuildContext context) {
    if (_fetchingTranscription) {
      return const ToolbarContentLoadingIndicator();
    }
    // done fetching but no results means some kind of error
    if (speechToTextResponse == null) {
      return CardErrorWidget(error: error);
    }
    final int words = speechToTextResponse!.transcript.sttTokens.length;
    final int accuracy = speechToTextResponse!.transcript.confidence;
    // NOTE(review): "points" is word count times overall accuracy —
    // confirm this is the intended scoring formula.
    final int total = words * accuracy;
    //TODO: find better icons
    return Column(
      children: [
        RichText(
          text: _buildTranscriptText(context),
        ),
        const SizedBox(height: 16),
        Row(
          mainAxisAlignment: MainAxisAlignment.spaceEvenly,
          children: [
            IconNumberWidget(
              icon: Icons.abc,
              number: (selectedToken == null ? words : 1).toString(),
              toolTip: L10n.of(context)!.words,
            ),
            IconNumberWidget(
              icon: Symbols.target,
              number:
                  "${selectedToken?.confidence ?? speechToTextResponse!.transcript.confidence}%",
              toolTip: L10n.of(context)!.accuracy,
            ),
            IconNumberWidget(
              icon: Icons.speed,
              number: (selectedToken?.confidence ?? total).toString(),
              toolTip: L10n.of(context)!.points,
            ),
          ],
        ),
      ],
    );
  }
}

View file

@ -1,25 +1,27 @@
import 'dart:async';
import 'dart:developer';
import 'package:fluffychat/config/themes.dart';
import 'package:fluffychat/pages/chat/chat.dart';
import 'package:fluffychat/pangea/constants/local.key.dart';
import 'package:fluffychat/pangea/models/pangea_message_event.dart';
import 'package:fluffychat/pangea/enum/message_mode_enum.dart';
import 'package:fluffychat/pangea/matrix_event_wrappers/pangea_message_event.dart';
import 'package:fluffychat/pangea/utils/any_state_holder.dart';
import 'package:fluffychat/pangea/utils/error_handler.dart';
import 'package:fluffychat/pangea/utils/overlay.dart';
import 'package:fluffychat/pangea/widgets/chat/message_audio_card.dart';
import 'package:fluffychat/pangea/widgets/chat/message_speech_to_text_card.dart';
import 'package:fluffychat/pangea/widgets/chat/message_text_selection.dart';
import 'package:fluffychat/pangea/widgets/chat/message_translation_card.dart';
import 'package:fluffychat/pangea/widgets/chat/message_unsubscribed_card.dart';
import 'package:fluffychat/pangea/widgets/chat/overlay_message.dart';
import 'package:fluffychat/pangea/widgets/igc/word_data_card.dart';
import 'package:fluffychat/widgets/matrix.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter_gen/gen_l10n/l10n.dart';
import 'package:matrix/matrix.dart';
enum MessageMode { translation, play, definition }
class ToolbarDisplayController {
final PangeaMessageEvent pangeaMessageEvent;
final String targetId;
@ -96,6 +98,7 @@ class ToolbarDisplayController {
],
);
} catch (err) {
debugger(when: kDebugMode);
ErrorHandler.logError(e: err, s: StackTrace.current);
return;
}
@ -153,53 +156,12 @@ class MessageToolbar extends StatefulWidget {
}
class MessageToolbarState extends State<MessageToolbar> {
Widget? child;
Widget? toolbarContent;
MessageMode? currentMode;
bool updatingMode = false;
late StreamSubscription<String?> selectionStream;
late StreamSubscription<MessageMode> toolbarModeStream;
IconData getIconData(MessageMode mode) {
switch (mode) {
case MessageMode.translation:
return Icons.g_translate;
case MessageMode.play:
return Icons.play_arrow;
case MessageMode.definition:
return Icons.book;
default:
return Icons.error; // Icon to indicate an error or unsupported mode
}
}
String getModeTitle(MessageMode mode) {
switch (mode) {
case MessageMode.translation:
return L10n.of(context)!.translations;
case MessageMode.play:
return L10n.of(context)!.messageAudio;
case MessageMode.definition:
return L10n.of(context)!.definitions;
default:
return L10n.of(context)!
.oopsSomethingWentWrong; // Title to indicate an error or unsupported mode
}
}
String getModeTooltip(MessageMode mode) {
switch (mode) {
case MessageMode.translation:
return L10n.of(context)!.translationTooltip;
case MessageMode.play:
return L10n.of(context)!.audioTooltip;
case MessageMode.definition:
return L10n.of(context)!.define;
default:
return L10n.of(context)!
.oopsSomethingWentWrong; // Title to indicate an error or unsupported mode
}
}
void updateMode(MessageMode newMode) {
if (updatingMode) return;
debugPrint("updating toolbar mode");
@ -210,8 +172,8 @@ class MessageToolbarState extends State<MessageToolbar> {
updatingMode = true;
});
if (!subscribed) {
child = MessageUnsubscribedCard(
languageTool: getModeTitle(newMode),
toolbarContent = MessageUnsubscribedCard(
languageTool: newMode.title(context),
mode: newMode,
toolbarModeStream: widget.toolbarModeStream,
);
@ -220,13 +182,21 @@ class MessageToolbarState extends State<MessageToolbar> {
case MessageMode.translation:
showTranslation();
break;
case MessageMode.play:
playAudio();
case MessageMode.textToSpeech:
showTextToSpeech();
break;
case MessageMode.speechToText:
showSpeechToText();
break;
case MessageMode.definition:
showDefinition();
break;
default:
ErrorHandler.logError(
e: "Invalid toolbar mode",
s: StackTrace.current,
data: {"newMode": newMode},
);
break;
}
}
@ -237,28 +207,36 @@ class MessageToolbarState extends State<MessageToolbar> {
void showTranslation() {
debugPrint("show translation");
child = MessageTranslationCard(
toolbarContent = MessageTranslationCard(
messageEvent: widget.pangeaMessageEvent,
immersionMode: widget.immersionMode,
selection: widget.textSelection,
);
}
void playAudio() {
debugPrint("play audio");
child = MessageAudioCard(
void showTextToSpeech() {
debugPrint("show text to speech");
toolbarContent = MessageAudioCard(
messageEvent: widget.pangeaMessageEvent,
);
}
void showSpeechToText() {
debugPrint("show speech to text");
toolbarContent = MessageSpeechToTextCard(
messageEvent: widget.pangeaMessageEvent,
);
}
void showDefinition() {
debugPrint("show definition");
if (widget.textSelection.selectedText == null ||
widget.textSelection.selectedText!.isEmpty) {
child = const SelectToDefine();
toolbarContent = const SelectToDefine();
return;
}
child = WordDataCard(
toolbarContent = WordDataCard(
word: widget.textSelection.selectedText!,
wordLang: widget.pangeaMessageEvent.messageDisplayLangCode,
fullText: widget.textSelection.messageText,
@ -292,7 +270,11 @@ class MessageToolbarState extends State<MessageToolbar> {
) ??
true;
autoplay
? updateMode(MessageMode.play)
? updateMode(
widget.pangeaMessageEvent.isAudioMessage
? MessageMode.speechToText
: MessageMode.textToSpeech,
)
: updateMode(MessageMode.translation);
});
@ -350,8 +332,11 @@ class MessageToolbarState extends State<MessageToolbar> {
duration: FluffyThemes.animationDuration,
child: Column(
children: [
child ?? const SizedBox(),
SizedBox(height: child == null ? 0 : 20),
Padding(
padding: const EdgeInsets.all(8.0),
child: toolbarContent ?? const SizedBox(),
),
SizedBox(height: toolbarContent == null ? 0 : 20),
],
),
),
@ -360,10 +345,19 @@ class MessageToolbarState extends State<MessageToolbar> {
Row(
mainAxisSize: MainAxisSize.min,
children: MessageMode.values.map((mode) {
if ([MessageMode.definition, MessageMode.textToSpeech, MessageMode.translation]
.contains(mode) &&
widget.pangeaMessageEvent.isAudioMessage) {
return const SizedBox.shrink();
}
if (mode == MessageMode.speechToText &&
!widget.pangeaMessageEvent.isAudioMessage) {
return const SizedBox.shrink();
}
return Tooltip(
message: getModeTooltip(mode),
message: mode.tooltip(context),
child: IconButton(
icon: Icon(getIconData(mode)),
icon: Icon(mode.icon),
color: currentMode == mode
? Theme.of(context).colorScheme.primary
: null,

View file

@ -1,9 +1,10 @@
import 'package:fluffychat/pangea/models/message_data_models.dart';
import 'package:fluffychat/pangea/models/pangea_message_event.dart';
import 'package:fluffychat/pangea/matrix_event_wrappers/pangea_message_event.dart';
import 'package:fluffychat/pangea/models/representation_content_model.dart';
import 'package:fluffychat/pangea/repo/full_text_translation_repo.dart';
import 'package:fluffychat/pangea/utils/bot_style.dart';
import 'package:fluffychat/pangea/utils/error_handler.dart';
import 'package:fluffychat/pangea/widgets/chat/message_text_selection.dart';
import 'package:fluffychat/pangea/widgets/chat/toolbar_content_loading_indicator.dart';
import 'package:fluffychat/pangea/widgets/igc/card_error_widget.dart';
import 'package:fluffychat/widgets/matrix.dart';
import 'package:flutter/material.dart';
@ -138,17 +139,9 @@ class MessageTranslationCardState extends State<MessageTranslationCard> {
return const CardErrorWidget();
}
return Padding(
padding: const EdgeInsets.all(8),
return Container(
child: _fetchingRepresentation
? SizedBox(
height: 14,
width: 14,
child: CircularProgressIndicator(
strokeWidth: 2.0,
color: Theme.of(context).colorScheme.primary,
),
)
? const ToolbarContentLoadingIndicator()
: selectionTranslation != null
? Text(
selectionTranslation!,

View file

@ -2,11 +2,12 @@ import 'dart:async';
import 'package:fluffychat/config/app_config.dart';
import 'package:fluffychat/pangea/utils/bot_style.dart';
import 'package:fluffychat/pangea/widgets/chat/message_toolbar.dart';
import 'package:fluffychat/widgets/matrix.dart';
import 'package:flutter/material.dart';
import 'package:flutter_gen/gen_l10n/l10n.dart';
import '../../enum/message_mode_enum.dart';
class MessageUnsubscribedCard extends StatelessWidget {
final String languageTool;
final MessageMode mode;
@ -35,34 +36,31 @@ class MessageUnsubscribedCard extends StatelessWidget {
}
}
return Padding(
padding: const EdgeInsets.fromLTRB(10, 10, 10, 0),
child: Column(
children: [
Text(
style: BotStyle.text(context),
"${L10n.of(context)!.subscribedToUnlockTools} $languageTool",
textAlign: TextAlign.center,
),
const SizedBox(height: 10),
SizedBox(
width: double.infinity,
child: TextButton(
onPressed: onButtonPress,
style: ButtonStyle(
backgroundColor: MaterialStateProperty.all<Color>(
(AppConfig.primaryColor).withOpacity(0.1),
),
),
child: Text(
inTrialWindow
? L10n.of(context)!.activateTrial
: L10n.of(context)!.getAccess,
return Column(
children: [
Text(
style: BotStyle.text(context),
"${L10n.of(context)!.subscribedToUnlockTools} $languageTool",
textAlign: TextAlign.center,
),
const SizedBox(height: 10),
SizedBox(
width: double.infinity,
child: TextButton(
onPressed: onButtonPress,
style: ButtonStyle(
backgroundColor: MaterialStateProperty.all<Color>(
(AppConfig.primaryColor).withOpacity(0.1),
),
),
child: Text(
inTrialWindow
? L10n.of(context)!.activateTrial
: L10n.of(context)!.getAccess,
),
),
],
),
),
],
);
}
}

View file

@ -1,7 +1,7 @@
import 'package:fluffychat/config/themes.dart';
import 'package:fluffychat/pages/chat/events/message_content.dart';
import 'package:fluffychat/pangea/enum/use_type.dart';
import 'package:fluffychat/pangea/models/pangea_message_event.dart';
import 'package:fluffychat/pangea/matrix_event_wrappers/pangea_message_event.dart';
import 'package:fluffychat/pangea/widgets/chat/message_toolbar.dart';
import 'package:fluffychat/utils/date_time_extension.dart';
import 'package:flutter/material.dart';

View file

@ -0,0 +1,65 @@
import 'package:fluffychat/pangea/models/speech_to_text_models.dart';
import 'package:fluffychat/pangea/utils/bot_style.dart';
import 'package:flutter/material.dart';
/// Renders a [Transcript] as rich text, coloring each recognized token's
/// substring by its confidence (see [STTToken.color]).
class SpeechToTextText extends StatelessWidget {
  /// The transcript to render.
  final Transcript transcript;

  const SpeechToTextText({super.key, required this.transcript});

  @override
  Widget build(BuildContext context) {
    return RichText(
      text: _buildTranscriptText(context, transcript),
    );
  }

  /// Splits [transcript]'s text into spans: untokenized stretches stay
  /// plain, each token's substring is styled with its confidence color.
  /// Assumes sttTokens are sorted by offset and non-overlapping —
  /// TODO(review): confirm against the API contract.
  TextSpan _buildTranscriptText(BuildContext context, Transcript transcript) {
    final List<InlineSpan> spans = [];
    final String fullText = transcript.text;
    // End index (exclusive) of the last span added so far.
    int lastEnd = 0;
    for (final token in transcript.sttTokens) {
      // debugPrint('Token confidence: ${token.confidence}');
      // debugPrint('color: ${token.color(context)}');
      if (token.offset > lastEnd) {
        // Add any plain text before the token
        spans.add(
          TextSpan(
            text: fullText.substring(lastEnd, token.offset),
          ),
        );
        // debugPrint('Pre: ${fullText.substring(lastEnd, token.offset)}');
      }
      spans.add(
        TextSpan(
          text: fullText.substring(token.offset, token.offset + token.length),
          style: BotStyle.text(
            context,
            existingStyle: TextStyle(color: token.color(context)),
            setColor: false,
          ),
        ),
      );
      // debugPrint(
      //   'Main: ${fullText.substring(token.offset, token.offset + token.length)}',
      // );
      lastEnd = token.offset + token.length;
    }
    if (lastEnd < fullText.length) {
      // Add any remaining text after the last token
      spans.add(
        TextSpan(
          text: fullText.substring(lastEnd),
        ),
      );
      // debugPrint('Post: ${fullText.substring(lastEnd)}');
    }
    return TextSpan(children: spans);
  }
}

View file

@ -3,7 +3,7 @@ import 'dart:developer';
import 'package:fluffychat/config/app_config.dart';
import 'package:fluffychat/pages/chat/chat.dart';
import 'package:fluffychat/pages/chat/events/audio_player.dart';
import 'package:fluffychat/pangea/models/pangea_message_event.dart';
import 'package:fluffychat/pangea/matrix_event_wrappers/pangea_message_event.dart';
import 'package:fluffychat/pangea/utils/error_handler.dart';
import 'package:fluffychat/widgets/matrix.dart';
import 'package:flutter/foundation.dart';
@ -50,7 +50,7 @@ class _TextToSpeechButtonState extends State<TextToSpeechButton> {
Event? get localAudioEvent =>
langCode != null && text != null && text!.isNotEmpty
? _pangeaMessageEvent.getAudioLocal(langCode!, text!)
? _pangeaMessageEvent.getTextToSpeechLocal(langCode!, text!)
: null;
String? get langCode =>
@ -69,7 +69,7 @@ class _TextToSpeechButtonState extends State<TextToSpeechButton> {
if (langCode == null || langCode!.isEmpty) return;
setState(() => _isLoading = true);
await _pangeaMessageEvent.getAudioGlobal(langCode!);
await _pangeaMessageEvent.getTextToSpeechGlobal(langCode!);
setState(() => _isLoading = false);
} catch (e) {
setState(() => _isLoading = false);

View file

@ -0,0 +1,19 @@
import 'package:flutter/material.dart';
/// A small circular spinner shown while toolbar content is loading.
class ToolbarContentLoadingIndicator extends StatelessWidget {
  const ToolbarContentLoadingIndicator({super.key});

  @override
  Widget build(BuildContext context) {
    // Fixed 14x14 footprint so the toolbar does not jump while loading.
    return SizedBox.square(
      dimension: 14,
      child: CircularProgressIndicator(
        strokeWidth: 2.0,
        color: Theme.of(context).colorScheme.primary,
      ),
    );
  }
}

View file

@ -0,0 +1,47 @@
import 'package:flutter/material.dart';
/// An icon paired with a short numeric/text label, optionally wrapped
/// in a [Tooltip] when [toolTip] is provided.
class IconNumberWidget extends StatelessWidget {
  /// Icon rendered to the left of the label.
  final IconData icon;

  /// Label text shown next to the icon (caller formats the number).
  final String number;

  /// Color for both icon and label; falls back to theme defaults.
  final Color? iconColor;

  /// Size for the icon and the label's font size; falls back to theme
  /// defaults.
  final double? iconSize;

  /// Optional tooltip message shown on hover / long-press.
  final String? toolTip;

  const IconNumberWidget({
    super.key,
    required this.icon,
    required this.number,
    this.toolTip,
    this.iconColor,
    this.iconSize,
  });

  /// The icon + label row, without the tooltip wrapper.
  Widget _content(BuildContext context) {
    return Row(
      mainAxisSize: MainAxisSize.min,
      children: <Widget>[
        Icon(
          icon,
          color: iconColor ?? Theme.of(context).iconTheme.color,
          size: iconSize ?? Theme.of(context).iconTheme.size,
        ),
        const SizedBox(width: 8),
        Text(
          // `number` is already a String; the previous `.toString()` call
          // was redundant.
          number,
          style: TextStyle(
            fontSize:
                iconSize ?? Theme.of(context).textTheme.bodyMedium?.fontSize,
            color: iconColor ?? Theme.of(context).textTheme.bodyMedium?.color,
          ),
        ),
      ],
    );
  }

  @override
  Widget build(BuildContext context) {
    return toolTip != null
        ? Tooltip(message: toolTip!, child: _content(context))
        : _content(context);
  }
}

View file

@ -4,8 +4,8 @@ import 'dart:ui';
import 'package:fluffychat/config/app_config.dart';
import 'package:fluffychat/pangea/constants/language_keys.dart';
import 'package:fluffychat/pangea/controllers/pangea_controller.dart';
import 'package:fluffychat/pangea/models/message_data_models.dart';
import 'package:fluffychat/pangea/models/pangea_message_event.dart';
import 'package:fluffychat/pangea/matrix_event_wrappers/pangea_message_event.dart';
import 'package:fluffychat/pangea/models/representation_content_model.dart';
import 'package:fluffychat/pangea/utils/error_handler.dart';
import 'package:fluffychat/pangea/utils/instructions.dart';
import 'package:fluffychat/pangea/widgets/chat/message_context_menu.dart';
@ -14,6 +14,7 @@ import 'package:fluffychat/widgets/matrix.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import '../../enum/message_mode_enum.dart';
import '../../models/pangea_match_model.dart';
class PangeaRichText extends StatefulWidget {
@ -88,7 +89,10 @@ class PangeaRichTextState extends State<PangeaRichText> {
.representationByLanguageGlobal(
langCode: widget.pangeaMessageEvent.messageDisplayLangCode,
)
.onError((error, stackTrace) => ErrorHandler.logError())
.onError(
(error, stackTrace) =>
ErrorHandler.logError(e: error, s: stackTrace),
)
.then((event) {
repEvent = event;
widget.toolbarController?.toolbar?.textSelection.setMessageText(
@ -159,7 +163,7 @@ class PangeaRichTextState extends State<PangeaRichText> {
),
onListen: () => widget.toolbarController?.showToolbar(
context,
mode: MessageMode.play,
mode: MessageMode.textToSpeech,
),
),
TextSpan(

View file

@ -3,7 +3,10 @@ PODS:
- FlutterMacOS
- audio_session (0.0.1):
- FlutterMacOS
- desktop_drop (0.0.1):
- connectivity_plus (0.0.1):
- FlutterMacOS
- ReachabilitySwift
- desktop_lifecycle (0.0.1):
- FlutterMacOS
- device_info_plus (0.0.1):
- FlutterMacOS
@ -13,6 +16,66 @@ PODS:
- FlutterMacOS
- file_selector_macos (0.0.1):
- FlutterMacOS
- Firebase/Analytics (10.18.0):
- Firebase/Core
- Firebase/Core (10.18.0):
- Firebase/CoreOnly
- FirebaseAnalytics (~> 10.18.0)
- Firebase/CoreOnly (10.18.0):
- FirebaseCore (= 10.18.0)
- Firebase/Messaging (10.18.0):
- Firebase/CoreOnly
- FirebaseMessaging (~> 10.18.0)
- firebase_analytics (10.8.0):
- Firebase/Analytics (= 10.18.0)
- firebase_core
- FlutterMacOS
- firebase_core (2.24.2):
- Firebase/CoreOnly (~> 10.18.0)
- FlutterMacOS
- firebase_messaging (14.7.10):
- Firebase/CoreOnly (~> 10.18.0)
- Firebase/Messaging (~> 10.18.0)
- firebase_core
- FlutterMacOS
- FirebaseAnalytics (10.18.0):
- FirebaseAnalytics/AdIdSupport (= 10.18.0)
- FirebaseCore (~> 10.0)
- FirebaseInstallations (~> 10.0)
- GoogleUtilities/AppDelegateSwizzler (~> 7.11)
- GoogleUtilities/MethodSwizzler (~> 7.11)
- GoogleUtilities/Network (~> 7.11)
- "GoogleUtilities/NSData+zlib (~> 7.11)"
- nanopb (< 2.30910.0, >= 2.30908.0)
- FirebaseAnalytics/AdIdSupport (10.18.0):
- FirebaseCore (~> 10.0)
- FirebaseInstallations (~> 10.0)
- GoogleAppMeasurement (= 10.18.0)
- GoogleUtilities/AppDelegateSwizzler (~> 7.11)
- GoogleUtilities/MethodSwizzler (~> 7.11)
- GoogleUtilities/Network (~> 7.11)
- "GoogleUtilities/NSData+zlib (~> 7.11)"
- nanopb (< 2.30910.0, >= 2.30908.0)
- FirebaseCore (10.18.0):
- FirebaseCoreInternal (~> 10.0)
- GoogleUtilities/Environment (~> 7.12)
- GoogleUtilities/Logger (~> 7.12)
- FirebaseCoreInternal (10.21.0):
- "GoogleUtilities/NSData+zlib (~> 7.8)"
- FirebaseInstallations (10.21.0):
- FirebaseCore (~> 10.0)
- GoogleUtilities/Environment (~> 7.8)
- GoogleUtilities/UserDefaults (~> 7.8)
- PromisesObjC (~> 2.1)
- FirebaseMessaging (10.18.0):
- FirebaseCore (~> 10.0)
- FirebaseInstallations (~> 10.0)
- GoogleDataTransport (~> 9.2)
- GoogleUtilities/AppDelegateSwizzler (~> 7.8)
- GoogleUtilities/Environment (~> 7.8)
- GoogleUtilities/Reachability (~> 7.8)
- GoogleUtilities/UserDefaults (~> 7.8)
- nanopb (< 2.30910.0, >= 2.30908.0)
- flutter_app_badger (1.3.0):
- FlutterMacOS
- flutter_local_notifications (0.0.1):
@ -32,12 +95,63 @@ PODS:
- FMDB/standard (2.7.5)
- geolocator_apple (1.2.0):
- FlutterMacOS
- GoogleAppMeasurement (10.18.0):
- GoogleAppMeasurement/AdIdSupport (= 10.18.0)
- GoogleUtilities/AppDelegateSwizzler (~> 7.11)
- GoogleUtilities/MethodSwizzler (~> 7.11)
- GoogleUtilities/Network (~> 7.11)
- "GoogleUtilities/NSData+zlib (~> 7.11)"
- nanopb (< 2.30910.0, >= 2.30908.0)
- GoogleAppMeasurement/AdIdSupport (10.18.0):
- GoogleAppMeasurement/WithoutAdIdSupport (= 10.18.0)
- GoogleUtilities/AppDelegateSwizzler (~> 7.11)
- GoogleUtilities/MethodSwizzler (~> 7.11)
- GoogleUtilities/Network (~> 7.11)
- "GoogleUtilities/NSData+zlib (~> 7.11)"
- nanopb (< 2.30910.0, >= 2.30908.0)
- GoogleAppMeasurement/WithoutAdIdSupport (10.18.0):
- GoogleUtilities/AppDelegateSwizzler (~> 7.11)
- GoogleUtilities/MethodSwizzler (~> 7.11)
- GoogleUtilities/Network (~> 7.11)
- "GoogleUtilities/NSData+zlib (~> 7.11)"
- nanopb (< 2.30910.0, >= 2.30908.0)
- GoogleDataTransport (9.3.0):
- GoogleUtilities/Environment (~> 7.7)
- nanopb (< 2.30910.0, >= 2.30908.0)
- PromisesObjC (< 3.0, >= 1.2)
- GoogleUtilities/AppDelegateSwizzler (7.12.0):
- GoogleUtilities/Environment
- GoogleUtilities/Logger
- GoogleUtilities/Network
- GoogleUtilities/Environment (7.12.0):
- PromisesObjC (< 3.0, >= 1.2)
- GoogleUtilities/Logger (7.12.0):
- GoogleUtilities/Environment
- GoogleUtilities/MethodSwizzler (7.12.0):
- GoogleUtilities/Logger
- GoogleUtilities/Network (7.12.0):
- GoogleUtilities/Logger
- "GoogleUtilities/NSData+zlib"
- GoogleUtilities/Reachability
- "GoogleUtilities/NSData+zlib (7.12.0)"
- GoogleUtilities/Reachability (7.12.0):
- GoogleUtilities/Logger
- GoogleUtilities/UserDefaults (7.12.0):
- GoogleUtilities/Logger
- in_app_purchase_storekit (0.0.1):
- Flutter
- FlutterMacOS
- just_audio (0.0.1):
- FlutterMacOS
- macos_ui (0.1.0):
- FlutterMacOS
- macos_window_utils (1.0.0):
- FlutterMacOS
- nanopb (2.30909.1):
- nanopb/decode (= 2.30909.1)
- nanopb/encode (= 2.30909.1)
- nanopb/decode (2.30909.1)
- nanopb/encode (2.30909.1)
- package_info_plus (0.0.1):
- FlutterMacOS
- pasteboard (0.0.1):
@ -45,8 +159,24 @@ PODS:
- path_provider_foundation (0.0.1):
- Flutter
- FlutterMacOS
- record_macos (0.2.0):
- PromisesObjC (2.3.1)
- purchases_flutter (5.8.0):
- FlutterMacOS
- PurchasesHybridCommon (= 6.3.0)
- PurchasesHybridCommon (6.3.0):
- RevenueCat (= 4.26.1)
- ReachabilitySwift (5.0.0)
- record_darwin (1.0.0):
- Flutter
- FlutterMacOS
- RevenueCat (4.26.1)
- Sentry/HybridSDK (8.17.2):
- SentryPrivate (= 8.17.2)
- sentry_flutter (0.0.1):
- Flutter
- FlutterMacOS
- Sentry/HybridSDK (= 8.17.2)
- SentryPrivate (8.17.2)
- share_plus (0.0.1):
- FlutterMacOS
- shared_preferences_foundation (0.0.1):
@ -80,11 +210,15 @@ PODS:
DEPENDENCIES:
- appkit_ui_element_colors (from `Flutter/ephemeral/.symlinks/plugins/appkit_ui_element_colors/macos`)
- audio_session (from `Flutter/ephemeral/.symlinks/plugins/audio_session/macos`)
- desktop_drop (from `Flutter/ephemeral/.symlinks/plugins/desktop_drop/macos`)
- connectivity_plus (from `Flutter/ephemeral/.symlinks/plugins/connectivity_plus/macos`)
- desktop_lifecycle (from `Flutter/ephemeral/.symlinks/plugins/desktop_lifecycle/macos`)
- device_info_plus (from `Flutter/ephemeral/.symlinks/plugins/device_info_plus/macos`)
- dynamic_color (from `Flutter/ephemeral/.symlinks/plugins/dynamic_color/macos`)
- emoji_picker_flutter (from `Flutter/ephemeral/.symlinks/plugins/emoji_picker_flutter/macos`)
- file_selector_macos (from `Flutter/ephemeral/.symlinks/plugins/file_selector_macos/macos`)
- firebase_analytics (from `Flutter/ephemeral/.symlinks/plugins/firebase_analytics/macos`)
- firebase_core (from `Flutter/ephemeral/.symlinks/plugins/firebase_core/macos`)
- firebase_messaging (from `Flutter/ephemeral/.symlinks/plugins/firebase_messaging/macos`)
- flutter_app_badger (from `Flutter/ephemeral/.symlinks/plugins/flutter_app_badger/macos`)
- flutter_local_notifications (from `Flutter/ephemeral/.symlinks/plugins/flutter_local_notifications/macos`)
- flutter_secure_storage_macos (from `Flutter/ephemeral/.symlinks/plugins/flutter_secure_storage_macos/macos`)
@ -92,13 +226,16 @@ DEPENDENCIES:
- flutter_webrtc (from `Flutter/ephemeral/.symlinks/plugins/flutter_webrtc/macos`)
- FlutterMacOS (from `Flutter/ephemeral`)
- geolocator_apple (from `Flutter/ephemeral/.symlinks/plugins/geolocator_apple/macos`)
- in_app_purchase_storekit (from `Flutter/ephemeral/.symlinks/plugins/in_app_purchase_storekit/darwin`)
- just_audio (from `Flutter/ephemeral/.symlinks/plugins/just_audio/macos`)
- macos_ui (from `Flutter/ephemeral/.symlinks/plugins/macos_ui/macos`)
- macos_window_utils (from `Flutter/ephemeral/.symlinks/plugins/macos_window_utils/macos`)
- package_info_plus (from `Flutter/ephemeral/.symlinks/plugins/package_info_plus/macos`)
- pasteboard (from `Flutter/ephemeral/.symlinks/plugins/pasteboard/macos`)
- path_provider_foundation (from `Flutter/ephemeral/.symlinks/plugins/path_provider_foundation/darwin`)
- record_macos (from `Flutter/ephemeral/.symlinks/plugins/record_macos/macos`)
- purchases_flutter (from `Flutter/ephemeral/.symlinks/plugins/purchases_flutter/macos`)
- record_darwin (from `Flutter/ephemeral/.symlinks/plugins/record_darwin/macos`)
- sentry_flutter (from `Flutter/ephemeral/.symlinks/plugins/sentry_flutter/macos`)
- share_plus (from `Flutter/ephemeral/.symlinks/plugins/share_plus/macos`)
- shared_preferences_foundation (from `Flutter/ephemeral/.symlinks/plugins/shared_preferences_foundation/darwin`)
- sqflite (from `Flutter/ephemeral/.symlinks/plugins/sqflite/macos`)
@ -111,7 +248,23 @@ DEPENDENCIES:
SPEC REPOS:
trunk:
- Firebase
- FirebaseAnalytics
- FirebaseCore
- FirebaseCoreInternal
- FirebaseInstallations
- FirebaseMessaging
- FMDB
- GoogleAppMeasurement
- GoogleDataTransport
- GoogleUtilities
- nanopb
- PromisesObjC
- PurchasesHybridCommon
- ReachabilitySwift
- RevenueCat
- Sentry
- SentryPrivate
- SQLCipher
- WebRTC-SDK
@ -120,8 +273,10 @@ EXTERNAL SOURCES:
:path: Flutter/ephemeral/.symlinks/plugins/appkit_ui_element_colors/macos
audio_session:
:path: Flutter/ephemeral/.symlinks/plugins/audio_session/macos
desktop_drop:
:path: Flutter/ephemeral/.symlinks/plugins/desktop_drop/macos
connectivity_plus:
:path: Flutter/ephemeral/.symlinks/plugins/connectivity_plus/macos
desktop_lifecycle:
:path: Flutter/ephemeral/.symlinks/plugins/desktop_lifecycle/macos
device_info_plus:
:path: Flutter/ephemeral/.symlinks/plugins/device_info_plus/macos
dynamic_color:
@ -130,6 +285,12 @@ EXTERNAL SOURCES:
:path: Flutter/ephemeral/.symlinks/plugins/emoji_picker_flutter/macos
file_selector_macos:
:path: Flutter/ephemeral/.symlinks/plugins/file_selector_macos/macos
firebase_analytics:
:path: Flutter/ephemeral/.symlinks/plugins/firebase_analytics/macos
firebase_core:
:path: Flutter/ephemeral/.symlinks/plugins/firebase_core/macos
firebase_messaging:
:path: Flutter/ephemeral/.symlinks/plugins/firebase_messaging/macos
flutter_app_badger:
:path: Flutter/ephemeral/.symlinks/plugins/flutter_app_badger/macos
flutter_local_notifications:
@ -144,6 +305,8 @@ EXTERNAL SOURCES:
:path: Flutter/ephemeral
geolocator_apple:
:path: Flutter/ephemeral/.symlinks/plugins/geolocator_apple/macos
in_app_purchase_storekit:
:path: Flutter/ephemeral/.symlinks/plugins/in_app_purchase_storekit/darwin
just_audio:
:path: Flutter/ephemeral/.symlinks/plugins/just_audio/macos
macos_ui:
@ -156,8 +319,12 @@ EXTERNAL SOURCES:
:path: Flutter/ephemeral/.symlinks/plugins/pasteboard/macos
path_provider_foundation:
:path: Flutter/ephemeral/.symlinks/plugins/path_provider_foundation/darwin
record_macos:
:path: Flutter/ephemeral/.symlinks/plugins/record_macos/macos
purchases_flutter:
:path: Flutter/ephemeral/.symlinks/plugins/purchases_flutter/macos
record_darwin:
:path: Flutter/ephemeral/.symlinks/plugins/record_darwin/macos
sentry_flutter:
:path: Flutter/ephemeral/.symlinks/plugins/sentry_flutter/macos
share_plus:
:path: Flutter/ephemeral/.symlinks/plugins/share_plus/macos
shared_preferences_foundation:
@ -180,11 +347,21 @@ EXTERNAL SOURCES:
SPEC CHECKSUMS:
appkit_ui_element_colors: 39bb2d80be3f19b152ccf4c70d5bbe6cba43d74a
audio_session: dea1f41890dbf1718f04a56f1d6150fd50039b72
desktop_drop: 69eeff437544aa619c8db7f4481b3a65f7696898
connectivity_plus: 18d3c32514c886e046de60e9c13895109866c747
desktop_lifecycle: a600c10e12fe033c7be9078f2e929b8241f2c1e3
device_info_plus: 5401765fde0b8d062a2f8eb65510fb17e77cf07f
dynamic_color: 2eaa27267de1ca20d879fbd6e01259773fb1670f
emoji_picker_flutter: 533634326b1c5de9a181ba14b9758e6dfe967a20
file_selector_macos: 468fb6b81fac7c0e88d71317f3eec34c3b008ff9
Firebase: 414ad272f8d02dfbf12662a9d43f4bba9bec2a06
firebase_analytics: 687a47ef9af9c5a8a9fc612c100987f843d0a281
firebase_core: a74ee8b3ab5f91ae6b73f4913eaca996c24458b6
firebase_messaging: 1298099739b30786ab5be9fdbfe00b2019065745
FirebaseAnalytics: 4d310b35c48eaa4a058ddc04bdca6bdb5dc0fe80
FirebaseCore: 2322423314d92f946219c8791674d2f3345b598f
FirebaseCoreInternal: 43c1788eaeee9d1b97caaa751af567ce11010d00
FirebaseInstallations: 390ea1d10a4d02b20c965cbfd527ee9b3b412acb
FirebaseMessaging: 9bc34a98d2e0237e1b121915120d4d48ddcf301e
flutter_app_badger: 55a64b179f8438e89d574320c77b306e327a1730
flutter_local_notifications: 3805ca215b2fb7f397d78b66db91f6a747af52e4
flutter_secure_storage_macos: d56e2d218c1130b262bef8b4a7d64f88d7f9c9ea
@ -193,25 +370,38 @@ SPEC CHECKSUMS:
FlutterMacOS: 8f6f14fa908a6fb3fba0cd85dbd81ec4b251fb24
FMDB: 2ce00b547f966261cd18927a3ddb07cb6f3db82a
geolocator_apple: 821be05bbdb1b49500e029ebcbf2d6acf2dfb966
GoogleAppMeasurement: 70ce9aa438cff1cfb31ea3e660bcc67734cb716e
GoogleDataTransport: 57c22343ab29bc686febbf7cbb13bad167c2d8fe
GoogleUtilities: 0759d1a57ebb953965c2dfe0ba4c82e95ccc2e34
in_app_purchase_storekit: 9e9931234f0adcf71ae323f8c83785b96030edf1
just_audio: 9b67ca7b97c61cfc9784ea23cd8cc55eb226d489
macos_ui: 6229a8922cd97bafb7d9636c8eb8dfb0744183ca
macos_window_utils: 933f91f64805e2eb91a5bd057cf97cd097276663
nanopb: d4d75c12cd1316f4a64e3c6963f879ecd4b5e0d5
package_info_plus: 02d7a575e80f194102bef286361c6c326e4c29ce
pasteboard: 9b69dba6fedbb04866be632205d532fe2f6b1d99
path_provider_foundation: 29f094ae23ebbca9d3d0cec13889cd9060c0e943
record_macos: 937889e0f2a7a12b6fc14e97a3678e5a18943de6
path_provider_foundation: 3784922295ac71e43754bd15e0653ccfd36a147c
PromisesObjC: c50d2056b5253dadbd6c2bea79b0674bd5a52fa4
purchases_flutter: 36a8c669148173e56f19dfc20df724bc734ab475
PurchasesHybridCommon: 5ee5e13fe009876850a03f52bb0349b6fa91d976
ReachabilitySwift: 985039c6f7b23a1da463388634119492ff86c825
record_darwin: 1f6619f2abac4d1ca91d3eeab038c980d76f1517
RevenueCat: 4e8899a69fd57180ef166237d1eb670023be05de
Sentry: 64a9f9c3637af913adcf53deced05bbe452d1410
sentry_flutter: 57912cf425e09398bdf47f38842a1fcb9836f1be
SentryPrivate: 024c6fed507ac39ae98e6d087034160f942920d5
share_plus: 76dd39142738f7a68dd57b05093b5e8193f220f7
shared_preferences_foundation: 5b919d13b803cadd15ed2dc053125c68730e5126
shared_preferences_foundation: b4c3b4cddf1c21f02770737f147a3f5da9d39695
sqflite: a5789cceda41d54d23f31d6de539d65bb14100ea
sqflite_sqlcipher: d1ac7c60596e4d624d9757e3ec96e9cfafb734d6
SQLCipher: 905b145f65f349f26da9e60a19901ad24adcd381
url_launcher_macos: d2691c7dd33ed713bf3544850a623080ec693d95
video_compress: c896234f100791b5fef7f049afa38f6d2ef7b42f
video_player_avfoundation: e9e6f9cae7d7a6d9b43519b0aab382bca60fcfd1
video_player_avfoundation: 02011213dab73ae3687df27ce441fbbcc82b5579
wakelock_plus: 4783562c9a43d209c458cb9b30692134af456269
WebRTC-SDK: c24d2a6c9f571f2ed42297cb8ffba9557093142b
window_to_front: 4cdc24ddd8461ad1a55fa06286d6a79d8b29e8d8
PODFILE CHECKSUM: d0975b16fbdecb73b109d8fbc88aa77ffe4c7a8d
COCOAPODS: 1.14.3
COCOAPODS: 1.15.2

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -56,7 +56,7 @@ dependencies:
git: https://github.com/krille-chan/flutter_shortcuts.git
flutter_typeahead: ^5.2.0
flutter_web_auth_2: ^3.1.1
flutter_webrtc: ^0.9.46
flutter_webrtc: ^0.10.3
future_loading_dialog: ^0.3.0
geolocator: ^7.6.2
go_router: ^13.2.2
@ -82,7 +82,7 @@ dependencies:
provider: ^6.0.2
punycode: ^1.0.0
qr_code_scanner: ^1.0.1
receive_sharing_intent: ^1.4.5
receive_sharing_intent: 1.4.5 # Update needs more work
record: 4.4.4 # Upgrade to 5 currently breaks playing on iOS
scroll_to_index: ^3.0.1
share_plus: ^8.0.2
@ -115,6 +115,7 @@ dependencies:
in_app_purchase: ^3.1.13
jwt_decode: ^0.3.1
language_tool: ^2.2.0
material_symbols_icons: ^4.2741.0
open_file: ^3.3.2
purchases_flutter: ^6.26.0
sentry_flutter: ^7.19.0