fix(phonetic_transcription): use typed LanguageArc and PangeaTokenText in transcription models

This commit is contained in:
wcjord 2025-06-16 16:13:40 -04:00
parent 40a6e5a10b
commit daeaf900f3
7 changed files with 346 additions and 45 deletions

View file

@ -1,13 +1,4 @@
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:collection/collection.dart';
import 'package:flutter_dotenv/flutter_dotenv.dart';
import 'package:flutter_secure_storage/flutter_secure_storage.dart';
import 'package:get_storage/get_storage.dart';
import 'package:matrix/matrix.dart';
import 'package:shared_preferences/shared_preferences.dart';
import 'package:fluffychat/config/app_config.dart';
import 'package:fluffychat/pangea/common/config/environment.dart';
import 'package:fluffychat/pangea/common/utils/error_handler.dart';
@ -15,6 +6,14 @@ import 'package:fluffychat/pangea/common/utils/firebase_analytics.dart';
import 'package:fluffychat/pangea/learning_settings/utils/p_language_store.dart';
import 'package:fluffychat/utils/client_manager.dart';
import 'package:fluffychat/utils/platform_infos.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:flutter_dotenv/flutter_dotenv.dart';
import 'package:flutter_secure_storage/flutter_secure_storage.dart';
import 'package:get_storage/get_storage.dart';
import 'package:matrix/matrix.dart';
import 'package:shared_preferences/shared_preferences.dart';
import 'config/setting_keys.dart';
import 'utils/background_push.dart';
import 'widgets/fluffy_chat_app.dart';

View file

@ -1,7 +1,4 @@
import 'package:flutter/material.dart';
import 'package:collection/collection.dart';
import 'package:fluffychat/l10n/l10n.dart';
import 'package:fluffychat/pangea/analytics_details_popup/analytics_details_popup_content.dart';
import 'package:fluffychat/pangea/analytics_misc/construct_use_model.dart';
@ -15,6 +12,7 @@ import 'package:fluffychat/pangea/phonetic_transcription/phonetic_transcription_
import 'package:fluffychat/pangea/toolbar/widgets/practice_activity/word_text_with_audio_button.dart';
import 'package:fluffychat/pangea/toolbar/widgets/word_zoom/lemma_meaning_widget.dart';
import 'package:fluffychat/widgets/matrix.dart';
import 'package:flutter/material.dart';
/// Displays information about selected lemma, and its usage
class VocabDetailsView extends StatelessWidget {
@ -53,14 +51,33 @@ class VocabDetailsView extends StatelessWidget {
: _construct.lemmaCategory.darkColor(context));
return AnalyticsDetailsViewContent(
title: WordTextWithAudioButton(
text: _construct.lemma,
style: Theme.of(context).textTheme.headlineLarge?.copyWith(
color: textColor,
title: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
WordTextWithAudioButton(
text: _construct.lemma,
style: Theme.of(context).textTheme.headlineLarge?.copyWith(
color: textColor,
),
iconSize: _iconSize,
uniqueID: "${_construct.lemma}-${_construct.category}",
langCode: _userL2!,
),
if (MatrixState.pangeaController.languageController.userL2 != null)
Padding(
padding: const EdgeInsets.only(top: 4.0),
child: PhoneticTranscriptionWidget(
text: _construct.lemma,
textLanguage:
MatrixState.pangeaController.languageController.userL2!,
style: Theme.of(context).textTheme.bodyMedium?.copyWith(
color: textColor.withAlpha((0.7 * 255).toInt()),
fontSize: 18,
),
iconSize: _iconSize * 0.8,
),
),
iconSize: _iconSize,
uniqueID: "${_construct.lemma}-${_construct.category}",
langCode: _userL2!,
],
),
subtitle: Column(
children: [

View file

@ -22,6 +22,14 @@ class PangeaTokenText {
);
}
/// Wraps a whole [content] string as a single token: offset 0 and a
/// length equal to the string itself.
static PangeaTokenText fromString(String content) => PangeaTokenText(
      offset: 0,
      content: content,
      length: content.length,
    );
static const String _offsetKey = "offset";
static const String _contentKey = "content";
static const String _lengthKey = "length";

View file

@ -1,8 +1,7 @@
import 'package:flutter/material.dart';
import 'package:fluffychat/l10n/l10n.dart';
import 'package:fluffychat/pangea/learning_settings/constants/language_constants.dart';
import 'package:fluffychat/pangea/learning_settings/enums/l2_support_enum.dart';
import 'package:flutter/material.dart';
class LanguageModel {
final String langCode;
@ -80,3 +79,27 @@ class LanguageModel {
@override
int get hashCode => langCode.hashCode;
}
/// A directed language pair: the learner's base language [l1] and the
/// target language [l2] being transcribed/learned.
class LanguageArc {
  /// The learner's base (known) language.
  final LanguageModel l1;

  /// The target language.
  final LanguageModel l2;

  LanguageArc({
    required this.l1,
    required this.l2,
  });

  /// Deserializes an arc from `{'l1': {...}, 'l2': {...}}`.
  factory LanguageArc.fromJson(Map<String, dynamic> json) {
    return LanguageArc(
      l1: LanguageModel.fromJson(json['l1'] as Map<String, dynamic>),
      l2: LanguageModel.fromJson(json['l2'] as Map<String, dynamic>),
    );
  }

  Map<String, dynamic> toJson() {
    return {
      'l1': l1.toJson(),
      'l2': l2.toJson(),
    };
  }

  // Value equality, consistent with LanguageModel (which overrides
  // hashCode — presumably == as well), so equal arcs behave as equal
  // map/cache keys instead of relying on object identity.
  @override
  bool operator ==(Object other) =>
      identical(this, other) ||
      other is LanguageArc && other.l1 == l1 && other.l2 == l2;

  @override
  int get hashCode => Object.hash(l1, l2);
}

View file

@ -1,33 +1,33 @@
import 'package:fluffychat/pangea/events/models/pangea_token_text_model.dart';
import 'package:fluffychat/pangea/learning_settings/models/language_model.dart';
class PhoneticTranscriptionRequest {
final String l1;
final String l2;
final String content;
final LanguageArc arc;
final PangeaTokenText content;
final bool requiresTokenization;
PhoneticTranscriptionRequest({
required this.l1,
required this.l2,
required this.arc,
required this.content,
this.requiresTokenization = true,
this.requiresTokenization = false,
});
factory PhoneticTranscriptionRequest.fromJson(Map<String, dynamic> json) {
return PhoneticTranscriptionRequest(
l1: json['l1'] as String,
l2: json['l2'] as String,
content: json['content'] as String,
arc: LanguageArc.fromJson(json['arc'] as Map<String, dynamic>),
content:
PangeaTokenText.fromJson(json['content'] as Map<String, dynamic>),
requiresTokenization: json['requires_tokenization'] ?? true,
);
}
Map<String, dynamic> toJson() {
return {
'l1': l1,
'l2': l2,
'content': content,
'arc': arc.toJson(),
'content': content.toJson(),
'requires_tokenization': requiresTokenization,
};
}
String get storageKey => 'l1:$l1,l2:$l2,content:$content';
String get storageKey => '${arc.l1}-${arc.l2}-${content.hashCode}';
}

View file

@ -1,8 +1,107 @@
import 'package:fluffychat/pangea/events/models/pangea_token_text_model.dart';
import 'package:fluffychat/pangea/learning_settings/models/language_model.dart';
/// How transcribed tokens are joined back together: with a space ([sp])
/// or with no separator at all ([noSp]).
enum PhoneticTranscriptionDelimEnum { sp, noSp }

extension PhoneticTranscriptionDelimEnumExt on PhoneticTranscriptionDelimEnum {
  /// The literal delimiter string placed between tokens.
  String get value => switch (this) {
        PhoneticTranscriptionDelimEnum.sp => " ",
        PhoneticTranscriptionDelimEnum.noSp => "",
      };

  /// Parses a delimiter string back to its enum value.
  ///
  /// Only the empty string maps to [PhoneticTranscriptionDelimEnum.noSp];
  /// everything else (including unrecognized input) falls back to
  /// [PhoneticTranscriptionDelimEnum.sp], matching the original behavior.
  static PhoneticTranscriptionDelimEnum fromString(String s) => s == ""
      ? PhoneticTranscriptionDelimEnum.noSp
      : PhoneticTranscriptionDelimEnum.sp;
}
/// One L2 token paired with its phonetic rendering for the learner's L1.
class PhoneticTranscriptionToken {
  /// The language pair this token was transcribed across.
  final LanguageArc arc;

  /// The original token in the target language.
  final PangeaTokenText tokenL2;

  /// The phonetic transcription of [tokenL2] (presumably in L1-readable
  /// form — confirm against the transcription service contract).
  final PangeaTokenText phoneticL1Transcription;

  PhoneticTranscriptionToken({
    required this.arc,
    required this.tokenL2,
    required this.phoneticL1Transcription,
  });

  factory PhoneticTranscriptionToken.fromJson(Map<String, dynamic> json) {
    final arcJson = json['arc'] as Map<String, dynamic>;
    final tokenJson = json['token_l2'] as Map<String, dynamic>;
    final phoneticJson =
        json['phonetic_l1_transcription'] as Map<String, dynamic>;
    return PhoneticTranscriptionToken(
      arc: LanguageArc.fromJson(arcJson),
      tokenL2: PangeaTokenText.fromJson(tokenJson),
      phoneticL1Transcription: PangeaTokenText.fromJson(phoneticJson),
    );
  }

  Map<String, dynamic> toJson() {
    return {
      'arc': arc.toJson(),
      'token_l2': tokenL2.toJson(),
      'phonetic_l1_transcription': phoneticL1Transcription.toJson(),
    };
  }
}
/// A full phonetic transcription of an L2 text span: the original text,
/// its per-token transcriptions, and the delimiter used to rejoin them.
class PhoneticTranscription {
  /// The language pair used for the transcription.
  final LanguageArc arc;

  /// The original text span in the target language.
  final PangeaTokenText transcriptionL2;

  /// Per-token transcriptions, in order.
  final List<PhoneticTranscriptionToken> phoneticTranscription;

  /// Delimiter between tokens; defaults to a single space.
  final PhoneticTranscriptionDelimEnum delim;

  PhoneticTranscription({
    required this.arc,
    required this.transcriptionL2,
    required this.phoneticTranscription,
    this.delim = PhoneticTranscriptionDelimEnum.sp,
  });

  factory PhoneticTranscription.fromJson(Map<String, dynamic> json) {
    final rawTokens = json['phonetic_transcription'] as List;
    final tokens = [
      for (final e in rawTokens)
        PhoneticTranscriptionToken.fromJson(e as Map<String, dynamic>),
    ];
    // A missing 'delim' key falls back to the space delimiter.
    final rawDelim = json['delim'];
    return PhoneticTranscription(
      arc: LanguageArc.fromJson(json['arc'] as Map<String, dynamic>),
      transcriptionL2: PangeaTokenText.fromJson(
        json['transcription_l2'] as Map<String, dynamic>,
      ),
      phoneticTranscription: tokens,
      delim: rawDelim == null
          ? PhoneticTranscriptionDelimEnum.sp
          : PhoneticTranscriptionDelimEnumExt.fromString(rawDelim as String),
    );
  }

  Map<String, dynamic> toJson() {
    return {
      'arc': arc.toJson(),
      'transcription_l2': transcriptionL2.toJson(),
      'phonetic_transcription': [
        for (final t in phoneticTranscription) t.toJson(),
      ],
      'delim': delim.value,
    };
  }
}
class PhoneticTranscriptionResponse {
final Map<String, dynamic> arc;
final Map<String, dynamic> content;
final Map<String, dynamic> tokenization;
final Map<String, dynamic> phoneticTranscriptionResult;
final LanguageArc arc;
final PangeaTokenText content;
final Map<String, dynamic>
tokenization; // You can define a typesafe model if needed
final PhoneticTranscription phoneticTranscriptionResult;
DateTime? expireAt;
PhoneticTranscriptionResponse({
@ -15,11 +114,13 @@ class PhoneticTranscriptionResponse {
factory PhoneticTranscriptionResponse.fromJson(Map<String, dynamic> json) {
return PhoneticTranscriptionResponse(
arc: Map<String, dynamic>.from(json['arc'] as Map),
content: Map<String, dynamic>.from(json['content'] as Map),
arc: LanguageArc.fromJson(json['arc'] as Map<String, dynamic>),
content:
PangeaTokenText.fromJson(json['content'] as Map<String, dynamic>),
tokenization: Map<String, dynamic>.from(json['tokenization'] as Map),
phoneticTranscriptionResult: Map<String, dynamic>.from(
json['phonetic_transcription_result'] as Map),
phoneticTranscriptionResult: PhoneticTranscription.fromJson(
json['phonetic_transcription_result'] as Map<String, dynamic>,
),
expireAt: json['expireAt'] == null
? null
: DateTime.parse(json['expireAt'] as String),
@ -28,10 +129,10 @@ class PhoneticTranscriptionResponse {
Map<String, dynamic> toJson() {
return {
'arc': arc,
'content': content,
'arc': arc.toJson(),
'content': content.toJson(),
'tokenization': tokenization,
'phonetic_transcription_result': phoneticTranscriptionResult,
'phonetic_transcription_result': phoneticTranscriptionResult.toJson(),
'expireAt': expireAt?.toIso8601String(),
};
}

View file

@ -1,3 +1,4 @@
<<<<<<< Updated upstream
import 'package:flutter/material.dart';
import 'package:fluffychat/l10n/l10n.dart';
@ -27,10 +28,51 @@ class PhoneticTranscriptionState extends State<PhoneticTranscription> {
String? error;
PhoneticTranscriptionResponse? _response;
=======
import 'dart:async';
import 'package:fluffychat/l10n/l10n.dart';
import 'package:fluffychat/pangea/common/utils/error_handler.dart';
import 'package:fluffychat/pangea/events/models/pangea_token_text_model.dart';
import 'package:fluffychat/pangea/learning_settings/models/language_model.dart';
import 'package:fluffychat/pangea/phonetic_transcription/phonetic_transcription_repo.dart';
import 'package:fluffychat/pangea/phonetic_transcription/phonetic_transcription_request.dart';
import 'package:fluffychat/pangea/toolbar/controllers/tts_controller.dart';
import 'package:fluffychat/widgets/matrix.dart';
import 'package:flutter/material.dart';
class PhoneticTranscriptionWidget extends StatefulWidget {
final String text;
final LanguageModel textLanguage;
final TextStyle? style;
final double? iconSize;
const PhoneticTranscriptionWidget({
super.key,
required this.text,
required this.textLanguage,
this.style,
this.iconSize,
});
@override
State<PhoneticTranscriptionWidget> createState() =>
_PhoneticTranscriptionWidgetState();
}
class _PhoneticTranscriptionWidgetState
extends State<PhoneticTranscriptionWidget> {
late Future<String?> _transcriptionFuture;
bool _hovering = false;
bool _isPlaying = false;
bool _isLoading = false;
late final StreamSubscription _loadingChoreoSubscription;
>>>>>>> Stashed changes
@override
void initState() {
super.initState();
<<<<<<< Updated upstream
_fetchPhoneticTranscription();
}
@ -71,11 +113,69 @@ class PhoneticTranscriptionState extends State<PhoneticTranscription> {
_loading = false;
});
}
=======
_transcriptionFuture = _fetchTranscription();
_loadingChoreoSubscription =
TtsController.loadingChoreoStream.stream.listen((val) {
if (mounted) setState(() => _isLoading = val);
});
}
@override
void dispose() {
TtsController.stop();
_loadingChoreoSubscription.cancel();
super.dispose();
}
Future<String?> _fetchTranscription() async {
if (MatrixState.pangeaController.languageController.userL1 == null) {
ErrorHandler.logError(
e: Exception('User L1 is not set'),
data: {
'text': widget.text,
'textLanguageCode': widget.textLanguage.langCode,
},
);
return widget.text; // Fallback to original text if no L1 is set
}
final req = PhoneticTranscriptionRequest(
arc: LanguageArc(
l1: MatrixState.pangeaController.languageController.userL1!,
l2: widget.textLanguage,
),
content: PangeaTokenText.fromString(widget.text),
// arc can be omitted for default empty map
);
final res = await PhoneticTranscriptionRepo.get(req);
return res.phoneticTranscriptionResult.phoneticTranscription.first
.phoneticL1Transcription.content;
}
Future<void> _handleAudioTap(BuildContext context) async {
if (_isPlaying) {
await TtsController.stop();
setState(() => _isPlaying = false);
} else {
await TtsController.tryToSpeak(
widget.text,
context: context,
targetID: 'phonetic-transcription-${widget.text}',
langCode: widget.textLanguage.langCode,
onStart: () {
if (mounted) setState(() => _isPlaying = true);
},
onStop: () {
if (mounted) setState(() => _isPlaying = false);
},
);
>>>>>>> Stashed changes
}
}
@override
Widget build(BuildContext context) {
<<<<<<< Updated upstream
if (error != null) {
return Row(
mainAxisSize: MainAxisSize.min,
@ -101,6 +201,59 @@ class PhoneticTranscriptionState extends State<PhoneticTranscription> {
return Text(
'Phonetic transcription for "${widget.text}" in ${widget.l2}',
style: Theme.of(context).textTheme.bodyLarge,
=======
return FutureBuilder<String?>(
future: _transcriptionFuture,
builder: (context, snapshot) {
final transcription = snapshot.data ?? '';
return MouseRegion(
onEnter: (_) => setState(() => _hovering = true),
onExit: (_) => setState(() => _hovering = false),
child: GestureDetector(
onTap: () => _handleAudioTap(context),
child: AnimatedContainer(
duration: const Duration(milliseconds: 150),
decoration: BoxDecoration(
color: _hovering
? Colors.grey.withAlpha((0.2 * 255).round())
: Colors.transparent,
borderRadius: BorderRadius.circular(6),
),
padding: const EdgeInsets.symmetric(horizontal: 8, vertical: 4),
child: Row(
mainAxisSize: MainAxisSize.min,
children: [
Text(
transcription.isNotEmpty ? transcription : widget.text,
style:
widget.style ?? Theme.of(context).textTheme.bodyMedium,
),
const SizedBox(width: 8),
Tooltip(
message: _isPlaying
? L10n.of(context).stop
: L10n.of(context).playAudio,
child: _isLoading
? const SizedBox(
width: 16,
height: 16,
child: CircularProgressIndicator(strokeWidth: 3),
)
: Icon(
_isPlaying ? Icons.pause_outlined : Icons.volume_up,
size: widget.iconSize ?? 24,
color: _isPlaying
? Theme.of(context).colorScheme.primary
: Theme.of(context).iconTheme.color,
),
),
],
),
),
),
);
},
>>>>>>> Stashed changes
);
}
}