Merge pull request #3077 from pangeachat/3046-add-phonetic-transcription-for-language-with-a-non-latin-script

3046-add-phonetic-transcription-for-language-with-a-non-latin-script
This commit is contained in:
ggurdin 2025-06-16 16:36:03 -04:00 committed by GitHub
commit 583281af26
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
8 changed files with 485 additions and 7 deletions

View file

@ -11,6 +11,8 @@ import 'package:fluffychat/pangea/lemmas/lemma_emoji_row.dart';
import 'package:fluffychat/pangea/morphs/get_grammar_copy.dart';
import 'package:fluffychat/pangea/morphs/morph_features_enum.dart';
import 'package:fluffychat/pangea/morphs/morph_icon.dart';
import 'package:fluffychat/pangea/phonetic_transcription/phonetic_transcription_widget.dart';
import 'package:fluffychat/pangea/toolbar/utils/shrinkable_text.dart';
import 'package:fluffychat/pangea/toolbar/widgets/practice_activity/word_text_with_audio_button.dart';
import 'package:fluffychat/pangea/toolbar/widgets/word_zoom/lemma_meaning_widget.dart';
import 'package:fluffychat/widgets/matrix.dart';
@ -26,6 +28,9 @@ class VocabDetailsView extends StatelessWidget {
/// Aggregated usage data for the construct shown by this view.
ConstructUses get _construct => constructId.constructUses;

/// Language code of the user's base language (L1), if one is set.
String? get _userL1 =>
MatrixState.pangeaController.languageController.userL1?.langCode;

/// Language code of the user's target language (L2), if one is set.
String? get _userL2 =>
MatrixState.pangeaController.languageController.userL2?.langCode;
@ -49,14 +54,34 @@ class VocabDetailsView extends StatelessWidget {
: _construct.lemmaCategory.darkColor(context));
return AnalyticsDetailsViewContent(
title: WordTextWithAudioButton(
text: _construct.lemma,
style: Theme.of(context).textTheme.headlineLarge?.copyWith(
color: textColor,
title: Column(
children: [
LayoutBuilder(
builder: (context, constraints) {
return ShrinkableText(
text: _construct.lemma,
maxWidth: constraints.maxWidth - 40.0,
style: Theme.of(context).textTheme.headlineLarge?.copyWith(
color: textColor,
),
);
},
),
if (MatrixState.pangeaController.languageController.userL2 != null)
Padding(
padding: const EdgeInsets.only(top: 4.0),
child: PhoneticTranscriptionWidget(
text: _construct.lemma,
textLanguage:
MatrixState.pangeaController.languageController.userL2!,
style: Theme.of(context).textTheme.bodyMedium?.copyWith(
color: textColor.withAlpha((0.7 * 255).toInt()),
fontSize: 18,
),
iconSize: _iconSize * 0.8,
),
),
iconSize: _iconSize,
uniqueID: "${_construct.lemma}-${_construct.category}",
langCode: _userL2!,
],
),
subtitle: Column(
children: [

View file

@ -86,4 +86,7 @@ class PApiUrls {
// Subscription endpoints ("rc" presumably RevenueCat — confirm with the
// subscription controller that consumes these).
static String rcProductsTrial = "${PApiUrls.subscriptionEndpoint}/free_trial";
static String rcSubscription = PApiUrls.subscriptionEndpoint;

// Choreo endpoint that returns phonetic transcriptions (L2 text rendered
// readably for an L1 speaker).
static String phoneticTranscription =
"${PApiUrls.choreoEndpoint}/phonetic_transcription";
}

View file

@ -22,6 +22,14 @@ class PangeaTokenText {
);
}
/// Builds a [PangeaTokenText] that spans the whole of [content],
/// anchored at offset 0.
///
/// Note: `length` counts UTF-16 code units (Dart's `String.length`).
static PangeaTokenText fromString(String content) => PangeaTokenText(
      offset: 0,
      content: content,
      length: content.length,
    );
// Field names for the token's JSON representation — presumably consumed by
// toJson/fromJson, which are outside this hunk; verify against the full file.
static const String _offsetKey = "offset";
static const String _contentKey = "content";
static const String _lengthKey = "length";

View file

@ -80,3 +80,27 @@ class LanguageModel {
@override
int get hashCode => langCode.hashCode;
}
/// A directed pair of languages: the user's base language [l1] and the
/// language being learned [l2].
class LanguageArc {
  /// The user's base (known) language.
  final LanguageModel l1;

  /// The user's target (learning) language.
  final LanguageModel l2;

  // All fields are final, so the constructor can be const.
  const LanguageArc({
    required this.l1,
    required this.l2,
  });

  factory LanguageArc.fromJson(Map<String, dynamic> json) {
    return LanguageArc(
      l1: LanguageModel.fromJson(json['l1'] as Map<String, dynamic>),
      l2: LanguageModel.fromJson(json['l2'] as Map<String, dynamic>),
    );
  }

  Map<String, dynamic> toJson() {
    return {
      'l1': l1.toJson(),
      'l2': l2.toJson(),
    };
  }

  /// Value equality so two arcs with the same language pair compare equal
  /// (e.g. inside request/response equality checks). Delegates to
  /// [LanguageModel]'s equality — its `hashCode` is langCode-based;
  /// NOTE(review): confirm `LanguageModel.==` matches (its override is
  /// outside this hunk).
  @override
  bool operator ==(Object other) =>
      identical(this, other) ||
      other is LanguageArc &&
          runtimeType == other.runtimeType &&
          l1 == other.l1 &&
          l2 == other.l2;

  @override
  int get hashCode => Object.hash(l1, l2);
}

View file

@ -0,0 +1,72 @@
import 'dart:convert';
import 'dart:developer';
import 'package:flutter/foundation.dart';
import 'package:get_storage/get_storage.dart';
import 'package:http/http.dart';
import 'package:fluffychat/pangea/common/config/environment.dart';
import 'package:fluffychat/pangea/common/network/requests.dart';
import 'package:fluffychat/pangea/common/network/urls.dart';
import 'package:fluffychat/pangea/common/utils/error_handler.dart';
import 'package:fluffychat/pangea/phonetic_transcription/phonetic_transcription_request.dart';
import 'package:fluffychat/pangea/phonetic_transcription/phonetic_transcription_response.dart';
import 'package:fluffychat/widgets/matrix.dart';
/// Fetches phonetic transcriptions from the Choreo backend and caches the
/// responses on-device.
class PhoneticTranscriptionRepo {
  /// Persistent cache keyed by [PhoneticTranscriptionRequest.storageKey].
  static final GetStorage _storage =
      GetStorage('phonetic_transcription_storage');

  /// Caches [response] for [request], stamping a default expiry of 100 days
  /// when the response does not already carry one.
  ///
  /// Note: mutates [response] by filling in `expireAt`.
  static void set(
    PhoneticTranscriptionRequest request,
    PhoneticTranscriptionResponse response,
  ) {
    response.expireAt ??= DateTime.now().add(const Duration(days: 100));
    _storage.write(request.storageKey, response.toJson());
  }

  /// Returns the cached response for [request] if present and unexpired;
  /// otherwise evicts the entry (if any) and returns null.
  static PhoneticTranscriptionResponse? _cached(
    PhoneticTranscriptionRequest request,
  ) {
    final cachedJson = _storage.read(request.storageKey);
    if (cachedJson == null) return null;
    try {
      final cached = PhoneticTranscriptionResponse.fromJson(cachedJson);
      // Treat a missing expiry as expired instead of asserting non-null:
      // entries written by [set] always carry one, but older or foreign
      // entries may not, and `expireAt!` would crash here.
      final expireAt = cached.expireAt;
      if (expireAt != null && DateTime.now().isBefore(expireAt)) {
        return cached;
      }
    } catch (_) {
      // Unparseable cache entry — fall through and evict it rather than
      // letting a corrupt write break transcription forever.
    }
    _storage.remove(request.storageKey);
    return null;
  }

  /// Returns the transcription for [request], consulting the cache first
  /// and caching a fresh network result on success.
  static Future<PhoneticTranscriptionResponse> _fetch(
    PhoneticTranscriptionRequest request,
  ) async {
    final cached = _cached(request);
    if (cached != null) return cached;

    final Requests req = Requests(
      choreoApiKey: Environment.choreoApiKey,
      accessToken: MatrixState.pangeaController.userController.accessToken,
    );
    final Response res = await req.post(
      url: PApiUrls.phoneticTranscription,
      body: request.toJson(),
    );
    final decodedBody = jsonDecode(utf8.decode(res.bodyBytes));
    final response = PhoneticTranscriptionResponse.fromJson(decodedBody);
    set(request, response);
    return response;
  }

  /// Public entry point: fetch (or read a cached) transcription.
  ///
  /// Logs and rethrows any failure so callers can surface it themselves.
  static Future<PhoneticTranscriptionResponse> get(
    PhoneticTranscriptionRequest request,
  ) async {
    try {
      return await _fetch(request);
    } catch (e) {
      debugger(when: kDebugMode);
      ErrorHandler.logError(e: e, data: request.toJson());
      rethrow;
    }
  }
}

View file

@ -0,0 +1,33 @@
import 'package:fluffychat/pangea/events/models/pangea_token_text_model.dart';
import 'package:fluffychat/pangea/learning_settings/models/language_model.dart';
/// Request payload for the phonetic-transcription endpoint: transcribe
/// [content] (text in the arc's L2) into a form readable in the arc's L1.
class PhoneticTranscriptionRequest {
  /// The L1/L2 language pair for the transcription.
  final LanguageArc arc;

  /// The text to transcribe.
  final PangeaTokenText content;

  /// Whether the server should tokenize [content] itself.
  final bool requiresTokenization;

  PhoneticTranscriptionRequest({
    required this.arc,
    required this.content,
    this.requiresTokenization = false,
  });

  factory PhoneticTranscriptionRequest.fromJson(Map<String, dynamic> json) {
    return PhoneticTranscriptionRequest(
      arc: LanguageArc.fromJson(json['arc'] as Map<String, dynamic>),
      content:
          PangeaTokenText.fromJson(json['content'] as Map<String, dynamic>),
      // Fall back to the constructor's default. The previous fallback of
      // `true` contradicted the constructor default (`false`), so a request
      // round-tripped through JSON could silently change meaning.
      requiresTokenization: json['requires_tokenization'] ?? false,
    );
  }

  Map<String, dynamic> toJson() {
    return {
      'arc': arc.toJson(),
      'content': content.toJson(),
      'requires_tokenization': requiresTokenization,
    };
  }

  /// Deterministic cache key for this request.
  ///
  /// Uses language codes plus the raw content string. Interpolating the
  /// [LanguageModel]s directly relied on their `toString`, and
  /// `content.hashCode` is not guaranteed stable across app launches —
  /// either would defeat a persisted cache.
  String get storageKey =>
      '${arc.l1.langCode}-${arc.l2.langCode}-${content.content}';
}

View file

@ -0,0 +1,156 @@
import 'package:fluffychat/pangea/events/models/pangea_token_text_model.dart';
import 'package:fluffychat/pangea/learning_settings/models/language_model.dart';
enum PhoneticTranscriptionDelimEnum { sp, noSp }
/// Maps each delimiter variant to the literal string used to join
/// transcription tokens, and parses it back.
extension PhoneticTranscriptionDelimEnumExt on PhoneticTranscriptionDelimEnum {
  /// The join string: a single space for [PhoneticTranscriptionDelimEnum.sp],
  /// the empty string for [PhoneticTranscriptionDelimEnum.noSp].
  String get value => this == PhoneticTranscriptionDelimEnum.sp ? " " : "";

  /// Parses a delimiter string.
  ///
  /// The empty string maps to [PhoneticTranscriptionDelimEnum.noSp];
  /// a space — and any unrecognized input — maps to
  /// [PhoneticTranscriptionDelimEnum.sp], matching the original switch's
  /// default branch.
  static PhoneticTranscriptionDelimEnum fromString(String s) {
    if (s.isEmpty) return PhoneticTranscriptionDelimEnum.noSp;
    return PhoneticTranscriptionDelimEnum.sp;
  }
}
/// A single L2 token paired with its phonetic rendering for an L1 reader.
class PhoneticTranscriptionToken {
  /// Language pair this transcription belongs to.
  final LanguageArc arc;

  /// The original token in the target language (L2).
  final PangeaTokenText tokenL2;

  /// The token transcribed phonetically into the L1 script.
  final PangeaTokenText phoneticL1Transcription;

  PhoneticTranscriptionToken({
    required this.arc,
    required this.tokenL2,
    required this.phoneticL1Transcription,
  });

  factory PhoneticTranscriptionToken.fromJson(Map<String, dynamic> json) {
    final arc = LanguageArc.fromJson(json['arc'] as Map<String, dynamic>);
    final tokenL2 =
        PangeaTokenText.fromJson(json['token_l2'] as Map<String, dynamic>);
    final phonetic = PangeaTokenText.fromJson(
      json['phonetic_l1_transcription'] as Map<String, dynamic>,
    );
    return PhoneticTranscriptionToken(
      arc: arc,
      tokenL2: tokenL2,
      phoneticL1Transcription: phonetic,
    );
  }

  Map<String, dynamic> toJson() {
    return {
      'arc': arc.toJson(),
      'token_l2': tokenL2.toJson(),
      'phonetic_l1_transcription': phoneticL1Transcription.toJson(),
    };
  }
}
/// A full phonetic transcription of an L2 string, broken into per-token
/// renderings that are joined with [delim] for display.
class PhoneticTranscription {
  /// Language pair this transcription belongs to.
  final LanguageArc arc;

  /// The source text in the target language.
  final PangeaTokenText transcriptionL2;

  /// Per-token phonetic renderings, in order.
  final List<PhoneticTranscriptionToken> phoneticTranscription;

  /// How tokens are joined when displayed.
  final PhoneticTranscriptionDelimEnum delim;

  PhoneticTranscription({
    required this.arc,
    required this.transcriptionL2,
    required this.phoneticTranscription,
    this.delim = PhoneticTranscriptionDelimEnum.sp,
  });

  factory PhoneticTranscription.fromJson(Map<String, dynamic> json) {
    final rawTokens = json['phonetic_transcription'] as List;
    final tokens = <PhoneticTranscriptionToken>[
      for (final e in rawTokens)
        PhoneticTranscriptionToken.fromJson(e as Map<String, dynamic>),
    ];
    final rawDelim = json['delim'];
    return PhoneticTranscription(
      arc: LanguageArc.fromJson(json['arc'] as Map<String, dynamic>),
      transcriptionL2: PangeaTokenText.fromJson(
        json['transcription_l2'] as Map<String, dynamic>,
      ),
      phoneticTranscription: tokens,
      // A missing delim falls back to the space separator.
      delim: rawDelim == null
          ? PhoneticTranscriptionDelimEnum.sp
          : PhoneticTranscriptionDelimEnumExt.fromString(rawDelim as String),
    );
  }

  Map<String, dynamic> toJson() {
    return {
      'arc': arc.toJson(),
      'transcription_l2': transcriptionL2.toJson(),
      'phonetic_transcription': [
        for (final t in phoneticTranscription) t.toJson(),
      ],
      'delim': delim.value,
    };
  }
}
/// Full response from the phonetic-transcription endpoint.
class PhoneticTranscriptionResponse {
  /// Language pair the transcription was produced for.
  final LanguageArc arc;

  /// The text that was transcribed.
  final PangeaTokenText content;

  /// Raw tokenization payload from the server.
  // NOTE(review): schema not modeled — introduce a typesafe model if the
  // client ever needs to read into this.
  final Map<String, dynamic> tokenization;

  /// The transcription result itself.
  final PhoneticTranscription phoneticTranscriptionResult;

  /// When a cached copy of this response goes stale. Mutable so the cache
  /// layer can stamp it after construction; deliberately excluded from
  /// equality and hashing.
  DateTime? expireAt;

  PhoneticTranscriptionResponse({
    required this.arc,
    required this.content,
    required this.tokenization,
    required this.phoneticTranscriptionResult,
    this.expireAt,
  });

  factory PhoneticTranscriptionResponse.fromJson(Map<String, dynamic> json) {
    return PhoneticTranscriptionResponse(
      arc: LanguageArc.fromJson(json['arc'] as Map<String, dynamic>),
      content:
          PangeaTokenText.fromJson(json['content'] as Map<String, dynamic>),
      tokenization: Map<String, dynamic>.from(json['tokenization'] as Map),
      phoneticTranscriptionResult: PhoneticTranscription.fromJson(
        json['phonetic_transcription_result'] as Map<String, dynamic>,
      ),
      expireAt: json['expireAt'] == null
          ? null
          : DateTime.parse(json['expireAt'] as String),
    );
  }

  Map<String, dynamic> toJson() {
    return {
      'arc': arc.toJson(),
      'content': content.toJson(),
      'tokenization': tokenization,
      'phonetic_transcription_result': phoneticTranscriptionResult.toJson(),
      'expireAt': expireAt?.toIso8601String(),
    };
  }

  /// Deep equality for JSON-like values (maps, lists, scalars).
  ///
  /// Dart's `Map`/`List` `==` is identity-based, so the previous
  /// `tokenization == other.tokenization` was effectively always false for
  /// two separately-parsed responses, and `tokenization.hashCode` changed
  /// per instance.
  static bool _jsonEquals(dynamic a, dynamic b) {
    if (identical(a, b)) return true;
    if (a is Map && b is Map) {
      if (a.length != b.length) return false;
      for (final key in a.keys) {
        if (!b.containsKey(key) || !_jsonEquals(a[key], b[key])) return false;
      }
      return true;
    }
    if (a is List && b is List) {
      if (a.length != b.length) return false;
      for (var i = 0; i < a.length; i++) {
        if (!_jsonEquals(a[i], b[i])) return false;
      }
      return true;
    }
    return a == b;
  }

  @override
  bool operator ==(Object other) =>
      identical(this, other) ||
      other is PhoneticTranscriptionResponse &&
          runtimeType == other.runtimeType &&
          arc == other.arc &&
          content == other.content &&
          _jsonEquals(tokenization, other.tokenization) &&
          phoneticTranscriptionResult == other.phoneticTranscriptionResult;

  @override
  int get hashCode => Object.hash(
        arc,
        content,
        // Order-insensitive hash over the tokenization keys: consistent
        // with [_jsonEquals] (equal maps share key sets) without hashing
        // the identity-based Map object itself.
        Object.hashAllUnordered(tokenization.keys),
        phoneticTranscriptionResult,
      );
}

View file

@ -0,0 +1,157 @@
import 'dart:async';
import 'package:flutter/material.dart';
import 'package:fluffychat/l10n/l10n.dart';
import 'package:fluffychat/pangea/common/utils/error_handler.dart';
import 'package:fluffychat/pangea/events/models/pangea_token_text_model.dart';
import 'package:fluffychat/pangea/learning_settings/models/language_model.dart';
import 'package:fluffychat/pangea/phonetic_transcription/phonetic_transcription_repo.dart';
import 'package:fluffychat/pangea/phonetic_transcription/phonetic_transcription_request.dart';
import 'package:fluffychat/pangea/toolbar/controllers/tts_controller.dart';
import 'package:fluffychat/widgets/matrix.dart';
/// Displays the phonetic transcription of [text] (rendered as `/…/`) next
/// to an audio button that speaks the original text via [TtsController].
class PhoneticTranscriptionWidget extends StatefulWidget {
/// The text to transcribe and speak.
final String text;
/// The language [text] is written in (the user's L2 at the call sites
/// visible in this diff).
final LanguageModel textLanguage;
/// Optional style for the transcription text; falls back to
/// `bodyMedium` in the build method.
final TextStyle? style;
/// Optional size for the audio icon; defaults to 24 in the build method.
final double? iconSize;
const PhoneticTranscriptionWidget({
super.key,
required this.text,
required this.textLanguage,
this.style,
this.iconSize,
});
@override
State<PhoneticTranscriptionWidget> createState() =>
_PhoneticTranscriptionWidgetState();
}
class _PhoneticTranscriptionWidgetState
    extends State<PhoneticTranscriptionWidget> {
  /// Resolves to the transcription string, or the raw text as fallback.
  late Future<String?> _transcriptionFuture;
  bool _hovering = false;
  bool _isPlaying = false;
  bool _isLoading = false;

  /// Mirrors the TTS controller's loading state into [_isLoading].
  late final StreamSubscription _loadingChoreoSubscription;

  @override
  void initState() {
    super.initState();
    _transcriptionFuture = _fetchTranscription();
    _loadingChoreoSubscription =
        TtsController.loadingChoreoStream.stream.listen((val) {
      if (mounted) setState(() => _isLoading = val);
    });
  }

  @override
  void didUpdateWidget(covariant PhoneticTranscriptionWidget oldWidget) {
    super.didUpdateWidget(oldWidget);
    // Refetch when the inputs change — without this, the widget kept
    // showing the transcription fetched for the previous text/language.
    if (oldWidget.text != widget.text ||
        oldWidget.textLanguage.langCode != widget.textLanguage.langCode) {
      setState(() => _transcriptionFuture = _fetchTranscription());
    }
  }

  @override
  void dispose() {
    TtsController.stop();
    _loadingChoreoSubscription.cancel();
    super.dispose();
  }

  /// Fetches the phonetic transcription of `widget.text`.
  ///
  /// Falls back to the raw text when the user's L1 is unset or the server
  /// returns no tokens. Joins all token transcriptions with the response's
  /// delimiter — the previous implementation read only `.first`, dropping
  /// every other token and throwing a [StateError] on an empty list.
  Future<String?> _fetchTranscription() async {
    final l1 = MatrixState.pangeaController.languageController.userL1;
    if (l1 == null) {
      ErrorHandler.logError(
        e: Exception('User L1 is not set'),
        data: {
          'text': widget.text,
          'textLanguageCode': widget.textLanguage.langCode,
        },
      );
      return widget.text; // Fallback to original text if no L1 is set
    }
    final req = PhoneticTranscriptionRequest(
      arc: LanguageArc(
        l1: l1,
        l2: widget.textLanguage,
      ),
      content: PangeaTokenText.fromString(widget.text),
    );
    final res = await PhoneticTranscriptionRepo.get(req);
    final result = res.phoneticTranscriptionResult;
    if (result.phoneticTranscription.isEmpty) return widget.text;
    return result.phoneticTranscription
        .map((token) => token.phoneticL1Transcription.content)
        .join(result.delim.value);
  }

  /// Toggles playback of `widget.text` through the shared TTS controller.
  Future<void> _handleAudioTap(BuildContext context) async {
    if (_isPlaying) {
      await TtsController.stop();
      setState(() => _isPlaying = false);
    } else {
      await TtsController.tryToSpeak(
        widget.text,
        context: context,
        targetID: 'phonetic-transcription-${widget.text}',
        langCode: widget.textLanguage.langCode,
        onStart: () {
          if (mounted) setState(() => _isPlaying = true);
        },
        onStop: () {
          if (mounted) setState(() => _isPlaying = false);
        },
      );
    }
  }

  @override
  Widget build(BuildContext context) {
    return FutureBuilder<String?>(
      future: _transcriptionFuture,
      builder: (context, snapshot) {
        // While loading (or on error) snapshot.data is null; the Text below
        // then shows the raw widget.text between slashes.
        final transcription = snapshot.data ?? '';
        return MouseRegion(
          onEnter: (_) => setState(() => _hovering = true),
          onExit: (_) => setState(() => _hovering = false),
          child: GestureDetector(
            onTap: () => _handleAudioTap(context),
            child: AnimatedContainer(
              duration: const Duration(milliseconds: 150),
              decoration: BoxDecoration(
                color: _hovering
                    ? Colors.grey.withAlpha((0.2 * 255).round())
                    : Colors.transparent,
                borderRadius: BorderRadius.circular(6),
              ),
              padding: const EdgeInsets.symmetric(horizontal: 8, vertical: 4),
              child: Row(
                mainAxisSize: MainAxisSize.min,
                children: [
                  Text(
                    "/${transcription.isNotEmpty ? transcription : widget.text}/",
                    style:
                        widget.style ?? Theme.of(context).textTheme.bodyMedium,
                  ),
                  const SizedBox(width: 8),
                  Tooltip(
                    message: _isPlaying
                        ? L10n.of(context).stop
                        : L10n.of(context).playAudio,
                    child: _isLoading
                        ? const SizedBox(
                            width: 16,
                            height: 16,
                            child: CircularProgressIndicator(strokeWidth: 3),
                          )
                        : Icon(
                            _isPlaying ? Icons.pause_outlined : Icons.volume_up,
                            size: widget.iconSize ?? 24,
                            color: _isPlaying
                                ? Theme.of(context).colorScheme.primary
                                : Theme.of(context).iconTheme.color,
                          ),
                  ),
                ],
              ),
            ),
          ),
        );
      },
    );
  }
}