QOL: Mzansi AI Chat Look and Feel pt2 & Startup question

This commit is contained in:
2025-11-07 14:29:05 +02:00
parent d75da5389a
commit d9fb9dd758
13 changed files with 225 additions and 87 deletions

View File

@@ -3,7 +3,6 @@ import 'package:mzansi_innovation_hub/mih_components/mih_package_components/mih_
import 'package:mzansi_innovation_hub/mih_components/mih_package_components/mih_package_action.dart';
import 'package:mzansi_innovation_hub/mih_components/mih_package_components/mih_package_tools.dart';
import 'package:mzansi_innovation_hub/mih_components/mih_providers/mzansi_ai_provider.dart';
import 'package:mzansi_innovation_hub/mih_packages/mzansi_ai/package_tools/ai_chat.dart';
import 'package:flutter/material.dart';
import 'package:mzansi_innovation_hub/mih_packages/mzansi_ai/package_tools/mih_ai_chat.dart';
import 'package:provider/provider.dart';
@@ -37,9 +36,9 @@ class _MzansiAiState extends State<MzansiAi> {
temp[const Icon(Icons.chat)] = () {
context.read<MzansiAiProvider>().setToolIndex(0);
};
temp[const Icon(Icons.chat)] = () {
context.read<MzansiAiProvider>().setToolIndex(1);
};
// temp[const Icon(Icons.chat)] = () {
// context.read<MzansiAiProvider>().setToolIndex(1);
// };
return MihPackageTools(
tools: temp,
@@ -49,7 +48,7 @@ class _MzansiAiState extends State<MzansiAi> {
List<Widget> getToolBody() {
List<Widget> toolBodies = [
AiChat(),
// AiChat(),
MihAiChat(),
];
return toolBodies;
@@ -58,7 +57,7 @@ class _MzansiAiState extends State<MzansiAi> {
List<String> getToolTitle() {
List<String> toolTitles = [
"Ask Mzansi",
"New Chat",
// "New Chat",
];
return toolTitles;
}

View File

@@ -1,7 +1,6 @@
import 'dart:async';
import 'dart:convert';
import 'package:flutter_speed_dial/flutter_speed_dial.dart';
import 'package:gpt_markdown/gpt_markdown.dart';
import 'package:mzansi_innovation_hub/main.dart';
import 'package:mzansi_innovation_hub/mih_components/mih_package_components/mih_icons.dart';
import 'package:mzansi_innovation_hub/mih_components/mih_providers/mzansi_ai_provider.dart';
@@ -221,8 +220,21 @@ class _AiChatState extends State<AiChat> {
child: Column(
mainAxisSize: MainAxisSize.max,
children: [
// SelectionArea(
// child: GptMarkdown(
// snapshot.requireData,
// textAlign: TextAlign.left,
// style: TextStyle(
// color: MihColors.getSecondaryColor(
// MzansiInnovationHub.of(context)!.theme.mode ==
// "Dark"),
// fontSize: _chatFrontSize,
// fontWeight: FontWeight.bold,
// ),
// ),
// ),
SelectionArea(
child: GptMarkdown(
child: Text(
snapshot.requireData,
textAlign: TextAlign.left,
style: TextStyle(

View File

@@ -124,54 +124,68 @@ class _MihAiChatState extends State<MihAiChat> {
}
Future<void> initTts() async {
List<Map> _voices = [];
List<String> _voicesString = [];
// await _flutterTts.setLanguage("en-US");
await _flutterTts.setSpeechRate(1);
_flutterTts.getVoices.then(
(data) {
try {
_voices = List<Map>.from(data);
try {
await _flutterTts.setSpeechRate(1);
// await _flutterTts.setLanguage("en-US");
setState(() {
_voices = _voices
.where(
(_voice) => _voice["name"].toLowerCase().contains("en-us"))
.toList();
_voicesString =
_voices.map((_voice) => _voice["name"] as String).toList();
_voicesString.sort();
_flutterTts.setVoice(
{
"name": _voicesString.first,
"locale": _voices
.where((_voice) =>
_voice["name"].contains(_voicesString.first))
.first["locale"]
},
);
});
// Safer voice selection with error handling
_flutterTts.getVoices.then((data) {
try {
final voices = List<Map>.from(data);
final englishVoices = voices.where((voice) {
final name = voice["name"]?.toString().toLowerCase() ?? '';
final locale = voice["locale"]?.toString().toLowerCase() ?? '';
return name.contains("en-us") || locale.contains("en_us");
}).toList();
if (englishVoices.isNotEmpty) {
// Use the first available English voice
_flutterTts.setVoice({"name": englishVoices.first["name"]});
}
// If no voices found, use default
} catch (e) {
print(e);
KenLogger.error("Error setting TTS voice: $e");
}
},
);
_flutterTts.setStartHandler(() {
setState(() {
ttsOn = true;
});
} catch (e) {
KenLogger.error("Error initializing TTS: $e");
}
_flutterTts.setStartHandler(() {
if (mounted) {
setState(() {
ttsOn = true;
});
}
});
_flutterTts.setCompletionHandler(() {
setState(() {
ttsOn = false;
});
if (mounted) {
setState(() {
ttsOn = false;
});
}
});
_flutterTts.setErrorHandler((message) {
setState(() {
ttsOn = false;
});
if (mounted) {
setState(() {
ttsOn = false;
});
}
});
}
void initStartQuestion() {
WidgetsBinding.instance.addPostFrameCallback((_) async {
final mzansiAiProvider = context.read<MzansiAiProvider>();
final startQuestion = mzansiAiProvider.startUpQuestion;
if (startQuestion != null && startQuestion.isNotEmpty) {
final stream =
mzansiAiProvider.ollamaProvider.sendMessageStream(startQuestion);
stream.listen((chunk) {});
mzansiAiProvider.clearStartUpQuestion();
}
});
}
@@ -179,6 +193,7 @@ class _MihAiChatState extends State<MihAiChat> {
void initState() {
super.initState();
initTts();
initStartQuestion();
}
@override
@@ -192,6 +207,13 @@ class _MihAiChatState extends State<MihAiChat> {
return Consumer<MzansiAiProvider>(
builder: (BuildContext context, MzansiAiProvider mzansiAiProvider,
Widget? child) {
// final startupQuestion = mzansiAiProvider.startUpQuestion;
// if (startupQuestion != null) {
// WidgetsBinding.instance.addPostFrameCallback((_) {
// mzansiAiProvider.ollamaProvider.sendMessageStream(startupQuestion);
// mzansiAiProvider.setStartUpQuestion(null);
// });
// }
bool hasHistory = mzansiAiProvider.ollamaProvider.history.isNotEmpty;
KenLogger.success("has history: $hasHistory");
KenLogger.success(
@@ -200,8 +222,10 @@ class _MihAiChatState extends State<MihAiChat> {
children: [
LlmChatView(
provider: mzansiAiProvider.ollamaProvider,
messageSender: mzansiAiProvider.ollamaProvider.sendMessageStream,
// welcomeMessage:
// "Mzansi AI is here to help. Send us a message and we'll try our best to assist you.",
autofocus: false,
enableAttachments: false,
enableVoiceNotes: true,
style: mzansiAiProvider.getChatStyle(context),