Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
84 changes: 73 additions & 11 deletions packages/firebase_ai/firebase_ai/example/lib/pages/chat_page.dart
Original file line number Diff line number Diff line change
Expand Up @@ -50,10 +50,10 @@ class _ChatPageState extends State<ChatPage> {
void _initializeChat() {
final generationConfig = GenerationConfig(
thinkingConfig: _enableThinking
? ThinkingConfig.withThinkingLevel(
ThinkingLevel.high,
? ThinkingConfig.withThinkingBudget(
Comment thread
cynthiajoan marked this conversation as resolved.
null,
includeThoughts: true,
)
) // Use thinkingBudget since we are testing with Gemini 2.5.
: null,
);
if (widget.useVertexBackend) {
Expand Down Expand Up @@ -143,14 +143,27 @@ class _ChatPageState extends State<ChatPage> {
dimension: 15,
),
if (!_loading)
IconButton(
onPressed: () async {
await _sendChatMessage(_textController.text);
},
icon: Icon(
Icons.send,
color: Theme.of(context).colorScheme.primary,
),
Row(
children: [
IconButton(
onPressed: () {
_sendChatMessage(_textController.text);
},
icon: Icon(
Icons.send,
color: Theme.of(context).colorScheme.primary,
),
),
IconButton(
onPressed: () {
_sendStreamingChatMessage(_textController.text);
},
icon: Icon(
Icons.stream,
color: Theme.of(context).colorScheme.primary,
),
),
],
)
else
const CircularProgressIndicator(),
Expand All @@ -163,6 +176,55 @@ class _ChatPageState extends State<ChatPage> {
);
}

/// Sends [message] to the chat session using the streaming API and
/// incrementally renders the response as chunks arrive.
///
/// The user's message is appended first, followed by a placeholder model
/// message whose text grows as streamed chunks are received. Any thought
/// summaries emitted by the model are inserted as separate messages just
/// before the in-progress model message.
///
/// Shows an error via [_showError] when the chat session is unavailable or
/// the stream yields no text. Always clears the input field, resets
/// [_loading], and restores focus when finished.
Future<void> _sendStreamingChatMessage(String message) async {
  setState(() {
    _loading = true;
  });

  try {
    // Mutate the message list inside setState so the user's message (and
    // later, thought messages) actually trigger a rebuild on their own.
    setState(() {
      _messages.add(MessageData(text: message, fromUser: true));
    });
    final responseStream = _chat?.sendMessageStream(
      Content.text(message),
    );

    if (responseStream == null) {
      _showError('No response from API.');
      return;
    }
    // Placeholder model message; its text is replaced as chunks arrive.
    final textBuffer = StringBuffer();
    setState(() {
      _messages.add(MessageData(text: '', fromUser: false));
    });

    await for (final response in responseStream) {
      // Bail out if the widget was disposed while awaiting the stream;
      // calling setState after dispose throws.
      if (!mounted) return;
      final thought = response.thoughtSummary;
      textBuffer.write(response.text ?? '');
      setState(() {
        if (thought != null) {
          // Keep thought summaries just above the in-progress answer.
          _messages.insert(
            _messages.length - 1,
            MessageData(text: thought, fromUser: false, isThought: true),
          );
        }
        _messages.last =
            MessageData(text: textBuffer.toString(), fromUser: false);
      });
      _scrollDown();
    }

    if (textBuffer.isEmpty) {
      _showError('No response from API.');
      return;
    }
  } catch (e) {
    _showError(e.toString());
  } finally {
    // Guard against the widget having been disposed mid-request.
    if (mounted) {
      _textController.clear();
      setState(() {
        _loading = false;
      });
      _textFieldFocus.requestFocus();
    }
  }
}

Future<void> _sendChatMessage(String message) async {
setState(() {
_loading = true;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -480,7 +480,7 @@ class _FunctionCallingPageState extends State<FunctionCallingPage> {
await _runTest(() async {
final autoFunctionCallChat = _autoFunctionCallModel.startChat();
const prompt =
'What is the weather like in Boston, MA on 10/02 in year 2024?';
'Tell a bedtime story, and in the end show what is the weather like in Boston, MA on 10/02 in year 2024?';

_messages.add(MessageData(text: prompt, fromUser: true));
setState(() {});
Expand Down
Loading