From 04868e199c2f9bd2399eaba4a73cf9a83cc7fbe3 Mon Sep 17 00:00:00 2001
From: Daniel La Rocque
Date: Mon, 28 Apr 2025 10:41:59 -0400
Subject: [PATCH] Add grounding with Google Search example to Firebase AI sample app

---
 .../src/components/Layout/RightSidebar.tsx    |  58 +++++-
 .../Specific/ChatMessage.module.css           |   2 +-
 .../src/components/Specific/ChatMessage.tsx   | 168 +++++++++++++++++-
 ai/ai-react-app/src/config/firebase-config.ts |   2 +-
 .../src/services/firebaseAIService.ts         |   5 +
 ai/ai-react-app/src/views/ChatView.tsx        |  32 +++-
 6 files changed, 249 insertions(+), 18 deletions(-)

diff --git a/ai/ai-react-app/src/components/Layout/RightSidebar.tsx b/ai/ai-react-app/src/components/Layout/RightSidebar.tsx
index 9c296c9b6..9e703ee35 100644
--- a/ai/ai-react-app/src/components/Layout/RightSidebar.tsx
+++ b/ai/ai-react-app/src/components/Layout/RightSidebar.tsx
@@ -5,6 +5,7 @@ import {
   AVAILABLE_GENERATIVE_MODELS,
   AVAILABLE_IMAGEN_MODELS,
   defaultFunctionCallingTool,
+  defaultGoogleSearchTool,
 } from "../../services/firebaseAIService";
 import {
   ModelParams,
@@ -158,6 +159,19 @@ const RightSidebar: React.FC = ({
           nextState.tools = undefined;
           nextState.toolConfig = undefined; // Clear config when turning off
         }
+      } else if (name === "google-search-toggle") {
+        if (checked) {
+          // Turn ON Google Search Grounding
+          nextState.tools = [defaultGoogleSearchTool];
+
+          // Turn OFF JSON mode and Function Calling
+          nextState.generationConfig.responseMimeType = undefined;
+          nextState.generationConfig.responseSchema = undefined;
+          nextState.toolConfig = undefined;
+        } else {
+          // Turn OFF Google Search Grounding
+          nextState.tools = undefined;
+        }
       }
       console.log("[RightSidebar] Updated generative params state:", nextState);
       return nextState;
@@ -219,6 +233,9 @@ const RightSidebar: React.FC = ({
       generativeParams.toolConfig?.functionCallingConfig?.mode ===
         FunctionCallingMode.ANY) &&
     !!generativeParams.tools?.length;
+  const isGroundingWithGoogleSearchActive = !!generativeParams.tools?.some(
+    (tool) => "googleSearch" in tool,
+  );

   return (
@@ -360,15 +377,17 @@ const RightSidebar: React.FC = ({
               name="structured-output-toggle"
               checked={isStructuredOutputActive}
               onChange={handleToggleChange}
-              disabled={isFunctionCallingActive}
+              disabled={
+                isFunctionCallingActive || isGroundingWithGoogleSearchActive
+              }
             />
+            [... markup for the new "Grounding with Google Search" toggle (input name="google-search-toggle") ...]
           )}

diff --git a/ai/ai-react-app/src/components/Specific/ChatMessage.module.css b/ai/ai-react-app/src/components/Specific/ChatMessage.module.css
index 041f38b68..d5f72d3b7 100644
--- a/ai/ai-react-app/src/components/Specific/ChatMessage.module.css
+++ b/ai/ai-react-app/src/components/Specific/ChatMessage.module.css
@@ -109,7 +109,7 @@
 .sourceSuperscript {
   font-size: 0.7em;
   vertical-align: super;
-  color: var(--brand-google-blue);
+  color: var(--google-blue);
   margin-left: 2px;
   font-weight: bold;
   user-select: none;
diff --git a/ai/ai-react-app/src/components/Specific/ChatMessage.tsx b/ai/ai-react-app/src/components/Specific/ChatMessage.tsx
index 6d0f94a49..6c10f4fa4 100644
--- a/ai/ai-react-app/src/components/Specific/ChatMessage.tsx
+++ b/ai/ai-react-app/src/components/Specific/ChatMessage.tsx
@@ -1,9 +1,23 @@
 import React from "react";
-import { Content } from "firebase/ai";
+import {
+  Content,
+  GroundingChunk,
+  GroundingMetadata,
+  GroundingSupport,
+} from "firebase/ai";
 import styles from "./ChatMessage.module.css";

 interface ChatMessageProps {
+  /** The message content object containing role and parts. */
   message: Content;
+  groundingMetadata?: GroundingMetadata | null;
+}
+
+interface ProcessedSegment {
+  startIndex: number;
+  endIndex: number;
+  chunkIndices: number[]; // 1-based for display
+  originalSupportIndex: number; // To link back if needed
 }

 /**
@@ -23,13 +37,96 @@ const getMessageText = (message: Content): string => {
     .join("");
 };

+const renderTextWithInlineHighlighting = (
+  text: string,
+  supports: GroundingSupport[],
+  chunks: GroundingChunk[],
+): React.ReactNode[] => {
+  if (!supports || supports.length === 0 || !text) {
+    return [text];
+  }
+
+  const segmentsToHighlight: ProcessedSegment[] = [];
+
+  supports.forEach((support, supportIndex) => {
+    if (support.segment && support.groundingChunkIndices) {
+      const segment = support.segment;
+      if (segment.partIndex === undefined || segment.partIndex === 0) {
+        segmentsToHighlight.push({
+          startIndex: segment.startIndex,
+          endIndex: segment.endIndex, // API's endIndex is typically exclusive
+          chunkIndices: support.groundingChunkIndices.map((ci) => ci + 1), // 1-based
+          originalSupportIndex: supportIndex,
+        });
+      }
+    }
+  });
+
+  if (segmentsToHighlight.length === 0) {
+    return [text];
+  }
+
+  // Sort segments by start index, then by end index
+  segmentsToHighlight.sort((a, b) => {
+    if (a.startIndex !== b.startIndex) {
+      return a.startIndex - b.startIndex;
+    }
+    return b.endIndex - a.endIndex; // Longer segments first
+  });
+
+  const outputNodes: React.ReactNode[] = [];
+  let lastIndexProcessed = 0;
+
+  segmentsToHighlight.forEach((seg, i) => {
+    // Add un-highlighted text before this segment
+    if (seg.startIndex > lastIndexProcessed) {
+      outputNodes.push(text.substring(lastIndexProcessed, seg.startIndex));
+    }
+
+    // Add the highlighted segment
+    // Ensure we don't re-highlight an already covered portion if a shorter segment comes later
+    const currentSegmentText = text.substring(seg.startIndex, seg.endIndex);
+    const tooltipText = seg.chunkIndices
+      .map((ci) => {
+        const chunk = chunks[ci - 1]; // ci is 1-based
+        return chunk.web?.title || chunk.web?.uri || `Source ${ci}`;
+      })
+      .join("; ");
+
+    outputNodes.push(
+      <span key={i} title={tooltipText}>
+        {currentSegmentText}
+        <sup className={styles.sourceSuperscript}>[{seg.chunkIndices.join(",")}]</sup>
+      </span>,
+    );
+    lastIndexProcessed = Math.max(lastIndexProcessed, seg.endIndex);
+  });
+
+  // Add any remaining un-highlighted text
+  if (lastIndexProcessed < text.length) {
+    outputNodes.push(text.substring(lastIndexProcessed));
+  }
+
+  return outputNodes;
+};
+
 /**
  * Renders a single chat message bubble, styled based on the message role ('user' or 'model').
  * It only renders messages that should be visible in the log (user messages, or model messages
  * containing text). Function role messages or model messages consisting only of function calls
  * are typically not rendered directly as chat bubbles.
  */
-const ChatMessage: React.FC<ChatMessageProps> = ({ message }) => {
+const ChatMessage: React.FC<ChatMessageProps> = ({
+  message,
+  groundingMetadata,
+}) => {
   const text = getMessageText(message);
   const isUser = message.role === "user";
   const isModel = message.role === "model";
@@ -41,20 +138,79 @@ const ChatMessage: React.FC<ChatMessageProps> = ({ message }) => {
   // 1. 'function' role messages (these represent execution results, not direct chat).
   // 2. 'model' role messages that *only* contain function calls (these are instructions, not display text).
   // 3. 'system' role messages (handled separately, not usually in chat history display).
-  const shouldRender = isUser || (isModel && text.trim() !== "");
+  const shouldRender =
+    isUser ||
+    (isModel && text.trim() !== "");

   if (!shouldRender) {
     return null;
   }

+  let messageContentNodes: React.ReactNode[];
+  if (
+    isModel &&
+    groundingMetadata?.groundingSupports &&
+    groundingMetadata?.groundingChunks
+  ) {
+    messageContentNodes = renderTextWithInlineHighlighting(
+      text,
+      groundingMetadata.groundingSupports,
+      groundingMetadata.groundingChunks,
+    );
+  } else {
+    messageContentNodes = [text];
+  }
+
   return (
-        {/* Use <pre> to preserve whitespace and newlines within the text content.
-                     Handles potential multi-line responses correctly. */}
-        <pre>{text}</pre>
+        <pre>
+          {messageContentNodes.map((node, index) => (
+            <React.Fragment key={index}>{node}</React.Fragment>
+          ))}
+        </pre>
+        {/* Source list rendering for grounded results. This display must comply with the display requirements in the Service Terms. */}
+        {isModel &&
+        groundingMetadata &&
+        (groundingMetadata.searchEntryPoint?.renderedContent ||
+        (groundingMetadata.groundingChunks &&
+          groundingMetadata.groundingChunks.length > 0) ? (
+          <div>
+            {groundingMetadata.searchEntryPoint?.renderedContent && (
+              <div
+                dangerouslySetInnerHTML={{
+                  __html: groundingMetadata.searchEntryPoint.renderedContent,
+                }}
+              />
+            )}
+            {groundingMetadata.groundingChunks &&
+              groundingMetadata.groundingChunks.length > 0 && (
+                <>
+                  Sources:
+                  [... ordered list of source links built from groundingMetadata.groundingChunks (chunk.web?.uri, chunk.web?.title) ...]
+                </>
+              )}
+          </div>
+        ) : null)}
  );

diff --git a/ai/ai-react-app/src/config/firebase-config.ts b/ai/ai-react-app/src/config/firebase-config.ts
index 8cd3a394d..f0a7820e6 100644
--- a/ai/ai-react-app/src/config/firebase-config.ts
+++ b/ai/ai-react-app/src/config/firebase-config.ts
@@ -6,4 +6,4 @@ export const firebaseConfig = {
   storageBucket: "YOUR_STORAGE_BUCKET",
   messagingSenderId: "YOUR_MESSAGING_SENDER_ID",
   appId: "YOUR_APP_ID",
-};
\ No newline at end of file
+};
diff --git a/ai/ai-react-app/src/services/firebaseAIService.ts b/ai/ai-react-app/src/services/firebaseAIService.ts
index 6e24e94f3..98ce14d0a 100644
--- a/ai/ai-react-app/src/services/firebaseAIService.ts
+++ b/ai/ai-react-app/src/services/firebaseAIService.ts
@@ -11,6 +11,7 @@ import {
   ModelParams,
   ImagenModelParams,
   FunctionCall,
+  GoogleSearchTool,
 } from "firebase/ai";

 import { firebaseConfig } from "../config/firebase-config";
@@ -58,6 +59,10 @@ export const defaultFunctionCallingTool = {
   ],
 };

+export const defaultGoogleSearchTool: GoogleSearchTool = {
+  googleSearch: {},
+};
+
 export const defaultGenerativeParams: Omit<ModelParams, "model"> = {
   // Model name itself is selected in the UI
   generationConfig: {
diff --git a/ai/ai-react-app/src/views/ChatView.tsx b/ai/ai-react-app/src/views/ChatView.tsx
index 01db1cbb0..f2dd7dd17 100644
--- a/ai/ai-react-app/src/views/ChatView.tsx
+++ b/ai/ai-react-app/src/views/ChatView.tsx
@@ -12,6 +12,7 @@ import {
   FunctionCall,
   AIError,
   AI,
+  GroundingMetadata,
 } from "firebase/ai";
 import PromptInput from "../components/Common/PromptInput";
 import ChatMessage from "../components/Specific/ChatMessage";
@@ -75,6 +76,8 @@ const ChatView: React.FC = ({
   const [lastResponseParsedJson, setLastResponseParsedJson] = useState<
     object | null
   >(null);
+  const [lastGroundingMetadata, setLastGroundingMetadata] =
+    useState<GroundingMetadata | null>(null);

   const chatContainerRef = useRef(null);
   const chatSessionRef = useRef(null);
@@ -102,6 +105,7 @@ const ChatView: React.FC = ({
       setChatHistory([]);
       setError(null);
       onUsageMetadataChange(null);
+      setLastGroundingMetadata(null);
       console.log("[ChatView] New chat session initialized successfully.");
     } catch (initError: unknown) {
       console.error("[ChatView] Error initializing chat session:", initError);
@@ -180,6 +184,13 @@ const ChatView: React.FC = ({
         ? finalResponse.text()
         : "";
       finalModelCandidate = finalResponse.candidates?.[0];
+      console.log(`[ChatView] Final candidate:`, finalModelCandidate);
+      console.log(
+        `[ChatView] Grounding Metadata: ${finalModelCandidate?.groundingMetadata}`,
+      );
+      setLastGroundingMetadata(
+        finalModelCandidate?.groundingMetadata || null,
+      );

       if (!finalModelCandidate) {
         console.warn("[ChatView] No candidate in final response.");
@@ -361,6 +372,7 @@ const ChatView: React.FC = ({
       return;
     }

+    setLastGroundingMetadata(null);
     setIsLoading(true);
     setError(null);
     setLastResponseParsedJson(null);
@@ -476,12 +488,20 @@ const ChatView: React.FC = ({
           {`Start chatting with ${currentParams.model}!`}
         )}
-        {chatHistory.map((message, index) => (
-          <ChatMessage key={index} message={message} />
-        ))}
+        {chatHistory.map((message, index) => {
+          const isLastModelMessage =
+            message.role === "model" && index === chatHistory.length - 1;
+          return (
+            <ChatMessage
+              key={index}
+              message={message}
+              groundingMetadata={isLastModelMessage ? lastGroundingMetadata : null}
+            />
+          );
+        })}
         {isLoading && (
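Reviewer note, not part of the patch: the pieces above compose roughly as in the minimal TypeScript sketch below. It assumes the getAI/getGenerativeModel entry points that the sample's firebaseAIService.ts already wraps (with the default backend); the model name, prompt, and import path are placeholders, and error handling is omitted.

// Sketch: enable Grounding with Google Search on a model and read the
// grounding metadata from the first candidate of the response.
import { initializeApp } from "firebase/app";
import { getAI, getGenerativeModel, GoogleSearchTool } from "firebase/ai";
import { firebaseConfig } from "./config/firebase-config"; // placeholder path

// Same shape as defaultGoogleSearchTool in firebaseAIService.ts:
// an empty googleSearch object enables the tool.
const googleSearchTool: GoogleSearchTool = { googleSearch: {} };

export async function askWithGrounding(prompt: string): Promise<string> {
  const app = initializeApp(firebaseConfig);
  const ai = getAI(app);
  const model = getGenerativeModel(ai, {
    model: "gemini-2.0-flash", // placeholder; the sample app picks the model in the UI
    tools: [googleSearchTool],
  });

  const result = await model.generateContent(prompt);
  const grounding = result.response.candidates?.[0]?.groundingMetadata;

  // groundingChunks list the web sources; groundingSupports map text segments
  // (startIndex/endIndex) to the chunk indices that back them.
  grounding?.groundingChunks?.forEach((chunk, i) => {
    console.log(`Source ${i + 1}:`, chunk.web?.title, chunk.web?.uri);
  });
  grounding?.groundingSupports?.forEach((support) => {
    console.log(
      `Segment ${support.segment?.startIndex}-${support.segment?.endIndex}`,
      "supported by chunk(s)",
      support.groundingChunkIndices,
    );
  });

  return result.response.text();
}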