|
2 | 2 | Copyright (c) 2024, 2025, Oracle and/or its affiliates. |
3 | 3 | Licensed under the Universal Permissive License v1.0 as shown at http://oss.oracle.com/licenses/upl. |
4 | 4 |
|
5 | | -Session States Set: |
6 | | -- user_client: Stores the Client |
| 5 | +This file merges the Streamlit Chatbot GUI with the MCPClient for a complete, |
| 6 | +runnable example demonstrating their integration. |
7 | 7 | """ |
8 | | - |
9 | | -# spell-checker:ignore streamlit, oraclevs, selectai |
| 8 | +# spell-checker:ignore streamlit, oraclevs, selectai, langgraph, prebuilt |
10 | 9 | import asyncio |
11 | 10 | import inspect |
12 | | -import json |
13 | 11 | import base64 |
14 | 12 |
|
15 | 13 | import streamlit as st |
16 | 14 | from streamlit import session_state as state |
17 | 15 |
|
18 | | -from client.content.config.models import get_models |
19 | | - |
20 | | -import client.utils.st_common as st_common |
21 | 16 | import client.utils.api_call as api_call |
22 | | - |
| 17 | +import client.utils.st_common as st_common |
| 18 | +from client.content.config.models import get_models |
23 | 19 | from client.utils.st_footer import render_chat_footer |
24 | | -import client.utils.client as client |
25 | 20 | import common.logging_config as logging_config |
| 21 | +from client.mcp.client import MCPClient |
26 | 22 |
|
27 | 23 | logger = logging_config.logging.getLogger("client.content.chatbot") |
28 | 24 |
|
@@ -82,89 +78,106 @@ async def main() -> None: |
82 | 78 | # Stop when sidebar configurations not set |
83 | 79 | if not state.enable_client: |
84 | 80 | st.stop() |
85 | | - |
86 | | - ######################################################################### |
87 | | - # Chatty-Bot Centre |
88 | | - ######################################################################### |
89 | | - # Establish the Client |
90 | | - if "user_client" not in state: |
91 | | - state.user_client = client.Client( |
92 | | - server=state.server, |
93 | | - settings=state.client_settings, |
94 | | - timeout=1200, |
95 | | - ) |
96 | | - user_client: client.Client = state.user_client |
97 | | - |
98 | | - history = await user_client.get_history() |
99 | | - st.chat_message("ai").write("Hello, how can I help you?") |
100 | | - vector_search_refs = [] |
101 | | - for message in history or []: |
102 | | - if not message["content"]: |
103 | | - continue |
104 | | - if message["role"] == "tool" and message["name"] == "oraclevs_tool": |
105 | | - vector_search_refs = json.loads(message["content"]) |
106 | | - if message["role"] in ("ai", "assistant"): |
107 | | - with st.chat_message("ai"): |
108 | | - st.markdown(message["content"]) |
109 | | - if vector_search_refs: |
110 | | - show_vector_search_refs(vector_search_refs) |
111 | | - vector_search_refs = [] |
112 | | - elif message["role"] in ("human", "user"): |
113 | | - with st.chat_message("human"): |
114 | | - content = message["content"] |
115 | | - if isinstance(content, list): |
116 | | - for part in content: |
117 | | - if part["type"] == "text": |
118 | | - st.write(part["text"]) |
119 | | - elif part["type"] == "image_url" and part["image_url"]["url"].startswith("data:image"): |
120 | | - st.image(part["image_url"]["url"]) |
121 | | - else: |
122 | | - st.write(content) |
123 | | - |
| 81 | + image_url = None |
| 82 | + # --- Chatty-Bot Centre --- |
| 83 | + # Get the model ID for the current client settings |
| 84 | + model_id = state.client_settings.get('ll_model', {}).get('model') |
| 85 | + |
| 86 | + # Prepare client settings for MCP client |
| 87 | + client_settings = state.client_settings.copy() |
| 88 | + if model_id: |
| 89 | + # Get the model configuration to retrieve the API key |
| 90 | + ll_models_enabled = st_common.enabled_models_lookup("ll") |
| 91 | + if model_id in ll_models_enabled and 'api_key' in ll_models_enabled[model_id]: |
| 92 | + client_settings['ll_model']['api_key'] = ll_models_enabled[model_id]['api_key'] |
| 93 | + |
| 94 | + # Initialize chat history in session state if it doesn't exist. |
| 95 | + if "messages" not in state: |
| 96 | + state.messages = [] |
| 97 | + |
| 98 | + st.chat_message("ai").write("Hello, how can I help you? (Now powered by MCPClient)") |
| 99 | + |
| 100 | + # Display a read-only view of the chat history from the session state. |
| 101 | + for message in state.messages: |
| 102 | + role = message["role"] |
| 103 | + # Standardize roles for display purposes |
| 104 | + if role in ("human", "user"): |
| 105 | + display_role = "human" |
| 106 | + elif role in ("ai", "assistant"): |
| 107 | + display_role = "assistant" |
| 108 | + else: # e.g. tool, system |
| 109 | + continue # Don't display tool or system messages directly |
| 110 | + |
| 111 | + with st.chat_message(display_role): |
| 112 | + content = message.get("content") |
| 113 | + if isinstance(content, list): # Handle multimodal content |
| 114 | + for part in content: |
| 115 | + if part.get("type") == "text": |
| 116 | + st.write(part["text"]) |
| 117 | + elif part.get("type") == "image_url" and part["image_url"]["url"].startswith("data:image"): |
| 118 | + st.image(part["image_url"]["url"]) |
| 119 | + else: |
| 120 | + st.write(content) |
| 121 | + |
| 122 | + # Render the chat input footer |
124 | 123 | sys_prompt = state.client_settings["prompts"]["sys"] |
125 | 124 | render_chat_footer() |
126 | | - if human_request := st.chat_input( |
| 125 | + |
| 126 | + # Handle new user input |
| 127 | + if prompt := st.chat_input( |
127 | 128 | f"Ask your question here... (current prompt: {sys_prompt})", |
| 129 | + key=f"chat_input_{len(state.messages)}", # Add a dynamic key |
128 | 130 | accept_file=True, |
129 | 131 | file_type=["jpg", "jpeg", "png"], |
130 | 132 | ): |
131 | | - st.chat_message("human").write(human_request.text) |
132 | | - file_b64 = None |
133 | | - if human_request["files"]: |
134 | | - file = human_request["files"][0] |
| 133 | + # Construct the user's message, potentially with an image |
| 134 | + user_content = [{"type": "text", "text": prompt.text}] |
| 135 | + if prompt.files: |
| 136 | + file = prompt.files[0] |
135 | 137 | file_bytes = file.read() |
136 | 138 | file_b64 = base64.b64encode(file_bytes).decode("utf-8") |
| 139 | + image_url = f"data:image/jpeg;base64,{file_b64}" |
| 140 | + user_content.append({"type": "image_url", "image_url": {"url": image_url}}) |
| 141 | + |
| 142 | + # Add user message to state and display it |
| 143 | + state.messages.append({"role": "user", "content": user_content}) # type: ignore |
| 144 | + with st.chat_message("human"): |
| 145 | + st.write(prompt.text) |
| 146 | + if image_url: |
| 147 | + st.image(image_url) |
| 148 | + |
| 149 | + # --- CORE LOGIC: Call MCPClient.invoke --- |
137 | 150 | try: |
138 | | - message_placeholder = st.chat_message("ai").empty() |
139 | | - full_answer = "" |
140 | | - async for chunk in user_client.stream(message=human_request.text, image_b64=file_b64): |
141 | | - full_answer += chunk |
142 | | - message_placeholder.markdown(full_answer) |
143 | | - # Stream until we hit the end then refresh to replace with history |
144 | | - st.rerun() |
145 | | - except Exception: |
146 | | - logger.error("Exception:", exc_info=1) |
147 | | - st.chat_message("ai").write( |
148 | | - """ |
149 | | - I'm sorry, something's gone wrong. Please try again. |
150 | | - If the problem persists, please raise an issue. |
151 | | - """ |
152 | | - ) |
153 | | - if st.button("Retry", key="reload_chatbot"): |
154 | | - st_common.clear_state_key("user_client") |
| 151 | + with st.chat_message("ai"): |
| 152 | + with st.spinner("Thinking..."): |
 | 153 | +                    # FIX: Create, use, and automatically clean up the MCPClient in a single block. |
 | 154 | +                    # Scoping it with "async with" is the core fix for the "Event loop closed" RuntimeError. |
| 155 | + async with MCPClient(client_settings=client_settings) as mcp_client: |
| 156 | + final_answer, new_history = await mcp_client.invoke(message_history=state.messages) |
| 157 | + |
| 158 | + # The invoke method's final_answer is now the source of truth for display |
| 159 | + st.markdown(final_answer) |
| 160 | + |
| 161 | + # Update the session state with the full history from the MCP client |
| 162 | + state.messages = new_history |
| 163 | + |
| 164 | + except Exception as e: |
| 165 | + logger.error("Exception during invoke call:", exc_info=True) |
| 166 | + st.error(f"An error occurred: {e}") |
 | 167 | +            # Check whether the failure is a closed event loop or a closed MCP session, and offer a retry |
| 168 | + if "Event loop closed" in str(e) or "MCP session" in str(e): |
| 169 | + st.info("The connection was reset. Please try sending your message again.") |
| 170 | + if st.button("Retry", key="reload_chatbot_error"): |
155 | 171 | st.rerun() |
| 172 | + |
| 173 | + # Rerun to clear the input box and reflect the latest state |
| 174 | + st.rerun() |
156 | 175 |
|
157 | 176 |
|
158 | | -if __name__ == "__main__" or "page.py" in inspect.stack()[1].filename: |
| 177 | +if __name__ == "__main__" or ("page" in inspect.stack()[1].filename if inspect.stack() else False): |
159 | 178 | try: |
160 | 179 | asyncio.run(main()) |
161 | | - except ValueError as ex: |
162 | | - logger.exception("Bug detected: %s", ex) |
163 | | - st.error("It looks like you found a bug; please open an issue", icon="🛑") |
| 180 | + except Exception as ex: |
| 181 | + logger.exception("A critical error occurred: %s", ex) |
| 182 | + st.error(f"A critical error occurred: {ex}", icon="🛑") |
164 | 183 | st.stop() |
165 | | - except IndexError as ex: |
166 | | - logger.exception("Unable to contact the server: %s", ex) |
167 | | - st.error("Unable to contact the server, is it running?", icon="🚨") |
168 | | - if st.button("Retry", key="reload_chatbot"): |
169 | | - st_common.clear_state_key("user_client") |
170 | | - st.rerun() |
|
0 commit comments