
Commit ac0f44c

MCP implemented successfully
1 parent bc01221 commit ac0f44c

File tree: 19 files changed (+1604, -120 lines)

src/client/content/chatbot.py

Lines changed: 94 additions & 81 deletions
@@ -2,27 +2,23 @@
 Copyright (c) 2024, 2025, Oracle and/or its affiliates.
 Licensed under the Universal Permissive License v1.0 as shown at http://oss.oracle.com/licenses/upl.
 
-Session States Set:
-- user_client: Stores the Client
+This file merges the Streamlit Chatbot GUI with the MCPClient for a complete,
+runnable example demonstrating their integration.
 """
-
-# spell-checker:ignore streamlit, oraclevs, selectai
+# spell-checker:ignore streamlit, oraclevs, selectai, langgraph, prebuilt
 import asyncio
 import inspect
-import json
 import base64
 
 import streamlit as st
 from streamlit import session_state as state
 
-from client.content.config.models import get_models
-
-import client.utils.st_common as st_common
 import client.utils.api_call as api_call
-
+import client.utils.st_common as st_common
+from client.content.config.models import get_models
 from client.utils.st_footer import render_chat_footer
-import client.utils.client as client
 import common.logging_config as logging_config
+from client.mcp.client import MCPClient
 
 logger = logging_config.logging.getLogger("client.content.chatbot")
 
@@ -82,89 +78,106 @@ async def main() -> None:
     # Stop when sidebar configurations not set
     if not state.enable_client:
         st.stop()
-
-    #########################################################################
-    # Chatty-Bot Centre
-    #########################################################################
-    # Establish the Client
-    if "user_client" not in state:
-        state.user_client = client.Client(
-            server=state.server,
-            settings=state.client_settings,
-            timeout=1200,
-        )
-    user_client: client.Client = state.user_client
-
-    history = await user_client.get_history()
-    st.chat_message("ai").write("Hello, how can I help you?")
-    vector_search_refs = []
-    for message in history or []:
-        if not message["content"]:
-            continue
-        if message["role"] == "tool" and message["name"] == "oraclevs_tool":
-            vector_search_refs = json.loads(message["content"])
-        if message["role"] in ("ai", "assistant"):
-            with st.chat_message("ai"):
-                st.markdown(message["content"])
-                if vector_search_refs:
-                    show_vector_search_refs(vector_search_refs)
-                    vector_search_refs = []
-        elif message["role"] in ("human", "user"):
-            with st.chat_message("human"):
-                content = message["content"]
-                if isinstance(content, list):
-                    for part in content:
-                        if part["type"] == "text":
-                            st.write(part["text"])
-                        elif part["type"] == "image_url" and part["image_url"]["url"].startswith("data:image"):
-                            st.image(part["image_url"]["url"])
-                else:
-                    st.write(content)
-
+    image_url = None
+    # --- Chatty-Bot Centre ---
+    # Get the model ID for the current client settings
+    model_id = state.client_settings.get('ll_model', {}).get('model')
+
+    # Prepare client settings for MCP client
+    client_settings = state.client_settings.copy()
+    if model_id:
+        # Get the model configuration to retrieve the API key
+        ll_models_enabled = st_common.enabled_models_lookup("ll")
+        if model_id in ll_models_enabled and 'api_key' in ll_models_enabled[model_id]:
+            client_settings['ll_model']['api_key'] = ll_models_enabled[model_id]['api_key']
+
+    # Initialize chat history in session state if it doesn't exist.
+    if "messages" not in state:
+        state.messages = []
+
+    st.chat_message("ai").write("Hello, how can I help you? (Now powered by MCPClient)")
+
+    # Display a read-only view of the chat history from the session state.
+    for message in state.messages:
+        role = message["role"]
+        # Standardize roles for display purposes
+        if role in ("human", "user"):
+            display_role = "human"
+        elif role in ("ai", "assistant"):
+            display_role = "assistant"
+        else: # e.g. tool, system
+            continue # Don't display tool or system messages directly
+
+        with st.chat_message(display_role):
+            content = message.get("content")
+            if isinstance(content, list): # Handle multimodal content
+                for part in content:
+                    if part.get("type") == "text":
+                        st.write(part["text"])
+                    elif part.get("type") == "image_url" and part["image_url"]["url"].startswith("data:image"):
+                        st.image(part["image_url"]["url"])
+            else:
+                st.write(content)
+
+    # Render the chat input footer
     sys_prompt = state.client_settings["prompts"]["sys"]
     render_chat_footer()
-    if human_request := st.chat_input(
+
+    # Handle new user input
+    if prompt := st.chat_input(
         f"Ask your question here... (current prompt: {sys_prompt})",
+        key=f"chat_input_{len(state.messages)}", # Add a dynamic key
         accept_file=True,
         file_type=["jpg", "jpeg", "png"],
     ):
-        st.chat_message("human").write(human_request.text)
-        file_b64 = None
-        if human_request["files"]:
-            file = human_request["files"][0]
+        # Construct the user's message, potentially with an image
+        user_content = [{"type": "text", "text": prompt.text}]
+        if prompt.files:
+            file = prompt.files[0]
             file_bytes = file.read()
             file_b64 = base64.b64encode(file_bytes).decode("utf-8")
+            image_url = f"data:image/jpeg;base64,{file_b64}"
+            user_content.append({"type": "image_url", "image_url": {"url": image_url}})
+
+        # Add user message to state and display it
+        state.messages.append({"role": "user", "content": user_content}) # type: ignore
+        with st.chat_message("human"):
+            st.write(prompt.text)
+            if image_url:
+                st.image(image_url)
+
+        # --- CORE LOGIC: Call MCPClient.invoke ---
         try:
-            message_placeholder = st.chat_message("ai").empty()
-            full_answer = ""
-            async for chunk in user_client.stream(message=human_request.text, image_b64=file_b64):
-                full_answer += chunk
-                message_placeholder.markdown(full_answer)
-            # Stream until we hit the end then refresh to replace with history
-            st.rerun()
-        except Exception:
-            logger.error("Exception:", exc_info=1)
-            st.chat_message("ai").write(
-                """
-                I'm sorry, something's gone wrong. Please try again.
-                If the problem persists, please raise an issue.
-                """
-            )
-            if st.button("Retry", key="reload_chatbot"):
-                st_common.clear_state_key("user_client")
+            with st.chat_message("ai"):
+                with st.spinner("Thinking..."):
+                    # FIX: Create, use, and automatically clean up the MCPClient in a single block.
+                    # This is the core of the solution to the RuntimeError.
+                    async with MCPClient(client_settings=client_settings) as mcp_client:
+                        final_answer, new_history = await mcp_client.invoke(message_history=state.messages)
+
+                    # The invoke method's final_answer is now the source of truth for display
+                    st.markdown(final_answer)
+
+            # Update the session state with the full history from the MCP client
+            state.messages = new_history
+
+        except Exception as e:
+            logger.error("Exception during invoke call:", exc_info=True)
+            st.error(f"An error occurred: {e}")
+            # Check if it's the event loop closed error or MCP session closed and offer a retry
+            if "Event loop closed" in str(e) or "MCP session" in str(e):
+                st.info("The connection was reset. Please try sending your message again.")
+            if st.button("Retry", key="reload_chatbot_error"):
                 st.rerun()
+
+        # Rerun to clear the input box and reflect the latest state
+        st.rerun()
 
 
-if __name__ == "__main__" or "page.py" in inspect.stack()[1].filename:
+if __name__ == "__main__" or ("page" in inspect.stack()[1].filename if inspect.stack() else False):
     try:
         asyncio.run(main())
-    except ValueError as ex:
-        logger.exception("Bug detected: %s", ex)
-        st.error("It looks like you found a bug; please open an issue", icon="🛑")
+    except Exception as ex:
+        logger.exception("A critical error occurred: %s", ex)
+        st.error(f"A critical error occurred: {ex}", icon="🛑")
         st.stop()
-    except IndexError as ex:
-        logger.exception("Unable to contact the server: %s", ex)
-        st.error("Unable to contact the server, is it running?", icon="🚨")
-        if st.button("Retry", key="reload_chatbot"):
-            st_common.clear_state_key("user_client")
-            st.rerun()
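
The change above replaces the long-lived user_client kept in session state with an MCPClient that is opened and closed inside a single "async with" block, so its sessions are created and torn down within the same running event loop; that is what the inline "FIX ... RuntimeError" comments refer to. This excerpt does not include client/mcp/client.py, so the following is only a minimal sketch of the shape such a context-manager client could take — the constructor argument, the invoke signature, and the (final_answer, new_history) return value are inferred from the call site above, and the body is placeholder logic, not the actual implementation.

# Hypothetical sketch of an async-context-manager MCP client, matching how it
# is called in chatbot.py above (not the real client/mcp/client.py from this commit).
import asyncio
from typing import Any


class MCPClient:
    """Owns its sessions only for the lifetime of one `async with` block."""

    def __init__(self, client_settings: dict[str, Any]) -> None:
        self.client_settings = client_settings
        self._session = None  # e.g. an MCP/tool session opened in __aenter__

    async def __aenter__(self) -> "MCPClient":
        # Open sessions here so they are bound to the currently running event loop.
        self._session = object()  # placeholder for a real session handle
        return self

    async def __aexit__(self, exc_type, exc, tb) -> None:
        # Always close sessions before the event loop that created them goes away.
        self._session = None

    async def invoke(self, message_history: list[dict]) -> tuple[str, list[dict]]:
        # Placeholder: a real implementation would run the LLM/tool loop here.
        answer = f"(echo) {message_history[-1]['content']}"
        return answer, message_history + [{"role": "assistant", "content": answer}]


async def _demo() -> None:
    async with MCPClient(client_settings={}) as mcp_client:
        answer, history = await mcp_client.invoke(
            message_history=[{"role": "user", "content": "hello"}]
        )
        print(answer, len(history))


if __name__ == "__main__":
    asyncio.run(_demo())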
Lines changed: 24 additions & 0 deletions
@@ -0,0 +1,24 @@
+import inspect
+
+from client.mcp.frontend import display_commands_tab, display_ide_tab, get_fastapi_base_url, get_server_capabilities
+
+import streamlit as st
+
+def main():
+    fastapi_base_url = get_fastapi_base_url()
+    tools, resources, prompts = get_server_capabilities(fastapi_base_url)
+    if "chat_history" not in st.session_state:
+        st.session_state.chat_history = []
+    ide, commands = st.tabs(["🛠️ IDE", "📚 Available Commands"])
+
+    with ide:
+        # Display the IDE tab using the original AI Optimizer logic.
+        display_ide_tab()
+    with commands:
+        # Display the commands tab using the original AI Optimizer logic.
+        display_commands_tab(tools, resources, prompts)
+
+
+
+if __name__ == "__main__" or "page.py" in inspect.stack()[1].filename:
+    main()
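
This new page guards main() the same way chatbot.py does: run when the module is executed directly, or when it is loaded by a Streamlit wrapper page. The updated chatbot.py guard additionally tolerates a short call stack. A small illustrative sketch of that guard in isolation — the "page.py" filename comes from the code above; everything else is hypothetical:

import inspect


def main() -> None:
    print("page body goes here")


# Run when executed directly, or when imported by a Streamlit wrapper page
# (e.g. a thin page.py that imports this module). The len() check guards
# against a short call stack, mirroring the updated guard in chatbot.py.
_stack = inspect.stack()
_caller = _stack[1].filename if len(_stack) > 1 else ""
if __name__ == "__main__" or "page.py" in _caller:
    main()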

src/client/content/config/settings.py

Lines changed: 26 additions & 9 deletions
@@ -37,15 +37,32 @@
 #############################################################################
 def get_settings(include_sensitive: bool = False):
     """Get Server-Side Settings"""
-    settings = api_call.get(
-        endpoint="v1/settings",
-        params={
-            "client": state.client_settings["client"],
-            "full_config": True,
-            "incl_sensitive": include_sensitive,
-        },
-    )
-    return settings
+    try:
+        settings = api_call.get(
+            endpoint="v1/settings",
+            params={
+                "client": state.client_settings["client"],
+                "full_config": True,
+                "incl_sensitive": include_sensitive,
+            },
+        )
+        return settings
+    except api_call.ApiError as e:
+        if "not found" in str(e):
+            # If client settings not found, create them
+            logger.info("Client settings not found, creating new ones")
+            api_call.post(endpoint="v1/settings", params={"client": state.client_settings["client"]})
+            settings = api_call.get(
+                endpoint="v1/settings",
+                params={
+                    "client": state.client_settings["client"],
+                    "full_config": True,
+                    "incl_sensitive": include_sensitive,
+                },
+            )
+            return settings
+        else:
+            raise
 
 def save_settings(settings):
     """Save Settings after changing client"""
