Commit 9c64ff6
Merge pull request #171 from codelion/feat-add-mcp-plugin-again
- Add MCP plugin that allows you to use any LLM with any MCP server
- Support streaming responses in local inference (see the client sketch below)
- Fix a regression in the simple Gradio GUI
2 parents 98d59a4 + 8dbf4a1 commit 9c64ff6
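A hypothetical client-side illustration of the streaming support mentioned in the commit message, assuming optillm's OpenAI-compatible endpoint on its default local port; the base URL, API key, and model name are placeholders, not values taken from this commit.

from openai import OpenAI

client = OpenAI(base_url="http://localhost:8000/v1", api_key="optillm")
stream = client.chat.completions.create(
    model="gpt-4o-mini",
    messages=[{"role": "user", "content": "Tell me a short story."}],
    stream=True,
)
for chunk in stream:
    # Some chunks (e.g. a final usage chunk) may carry no choices or content.
    if chunk.choices and chunk.choices[0].delta.content:
        print(chunk.choices[0].delta.content, end="", flush=True)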

File tree

5 files changed: +696 −12 lines

optillm.py

Lines changed: 53 additions & 7 deletions
@@ -118,6 +118,29 @@ def get_config():
 
 plugin_approaches = {}
 
+def normalize_message_content(messages):
+    """
+    Ensure all message content fields are strings, not lists.
+    Some models don't handle list-format content correctly.
+    """
+    normalized_messages = []
+    for message in messages:
+        normalized_message = message.copy()
+        content = message.get('content', '')
+
+        # Convert list content to string if needed
+        if isinstance(content, list):
+            # Extract text content from the list
+            text_content = ' '.join(
+                item.get('text', '') for item in content
+                if isinstance(item, dict) and item.get('type') == 'text'
+            )
+            normalized_message['content'] = text_content
+
+        normalized_messages.append(normalized_message)
+
+    return normalized_messages
+
 def none_approach(
     client: Any,
     model: str,
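To make the new helper concrete, a hypothetical before/after: the list-format content below is the OpenAI-style multimodal shape the docstring alludes to, not data from this commit.

# One message with plain string content, one whose content is a list of
# typed parts (the shape some clients send for multimodal requests).
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {
        "role": "user",
        "content": [
            {"type": "text", "text": "Summarize this document."},
            {"type": "image_url", "image_url": {"url": "https://example.com/doc.png"}},
        ],
    },
]

normalized = normalize_message_content(messages)
# String content passes through unchanged; list content is flattened to
# its text parts only, so the image part is dropped:
assert normalized[1]["content"] == "Summarize this document."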
@@ -143,10 +166,13 @@ def none_approach(
         model = model[5:]
 
     try:
-        # Make the direct completion call with original messages and parameters
+        # Normalize message content to ensure it's always string
+        normalized_messages = normalize_message_content(original_messages)
+
+        # Make the direct completion call with normalized messages and parameters
         response = client.chat.completions.create(
             model=model,
-            messages=original_messages,
+            messages=normalized_messages,
             **kwargs
         )
 
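The five characters stripped by model = model[5:] correspond to a "none-" prefix, which suggests how this direct pass-through path is selected from a client. A hypothetical call, again with placeholder endpoint and model name:

from openai import OpenAI

client = OpenAI(base_url="http://localhost:8000/v1", api_key="optillm")
response = client.chat.completions.create(
    model="none-gpt-4o-mini",  # "none-" is stripped; the rest is forwarded
    messages=[{"role": "user", "content": "Hello!"}],
)
print(response.choices[0].message.content)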

@@ -320,12 +346,32 @@ def execute_single_approach(approach, system_prompt, initial_query, client, mode
         import inspect
         sig = inspect.signature(plugin_func)
 
-        if 'request_config' in sig.parameters:
-            # Plugin supports request_config
-            return plugin_func(system_prompt, initial_query, client, model, request_config=request_config)
+        # Check if the plugin function is async
+        is_async = inspect.iscoroutinefunction(plugin_func)
+
+        if is_async:
+            # For async functions, we need to run them in an event loop
+            import asyncio
+            loop = asyncio.new_event_loop()
+            asyncio.set_event_loop(loop)
+            try:
+                if 'request_config' in sig.parameters:
+                    # Plugin supports request_config
+                    result = loop.run_until_complete(plugin_func(system_prompt, initial_query, client, model, request_config=request_config))
+                else:
+                    # Legacy plugin without request_config support
+                    result = loop.run_until_complete(plugin_func(system_prompt, initial_query, client, model))
+                return result
+            finally:
+                loop.close()
         else:
-            # Legacy plugin without request_config support
-            return plugin_func(system_prompt, initial_query, client, model)
+            # For synchronous functions, call directly
+            if 'request_config' in sig.parameters:
+                # Plugin supports request_config
+                return plugin_func(system_prompt, initial_query, client, model, request_config=request_config)
+            else:
+                # Legacy plugin without request_config support
+                return plugin_func(system_prompt, initial_query, client, model)
     else:
         raise ValueError(f"Unknown approach: {approach}")
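The async branch above lets coroutine-based plugins (such as the new MCP plugin) be driven from the synchronous dispatcher. A minimal sketch of what such a plugin's entry point might look like: the signature mirrors the dispatch call above, while SLUG, the name run, and the body are assumptions, not the actual MCP implementation.

import asyncio

SLUG = "example_async"

# Hypothetical async plugin entry point; matches the call shape used by
# execute_single_approach but is not the real MCP plugin.
async def run(system_prompt, initial_query, client, model, request_config=None):
    # Stand-in for awaiting an external resource, e.g. a call to an MCP server.
    await asyncio.sleep(0)
    response = client.chat.completions.create(
        model=model,
        messages=[
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": initial_query},
        ],
    )
    return response.choices[0].message.content

Calling run directly from synchronous code would only create a coroutine object; loop.run_until_complete is what actually executes it, and creating a fresh event loop per request, then closing it in the finally block, keeps one request's loop state from leaking into the next.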

optillm/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -2,7 +2,7 @@
 import os
 
 # Version information
-__version__ = "0.1.6"
+__version__ = "0.1.7"
 
 # Get the path to the root optillm.py
 spec = util.spec_from_file_location(
