Skip to content

Commit 85a1fbf

Browse files
authored
Merge pull request #9 from feloy/fix-7/do-not-encode-messages
fix: do not encode messages to prompt
2 parents f6cb0ee + 68d0731 commit 85a1fbf

File tree

1 file changed: +3 additions, −11 deletions

src/podman_ai_lab_stack/podman_ai_lab.py

Lines changed: 3 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -190,18 +190,10 @@ async def _get_params(self, request: Union[ChatCompletionRequest, CompletionRequ
190190

191191
input_dict = {}
192192
media_present = request_has_media(request)
193-
llama_model = request.model
194193
if isinstance(request, ChatCompletionRequest):
195-
if media_present or not llama_model:
196-
contents = [await convert_message_to_openai_dict_for_podman_ai_lab(m) for m in request.messages]
197-
# flatten the list of lists
198-
input_dict["messages"] = [item for sublist in contents for item in sublist]
199-
else:
200-
input_dict["raw"] = True
201-
input_dict["prompt"] = await chat_completion_request_to_prompt(
202-
request,
203-
llama_model,
204-
)
194+
contents = [await convert_message_to_openai_dict_for_podman_ai_lab(m) for m in request.messages]
195+
# flatten the list of lists
196+
input_dict["messages"] = [item for sublist in contents for item in sublist]
205197
else:
206198
assert not media_present, "Ollama does not support media for Completion requests"
207199
input_dict["prompt"] = await completion_request_to_prompt(request)

Comments: 0