codex_truenas_helper/llamaCpp.Wrapper.app/openai_translate.py
Rushabh Gosar 5d1a0ee72b Initial commit
2026-01-07 16:54:39 -08:00


import time
import uuid
from typing import Any, Dict, List, Tuple


def _messages_from_input(input_value: Any) -> List[Dict[str, Any]]:
    """Convert a Responses-style ``input`` value into a Chat Completions ``messages`` list."""
    if isinstance(input_value, str):
        return [{"role": "user", "content": input_value}]
    if isinstance(input_value, list):
        messages: List[Dict[str, Any]] = []
        for item in input_value:
            if isinstance(item, str):
                messages.append({"role": "user", "content": item})
            elif isinstance(item, dict):
                role = item.get("role") or "user"
                content = item.get("content") or item.get("text") or ""
                if item.get("type") == "input_image":
                    content = [{"type": "image_url", "image_url": {"url": item.get("image_url", "")}}]
                messages.append({"role": role, "content": content})
        return messages
    return [{"role": "user", "content": str(input_value)}]


def _normalize_tools(tools: Any) -> Any:
    """Wrap flat Responses-style tool definitions in the nested Chat Completions shape."""
    if not isinstance(tools, list):
        return tools
    normalized = []
    for tool in tools:
        if not isinstance(tool, dict):
            normalized.append(tool)
            continue
        if "function" in tool:
            # Already in Chat Completions form.
            normalized.append(tool)
            continue
        if tool.get("type") == "function" and ("name" in tool or "parameters" in tool or "description" in tool):
            function = {
                "name": tool.get("name"),
                "parameters": tool.get("parameters"),
                "description": tool.get("description"),
            }
            function = {k: v for k, v in function.items() if v is not None}
            normalized.append({"type": "function", "function": function})
            continue
        normalized.append(tool)
    return normalized


def _normalize_tool_choice(tool_choice: Any) -> Any:
    """Rewrite a flat ``tool_choice`` dict into the nested Chat Completions form."""
    if not isinstance(tool_choice, dict):
        return tool_choice
    if "function" in tool_choice:
        return tool_choice
    if tool_choice.get("type") == "function" and "name" in tool_choice:
        return {"type": "function", "function": {"name": tool_choice.get("name")}}
    return tool_choice


def normalize_chat_payload(payload: Dict[str, Any]) -> Dict[str, Any]:
    """Normalize alias and legacy fields on a Chat Completions payload in place."""
    if "return_format" in payload and "response_format" not in payload:
        if payload["return_format"] == "json":
            payload["response_format"] = {"type": "json_object"}
    if "functions" in payload and "tools" not in payload:
        # Legacy ``functions`` list becomes the newer ``tools`` list.
        functions = payload.get("functions")
        if isinstance(functions, list):
            tools = []
            for func in functions:
                if isinstance(func, dict):
                    tools.append({"type": "function", "function": func})
            if tools:
                payload["tools"] = tools
            payload.pop("functions", None)
    if "tools" in payload:
        payload["tools"] = _normalize_tools(payload.get("tools"))
    if "tool_choice" in payload:
        payload["tool_choice"] = _normalize_tool_choice(payload.get("tool_choice"))
    return payload


def responses_to_chat_payload(payload: Dict[str, Any]) -> Tuple[Dict[str, Any], str]:
    """Translate a Responses API request body into a Chat Completions request body."""
    model = payload.get("model") or "unknown"
    messages = _messages_from_input(payload.get("input", ""))
    chat_payload: Dict[str, Any] = {
        "model": model,
        "messages": messages,
    }
    passthrough_keys = [
        "temperature",
        "top_p",
        "max_output_tokens",
        "stream",
        "tools",
        "tool_choice",
        "response_format",
        "return_format",
        "frequency_penalty",
        "presence_penalty",
        "seed",
        "stop",
    ]
    for key in passthrough_keys:
        if key in payload:
            if key == "max_output_tokens":
                # Responses API name -> Chat Completions name.
                chat_payload["max_tokens"] = payload[key]
            elif key == "return_format" and payload[key] == "json":
                chat_payload["response_format"] = {"type": "json_object"}
            else:
                chat_payload[key] = payload[key]
    return normalize_chat_payload(chat_payload), model


def chat_to_responses(chat: Dict[str, Any], model: str) -> Dict[str, Any]:
    """Wrap a Chat Completions response in a minimal Responses API envelope."""
    response_id = f"resp_{uuid.uuid4().hex}"
    created = int(time.time())
    content = ""
    if chat.get("choices"):
        choice = chat["choices"][0]
        message = choice.get("message") or {}
        content = message.get("content") or ""
    return {
        "id": response_id,
        "object": "response",
        "created": created,
        "model": model,
        "output": [
            {
                "id": f"msg_{uuid.uuid4().hex}",
                "type": "message",
                "role": "assistant",
                "content": [
                    {"type": "output_text", "text": content}
                ],
            }
        ],
        "usage": chat.get("usage", {}),
    }
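

# Illustrative round-trip sketch, not part of the original module: the request and
# reply shapes below are assumptions chosen to exercise the translation helpers above.
if __name__ == "__main__":
    import json

    # A hypothetical Responses API request as a client might send it.
    request = {
        "model": "local-model",
        "input": "Say hello in one word.",
        "max_output_tokens": 16,
        "temperature": 0.2,
    }
    chat_payload, model = responses_to_chat_payload(request)
    # `max_output_tokens` should now appear as `max_tokens` in the chat payload.
    print(json.dumps(chat_payload, indent=2))

    # A stand-in Chat Completions reply; a real one would come from the upstream server.
    fake_chat_reply = {
        "choices": [{"message": {"role": "assistant", "content": "Hello"}}],
        "usage": {"prompt_tokens": 8, "completion_tokens": 1, "total_tokens": 9},
    }
    print(json.dumps(chat_to_responses(fake_chat_reply, model), indent=2))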