Fix MiniMax support

This commit is contained in:
abnerhexu
2026-01-23 19:44:18 +08:00
parent 77264c172f
commit 3c50124b62
2 changed files with 32 additions and 3 deletions

View File

@@ -87,7 +87,7 @@ class UAVAgentGUI:
self.provider_var = tk.StringVar(value="Ollama") self.provider_var = tk.StringVar(value="Ollama")
self.model_var = tk.StringVar() self.model_var = tk.StringVar()
self.uav_base_url_var = tk.StringVar(value="http://localhost:8000") self.uav_base_url_var = tk.StringVar(value="http://100.80.12.144:8000")
self.uav_api_key_var = tk.StringVar(value="agent_secret_key_change_in_production") # UAV API key for authentication self.uav_api_key_var = tk.StringVar(value="agent_secret_key_change_in_production") # UAV API key for authentication
self.temperature_var = tk.DoubleVar(value=0.1) self.temperature_var = tk.DoubleVar(value=0.1)
self.verbose_var = tk.BooleanVar(value=True) self.verbose_var = tk.BooleanVar(value=True)

View File

@@ -8,16 +8,40 @@ from langchain_classic.agents import AgentExecutor
from langchain_classic.prompts import PromptTemplate from langchain_classic.prompts import PromptTemplate
from langchain_ollama import ChatOllama from langchain_ollama import ChatOllama
from langchain_openai import ChatOpenAI from langchain_openai import ChatOpenAI
from langchain_core.outputs import ChatResult
from langchain_core.messages import AIMessage
from uav_api_client import UAVAPIClient from uav_api_client import UAVAPIClient
from uav_langchain_tools import create_uav_tools from uav_langchain_tools import create_uav_tools
from template.agent_prompt import AGENT_PROMPT from template.agent_prompt import AGENT_PROMPT
from template.parsing_error import PARSING_ERROR_TEMPLATE from template.parsing_error import PARSING_ERROR_TEMPLATE
from typing import Optional, Dict, Any from typing import Optional, Dict, Any, List
import json import json
import os import os
from pathlib import Path from pathlib import Path
class MiniMaxChatOpenAI(ChatOpenAI):
    """ChatOpenAI subclass that surfaces MiniMax's split reasoning output.

    When a request is sent with ``extra_body={"reasoning_split": True}``,
    MiniMax returns the model's reasoning in
    ``choice.message.reasoning_details`` rather than inline in the content.
    The ReAct agent parser expects a leading ``Thought:`` line, so this
    class re-attaches the reasoning to each generation's message content.
    """

    def _create_chat_result(self, response: Any) -> ChatResult:
        """Build the ChatResult, merging MiniMax reasoning into the content.

        Args:
            response: Raw response object from the OpenAI-compatible client.

        Returns:
            The ``ChatResult`` produced by the parent class; for each choice
            carrying ``reasoning_details``, the matching ``AIMessage`` gets
            the reasoning stored in ``additional_kwargs["reasoning_content"]``
            and prepended as a ``Thought:`` line when safe to do so.
        """
        result = super()._create_chat_result(response)
        choices = getattr(response, "choices", None)
        if not choices:
            return result
        for i, choice in enumerate(choices):
            # MiniMax puts reasoning in reasoning_details when reasoning_split=True.
            details = getattr(choice.message, "reasoning_details", None)
            if not details:
                continue
            # NOTE(review): assumes each entry is a dict with a 'text' key —
            # confirm against the MiniMax API response schema. Guarded so a
            # non-dict entry is skipped instead of raising AttributeError.
            first = details[0]
            reasoning = first.get("text", "") if isinstance(first, dict) else ""
            if not reasoning or i >= len(result.generations):
                continue
            message = result.generations[i].message
            if isinstance(message, AIMessage):
                # Keep the raw reasoning available to callers.
                message.additional_kwargs["reasoning_content"] = reasoning
                # Prepend for ReAct agent compatibility — only when content
                # is a plain string (AIMessage.content may be a list of
                # content blocks) and no Thought: line is already present.
                if isinstance(message.content, str) and "Thought:" not in message.content:
                    message.content = f"Thought: {reasoning}\n" + message.content
        return result
def load_llm_settings(settings_path: str = "llm_settings.json") -> Optional[Dict[str, Any]]: def load_llm_settings(settings_path: str = "llm_settings.json") -> Optional[Dict[str, Any]]:
"""Load LLM settings from JSON file""" """Load LLM settings from JSON file"""
try: try:
@@ -264,6 +288,11 @@ class UAVControlAgent:
"base_url": final_base_url "base_url": final_base_url
} }
if llm_model.startswith("MiniMax"):
# Enable reasoning_split for MiniMax models
kwargs["model_kwargs"] = {"extra_body": {"reasoning_split": True}}
self.llm = MiniMaxChatOpenAI(**kwargs)
else:
self.llm = ChatOpenAI(**kwargs) self.llm = ChatOpenAI(**kwargs)
if self.debug: if self.debug: