Fixed bug in auto-navigate for ellipse

This commit is contained in:
abnerhexu
2026-01-24 11:05:52 +08:00
parent 26b1a62566
commit 9b05507c5e
4 changed files with 395 additions and 38 deletions

View File

@@ -8,6 +8,7 @@ from langchain_classic.agents import AgentExecutor
from langchain_classic.prompts import PromptTemplate
from langchain_ollama import ChatOllama
from langchain_openai import ChatOpenAI
from langchain_anthropic import ChatAnthropic
from langchain_core.outputs import ChatResult
from langchain_core.messages import AIMessage
from uav_api_client import UAVAPIClient
@@ -260,7 +261,7 @@ class UAVControlAgent:
print(f"✅ Ollama LLM initialized")
print()
elif llm_provider in ["openai", "openai-compatible"]:
elif llm_provider in ["openai", "openai-compatible", "anthropic-compatible"]:
if not llm_api_key:
raise ValueError(f"API key is required for {llm_provider} provider. Use --llm-api-key or set environment variable.")
@@ -272,7 +273,7 @@ class UAVControlAgent:
if not llm_base_url:
raise ValueError("llm_base_url is required for openai-compatible provider")
final_base_url = llm_base_url
provider_name = "OpenAI-Compatible API"
provider_name = "Anthropic-Compatible API" if llm_provider == "anthropic-compatible" else "OpenAI-Compatible API"
if self.debug:
print(f" Provider: {provider_name}")
@@ -281,19 +282,22 @@ class UAVControlAgent:
print(f" API Key: {'*' * (len(llm_api_key) - 4) + llm_api_key[-4:] if len(llm_api_key) > 4 else '****'}")
# Create LLM instance
kwargs = {
"model": llm_model,
"temperature": temperature,
"api_key": llm_api_key,
"base_url": final_base_url
}
if llm_model == "MiniMax-M2.1":
reasoning = {
"effort": "low", # 'low', 'medium', or 'high'
"summary": "auto", # 'detailed', 'auto', or None
if llm_provider == "anthropic-compatible":
kwargs = {
"model_name": llm_model,
"temperature": temperature,
"api_key": llm_api_key,
"base_url": final_base_url
}
# TODO: MiniMax的API对OpenAI的兼容性有点问题。。。
self.llm = ChatOpenAI(**kwargs)
self.llm = ChatAnthropic(**kwargs)
else:
kwargs = {
"model": llm_model,
"temperature": temperature,
"api_key": llm_api_key,
"base_url": final_base_url
}
self.llm = ChatOpenAI(**kwargs)
if self.debug:
print(f"{provider_name} LLM initialized")