This commit is contained in:
abnerhexu
2026-01-23 22:47:05 +08:00
2 changed files with 26 additions and 2 deletions

View File

@@ -87,7 +87,7 @@ class UAVAgentGUI:
self.provider_var = tk.StringVar(value="Ollama")
self.model_var = tk.StringVar()
self.uav_base_url_var = tk.StringVar(value="http://localhost:8000")
self.uav_base_url_var = tk.StringVar(value="http://127.0.0.1:8000")
self.uav_api_key_var = tk.StringVar(value="agent_secret_key_change_in_production") # UAV API key for authentication
self.temperature_var = tk.DoubleVar(value=0.1)
self.verbose_var = tk.BooleanVar(value=True)

View File

@@ -8,16 +8,40 @@ from langchain_classic.agents import AgentExecutor
from langchain_classic.prompts import PromptTemplate
from langchain_ollama import ChatOllama
from langchain_openai import ChatOpenAI
from langchain_core.outputs import ChatResult
from langchain_core.messages import AIMessage
from uav_api_client import UAVAPIClient
from uav_langchain_tools import create_uav_tools
from template.agent_prompt import AGENT_PROMPT
from template.parsing_error import PARSING_ERROR_TEMPLATE
from typing import Optional, Dict, Any
from typing import Optional, Dict, Any, List
import json
import os
from pathlib import Path
class EnhancedChatOpenAI(ChatOpenAI):
    """ChatOpenAI subclass that surfaces provider-specific reasoning text.

    Some OpenAI-compatible providers (MiniMax is the one handled here) return
    their chain-of-thought in a non-standard ``reasoning_details`` field on
    each choice's message. This subclass copies that text into the generated
    ``AIMessage`` so a downstream ReAct agent can see it.
    """

    def _create_chat_result(self, response: Any) -> ChatResult:
        """Build the ChatResult, then attach any captured reasoning text.

        The reasoning is stored in ``additional_kwargs["reasoning_content"]``
        on each affected generation and, when the message content is a plain
        string without an existing "Thought:" marker, prepended to the
        content so the ReAct output parser picks it up.

        Args:
            response: Raw provider response (OpenAI SDK chat-completion shape).

        Returns:
            The ChatResult produced by the parent class, with reasoning
            content attached in place where available.
        """
        result = super()._create_chat_result(response)
        choices = getattr(response, "choices", None) or []
        # zip pairs each choice with its generation and implicitly bounds
        # both iterations (replaces the enumerate + index-check pattern).
        for choice, gen in zip(choices, result.generations):
            details = getattr(choice.message, "reasoning_details", None)
            if not details:
                continue
            # MiniMax sends a list of dicts with a 'text' key; guard the
            # element type rather than assuming a dict.
            first = details[0]
            reasoning = first.get("text", "") if isinstance(first, dict) else ""
            if not reasoning or not isinstance(gen.message, AIMessage):
                continue
            # Keep the raw reasoning available to callers.
            gen.message.additional_kwargs["reasoning_content"] = reasoning
            # AIMessage.content may be a list of content blocks; the original
            # code's f-string concatenation would raise TypeError on a list,
            # so only prepend when content is a plain string.
            content = gen.message.content
            if isinstance(content, str) and "Thought:" not in content:
                gen.message.content = f"Thought: {reasoning}\n" + content
        return result
def load_llm_settings(settings_path: str = "llm_settings.json") -> Optional[Dict[str, Any]]:
"""Load LLM settings from JSON file"""
try: