first commit
This commit is contained in:
@@ -1,5 +1,5 @@
|
||||
|
||||
import os, json, httpx
|
||||
import os, json, httpx, traceback
|
||||
from fastapi import FastAPI, HTTPException
|
||||
from pydantic import BaseModel
|
||||
|
||||
@@ -7,7 +7,6 @@ AGENT_URL = os.getenv("AGENT_URL", "http://ai-agent:8080")
|
||||
OPENAI_MODEL = os.getenv("OPENAI_MODEL", "gpt-4o-mini")
|
||||
|
||||
def _read_api_key():
|
||||
# Prefer Docker secret if available
|
||||
path = os.getenv("OPENAI_API_KEY_FILE", "/run/secrets/openai_api_key")
|
||||
if os.path.exists(path):
|
||||
return open(path, "r").read().strip()
|
||||
@@ -26,9 +25,16 @@ class ChatIn(BaseModel):
|
||||
|
||||
# ASGI application for the relay service.
app = FastAPI(title="AI Relay (LLM -> Agent)")

# Rolling debug snapshot of the most recent relay cycle, served by /last-raw.
# One slot per hop (OpenAI request/response, agent request/response) plus the
# last error string; request handlers overwrite these slots in place.
_last_debug = {"openai_request": None, "openai_response": None, "agent_request": None, "agent_response": None, "error": None}
|
||||
|
||||
@app.get("/health")
def health():
    """Liveness probe.

    Also reports the configured model name and agent URL so an operator can
    verify the relay's wiring without inspecting environment variables.
    """
    # Diff residue left two return statements here (the pre-commit
    # `{"ok": True}` shadowed the post-commit version, making it
    # unreachable); keep only the post-commit return.
    return {"ok": True, "model": OPENAI_MODEL, "agent_url": AGENT_URL}
|
||||
|
||||
@app.get("/last-raw")
def last_raw():
    """Return the module-level debug snapshot of the last relay cycle.

    NOTE(review): this exposes raw upstream request/response text (which may
    include prompts and error bodies) — confirm the endpoint is not reachable
    in production deployments.
    """
    # Expose last request/response for debugging
    return _last_debug
|
||||
|
||||
@app.post("/chat")
|
||||
async def chat(inp: ChatIn):
|
||||
@@ -48,21 +54,42 @@ async def chat(inp: ChatIn):
|
||||
],
|
||||
}
|
||||
|
||||
async with httpx.AsyncClient(timeout=30) as client:
|
||||
r = await client.post(url, headers=headers, json=body)
|
||||
if r.status_code >= 400:
|
||||
raise HTTPException(502, f"OpenAI error: {r.text}")
|
||||
data = r.json()
|
||||
_last_debug["openai_request"] = {"url": url, "body": body}
|
||||
_last_debug["openai_response"] = None
|
||||
_last_debug["agent_request"] = None
|
||||
_last_debug["agent_response"] = None
|
||||
_last_debug["error"] = None
|
||||
|
||||
try:
|
||||
async with httpx.AsyncClient(timeout=30) as client:
|
||||
r = await client.post(url, headers=headers, json=body)
|
||||
_last_debug["openai_response"] = {"status": r.status_code, "text": r.text[:500]}
|
||||
r.raise_for_status()
|
||||
data = r.json()
|
||||
except httpx.RequestError as e:
|
||||
_last_debug["error"] = f"OpenAI network error: {str(e)}"
|
||||
raise HTTPException(502, f"OpenAI network error: {e}")
|
||||
except httpx.HTTPStatusError as e:
|
||||
_last_debug["error"] = f"OpenAI HTTP error: {e.response.text}"
|
||||
raise HTTPException(502, f"OpenAI error: {e.response.text}")
|
||||
|
||||
try:
|
||||
content = data["choices"][0]["message"]["content"]
|
||||
cmd = json.loads(content)
|
||||
except Exception as e:
|
||||
_last_debug["error"] = f"Parse error: {str(e)}; raw={str(data)[:300]}"
|
||||
raise HTTPException(500, f"Failed to parse model JSON: {e}; raw={str(data)[:300]}")
|
||||
|
||||
# Forward to the agent
|
||||
async with httpx.AsyncClient(timeout=15) as client:
|
||||
r = await client.post(f"{AGENT_URL}/command", json=cmd)
|
||||
if r.status_code >= 400:
|
||||
raise HTTPException(r.status_code, f"Agent error: {r.text}")
|
||||
return r.json()
|
||||
_last_debug["agent_request"] = {"url": f"{AGENT_URL}/command", "json": cmd}
|
||||
try:
|
||||
async with httpx.AsyncClient(timeout=15) as client:
|
||||
r = await client.post(f"{AGENT_URL}/command", json=cmd)
|
||||
_last_debug["agent_response"] = {"status": r.status_code, "text": r.text[:500]}
|
||||
r.raise_for_status()
|
||||
return r.json()
|
||||
except httpx.RequestError as e:
|
||||
_last_debug["error"] = f"Agent network error: {str(e)}"
|
||||
raise HTTPException(502, f"Agent network error: {e}")
|
||||
except httpx.HTTPStatusError as e:
|
||||
_last_debug["error"] = f"Agent HTTP error: {e.response.text}"
|
||||
raise HTTPException(e.response.status_code, f"Agent error: {e.response.text}")
|
||||
|
||||
Reference in New Issue
Block a user