后端模型对话V1

pull/48/head
echo 2 months ago
parent b6cb7a58d8
commit bc65fec621

@ -1 +1,9 @@
DATABASE_URL=postgresql+asyncpg://echo:shenyongye123da%2A@192.168.43.228:5432/hadoop_fault_db
DATABASE_URL=postgresql+asyncpg://echo:shenyongye123da%2A@127.0.0.1:5432/hadoop_fault_db
DB_HOST=127.0.0.1
DB_PORT=5432
DB_NAME=hadoop_fault_db
DB_USER=echo
DB_PASSWORD=shenyongye123da*
# SECURITY(review): real credentials are committed in plaintext above and below.
# Rotate this database password and the API key, and move secrets out of version control.
LLM_API_KEY=sk-nmycwvibqotsoykzyxudcexkxwkechzdglksiynrkwfgwyqx
LLM_ENDPOINT=https://api.siliconflow.cn/v1
LLM_MODEL=deepseek-ai/DeepSeek-R1

@ -70,10 +70,71 @@ psql -h <host> -U <user> -d <db> -f ./doc/project/数据库建表脚本_postgres
> - `postgresql.conf` 中设置 `listen_addresses='*'`(或包含服务器地址)
> - `pg_hba.conf` 中允许来源网段,例如:`host all all 192.168.43.0/24 scram-sha-256`
## 启动数据库
PostgreSQL 15 启动与管理:
pg_ctl 方式(受限环境推荐):
```
# 启动
sudo -u postgres /usr/lib/postgresql/15/bin/pg_ctl -D /var/lib/postgresql/15/main -o "-c config_file=/etc/postgresql/15/main/postgresql.conf" -l /var/log/postgresql/postgresql-15-main.log start
# 状态
sudo -u postgres /usr/lib/postgresql/15/bin/pg_ctl -D /var/lib/postgresql/15/main status
# 停止
sudo -u postgres /usr/lib/postgresql/15/bin/pg_ctl -D /var/lib/postgresql/15/main stop
# 重启
sudo -u postgres /usr/lib/postgresql/15/bin/pg_ctl -D /var/lib/postgresql/15/main restart
```
systemd 方式(标准环境):
```
# 启动所有集群
sudo systemctl start postgresql
# 启动指定实例(根据系统实际单元名,可能为 postgresql@15-main)
sudo systemctl start postgresql@15-main
# 查看状态
sudo systemctl status postgresql
sudo systemctl status postgresql@15-main
```
进入 psql 命令行:
PGPASSWORD='shenyongye123da*' psql -h 127.0.0.1 -U echo -d hadoop_fault_db
开机自启动:
```
# 所有集群自启
sudo systemctl enable postgresql
# 指定实例自启
sudo systemctl enable postgresql@15-main
```
取消自启动与验证:
```
# 取消自启(指定实例)
sudo systemctl disable postgresql@15-main
# 查看是否启用(enabled/disabled)
systemctl is-enabled postgresql@15-main
# 重启后验证监听状态
sudo -u postgres /usr/lib/postgresql/15/bin/pg_isready
```
连接验证:
```
# 管理员
export PGPASSWORD='password'
psql -h 127.0.0.1 -U postgres -d hadoop_fault_db -c "SELECT 1;"
# 应用账户 echo
export PGPASSWORD='shenyongye123da*'
psql -h 127.0.0.1 -U echo -d hadoop_fault_db -c "SELECT current_user;"
```
日志查看:
```
sudo tail -n 100 /var/log/postgresql/postgresql-15-main.log
```
## 启动服务
开发模式(自动重载):
```
# 通用方式(推荐)
进入backend目录:
python -m uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
# Windows 下若未配置 python 到 PATH,可使用 Python Launcher

@ -2,7 +2,7 @@ from sqlalchemy.orm import Mapped, mapped_column
from sqlalchemy import String, Integer
from sqlalchemy.dialects.postgresql import UUID, JSONB
from sqlalchemy import TIMESTAMP
from app.models import Base
from . import Base
class Cluster(Base):
__tablename__ = "clusters"
@ -31,4 +31,4 @@ class Cluster(Base):
"config_info": self.config_info,
"created_at": self.created_at.isoformat() if self.created_at else None,
"updated_at": self.updated_at.isoformat() if self.updated_at else None,
}
}

@ -2,7 +2,7 @@ from sqlalchemy.orm import Mapped, mapped_column
from sqlalchemy import String, Integer
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy import TIMESTAMP, Text
from app.models import Base
from . import Base
class ExecLog(Base):
__tablename__ = "exec_logs"
@ -36,4 +36,4 @@ class ExecLog(Base):
"start_time": self.start_time.isoformat() if self.start_time else None,
"end_time": self.end_time.isoformat() if self.end_time else None,
"exit_code": self.exit_code,
}
}

@ -2,7 +2,7 @@ from sqlalchemy.orm import Mapped, mapped_column
from sqlalchemy import String
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy import TIMESTAMP
from app.models import Base
from . import Base
class FaultRecord(Base):
__tablename__ = "fault_records"
@ -35,4 +35,4 @@ class FaultRecord(Base):
"title": self.title,
"status": self.status,
"created_at": self.created_at.isoformat() if self.created_at else None,
}
}

@ -2,7 +2,7 @@ from sqlalchemy.orm import Mapped, mapped_column
from sqlalchemy import String
from sqlalchemy.dialects.postgresql import UUID, INET
from sqlalchemy import TIMESTAMP, Float
from app.models import Base
from . import Base
class Node(Base):
__tablename__ = "nodes"
@ -19,4 +19,3 @@ class Node(Base):
last_heartbeat: Mapped[str | None] = mapped_column(TIMESTAMP(timezone=True), nullable=True)
created_at: Mapped[str] = mapped_column(TIMESTAMP(timezone=True))
updated_at: Mapped[str] = mapped_column(TIMESTAMP(timezone=True))

@ -1,7 +1,7 @@
from sqlalchemy.orm import Mapped, mapped_column
from sqlalchemy import String, Boolean
from sqlalchemy import TIMESTAMP, Text
from app.models import Base
from . import Base
class SystemLog(Base):
__tablename__ = "system_logs"

@ -2,11 +2,13 @@ from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, func, text
from pydantic import BaseModel, Field
import os
from ..db import get_db
from ..deps.auth import get_current_user
from ..models.system_logs import SystemLog
from ..agents.diagnosis_agent import run_diagnose_and_repair
from ..services.llm import LLMClient
router = APIRouter()
@ -20,6 +22,9 @@ class DiagnoseRepairReq(BaseModel):
auto: bool = Field(True, description="是否允许自动修复")
maxSteps: int = Field(3, ge=1, le=6, description="最多工具步数")
class ChatReq(BaseModel):
    """Request body for POST /ai/chat.

    ``messages`` is an OpenAI-compatible chat transcript, e.g.
    ``[{"role": "user", "content": "hi"}]``.  Per-item key validation is
    left to the LLM provider — TODO confirm whether stricter schema
    validation is wanted here.
    """

    messages: list[dict] = Field(..., description="对话消息列表,形如[{role:'system'|'user'|'assistant', content:'...'}]")
def _get_username(u) -> str:
return getattr(u, "username", None) or (u.get("username") if isinstance(u, dict) else None) or "system"
@ -49,3 +54,17 @@ async def diagnose_repair(req: DiagnoseRepairReq, user=Depends(get_current_user)
except Exception:
raise HTTPException(status_code=500, detail="server_error")
@router.post("/ai/chat")
async def ai_chat(req: ChatReq, user=Depends(get_current_user)):
try:
llm = LLMClient()
resp = llm.chat(req.messages, tools=None, stream=False)
choices = resp.get("choices") or []
if not choices:
raise HTTPException(status_code=502, detail="llm_unavailable")
msg = choices[0].get("message") or {}
return {"reply": msg.get("content") or ""}
except HTTPException:
raise
except Exception:
raise HTTPException(status_code=500, detail="server_error")

@ -1,40 +1,63 @@
import os
from typing import Any, Dict, Iterable, List, Optional
from dotenv import load_dotenv
try:
import httpx
except Exception: # pragma: no cover
httpx = None
load_dotenv()
class LLMClient:
"""供应商大模型客户端,封装聊天与函数调用。
# Fallback chat-completions endpoint per provider, used when LLM_ENDPOINT
# is unset or empty.
_DEFAULT_ENDPOINTS: Dict[str, str] = {
    "openai": "https://api.openai.com/v1/chat/completions",
    "siliconflow": "https://api.siliconflow.cn/v1/chat/completions",
    "deepseek": "https://api.deepseek.com/v1/chat/completions",
}

# Fallback model name per provider, used when LLM_MODEL is unset.
_DEFAULT_MODELS: Dict[str, str] = {
    "openai": "gpt-4o-mini",
    "siliconflow": "deepseek-ai/DeepSeek-R1",
    "deepseek": "deepseek-ai/DeepSeek-R1",
}
def _clean_str(s: str) -> str:
if s is None:
return ""
s = s.strip()
if (s.startswith("`") and s.endswith("`")) or (s.startswith('"') and s.endswith('"')) or (s.startswith("'") and s.endswith("'")):
s = s[1:-1].strip()
return s
def _normalize_endpoint(ep: str) -> str:
    """Normalize an LLM endpoint URL to the full ``.../chat/completions`` form.

    A base URL ending in ``/v1`` gets ``/chat/completions`` appended; an
    already-complete URL passes through; any other value is returned
    cleaned of wrapping quotes and trailing slashes but otherwise untouched.
    """
    if not ep:
        return ep
    url = _clean_str(ep).rstrip("/")
    if url.endswith("/chat/completions"):
        return url
    if url.endswith("/v1"):
        return url + "/chat/completions"
    return url
- 通过环境变量配置LLM_PROVIDER/LLM_ENDPOINT/LLM_MODEL/LLM_API_KEY
- 提供 chat(messages, tools, stream) 接口返回供应商原始响应字典
"""
class LLMClient:
def __init__(self):
    """Read provider configuration from environment variables.

    Recognized variables: LLM_PROVIDER, LLM_ENDPOINT, LLM_MODEL,
    LLM_API_KEY (with OPENAI_/DEEPSEEK_/SILICONFLOW_API_KEY fallbacks),
    LLM_SIMULATE and LLM_TIMEOUT.  The stale pre-merge assignments that
    were immediately overwritten (dead stores left over from the diff)
    have been removed.
    """
    self.provider = os.getenv("LLM_PROVIDER", "openai").strip().lower()
    # Fall back to the provider's default endpoint when LLM_ENDPOINT is
    # unset or empty, then normalize to .../chat/completions.
    raw_endpoint = os.getenv("LLM_ENDPOINT", "") or _DEFAULT_ENDPOINTS.get(self.provider, _DEFAULT_ENDPOINTS["openai"])
    self.endpoint = _normalize_endpoint(raw_endpoint)
    self.model = _clean_str(os.getenv("LLM_MODEL", _DEFAULT_MODELS.get(self.provider, "gpt-4o-mini")))
    # Accept several common key variable names so existing environments
    # keep working without renaming.
    self.api_key = (
        os.getenv("LLM_API_KEY")
        or os.getenv("OPENAI_API_KEY")
        or os.getenv("DEEPSEEK_API_KEY")
        or os.getenv("SILICONFLOW_API_KEY")
        or ""
    )
    # LLM_SIMULATE=true makes chat() short-circuit to a local stub
    # response instead of performing a network call.
    self.simulate = os.getenv("LLM_SIMULATE", "false").lower() == "true"
    self.timeout = int(os.getenv("LLM_TIMEOUT", "30"))
def _headers(self) -> Dict[str, str]:
"""构造 HTTP 请求头。"""
return {
"Authorization": f"Bearer {self.api_key}" if self.api_key else "",
"Content-Type": "application/json",
}
def chat(self, messages: List[Dict[str, Any]], tools: Optional[List[Dict[str, Any]]] = None, stream: bool = False) -> Dict[str, Any]:
"""调用供应商聊天接口,支持函数调用工具描述。
- messagesOpenAI 兼容的消息列表
- toolsOpenAI 兼容的函数调用工具定义JSON Schema
- stream是否流式此处返回一次性结果SSE/WebSocket 由路由层实现
"""
if self.simulate or httpx is None:
return {
"choices": [
@ -51,8 +74,7 @@ class LLMClient:
if tools:
payload["tools"] = tools
payload["tool_choice"] = "auto"
with httpx.Client(timeout=30) as client:
with httpx.Client(timeout=self.timeout) as client:
resp = client.post(self.endpoint, headers=self._headers(), json=payload)
resp.raise_for_status()
return resp.json()

@ -6,3 +6,6 @@ python-dotenv
passlib[bcrypt]
bcrypt==3.2.0
PyJWT
langchain
langchain-openai
httpx

Loading…
Cancel
Save