parent 2a1fefc8d4
commit cee4cefc28
@@ -0,0 +1 @@

@@ -0,0 +1,9 @@
import os
from dotenv import load_dotenv

load_dotenv()

DATABASE_URL = os.getenv(
    "DATABASE_URL",
    "postgresql+asyncpg://postgres:password@localhost:5432/hadoop_fault_db",
)
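Since load_dotenv() runs at import time, the connection string above is only a fallback. A hypothetical .env file (host and credentials are placeholders, not from this commit) would override it:

# .env — read by load_dotenv() from the working directory
DATABASE_URL=postgresql+asyncpg://fault_user:s3cret@db-host:5432/hadoop_fault_db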
@@ -0,0 +1,10 @@
from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker, AsyncSession
from app.config import DATABASE_URL

engine = create_async_engine(DATABASE_URL, echo=False, pool_pre_ping=True)
SessionLocal = async_sessionmaker(engine, expire_on_commit=False, class_=AsyncSession)

async def get_db() -> AsyncSession:
    """Yield an async database session for dependency injection."""
    async with SessionLocal() as session:
        yield session
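A minimal sketch (not part of the commit) of exercising the same session factory outside FastAPI, assuming the app package is importable and the database is reachable:

import asyncio
from sqlalchemy import text
from app.db import SessionLocal

async def main() -> None:
    # Open a session from the factory the get_db dependency also uses.
    async with SessionLocal() as session:
        result = await session.execute(text("SELECT 1"))
        print(result.scalar_one())  # prints 1 if the database is reachable

asyncio.run(main())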
@@ -0,0 +1,18 @@
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from app.routers import health, clusters, faults, logs

app = FastAPI(title="Hadoop Fault Detection API", version="v1")

app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

app.include_router(health.router, prefix="/api/v1")
app.include_router(clusters.router, prefix="/api/v1")
app.include_router(faults.router, prefix="/api/v1")
app.include_router(logs.router, prefix="/api/v1")
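To serve this app, a minimal sketch equivalent to running `uvicorn app.main:app` (host and port are illustrative; uvicorn[standard] is already in requirements.txt):

# run.py — starts the ASGI server for the app wired above
import uvicorn

if __name__ == "__main__":
    uvicorn.run("app.main:app", host="0.0.0.0", port=8000, reload=True)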
@@ -0,0 +1,4 @@
from sqlalchemy.orm import DeclarativeBase

class Base(DeclarativeBase):
    pass
@@ -0,0 +1,34 @@
from datetime import datetime
from sqlalchemy.orm import Mapped, mapped_column
from sqlalchemy import String, Integer, TIMESTAMP
from sqlalchemy.dialects.postgresql import UUID, JSONB
from app.models import Base

class Cluster(Base):
    __tablename__ = "clusters"

    id: Mapped[int] = mapped_column(primary_key=True)
    uuid: Mapped[str] = mapped_column(UUID(as_uuid=False), unique=True)
    name: Mapped[str] = mapped_column(String(100), unique=True)
    type: Mapped[str] = mapped_column(String(50))
    node_count: Mapped[int] = mapped_column(Integer, default=0)
    health_status: Mapped[str] = mapped_column(String(20), default="unknown")
    description: Mapped[str | None] = mapped_column(String, nullable=True)
    config_info: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    created_at: Mapped[datetime] = mapped_column(TIMESTAMP(timezone=True))
    updated_at: Mapped[datetime] = mapped_column(TIMESTAMP(timezone=True))

    def to_dict(self) -> dict:
        """Convert the cluster object to a serializable dict."""
        return {
            "id": self.id,
            "uuid": self.uuid,
            "name": self.name,
            "type": self.type,
            "node_count": self.node_count,
            "health_status": self.health_status,
            "description": self.description,
            "config_info": self.config_info,
            "created_at": self.created_at.isoformat() if self.created_at else None,
            "updated_at": self.updated_at.isoformat() if self.updated_at else None,
        }
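A quick sketch (all values made up for illustration) of how a Cluster instance serializes through to_dict():

from datetime import datetime, timezone
from app.models.clusters import Cluster

cluster = Cluster(
    uuid="3f2b8c9e-1111-4a2b-9c3d-5e6f7a8b9c0d",
    name="hdfs-prod",
    type="hadoop",
    node_count=12,
    health_status="healthy",
    created_at=datetime.now(timezone.utc),
    updated_at=datetime.now(timezone.utc),
)
# Timestamps come back as ISO-8601 strings; unset columns serialize as None.
print(cluster.to_dict()["created_at"])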
@@ -0,0 +1,39 @@
from datetime import datetime
from sqlalchemy.orm import Mapped, mapped_column
from sqlalchemy import String, Integer, TIMESTAMP, Text
from sqlalchemy.dialects.postgresql import JSONB
from app.models import Base

class ExecLog(Base):
    __tablename__ = "exec_logs"

    id: Mapped[int] = mapped_column(primary_key=True)
    exec_id: Mapped[str] = mapped_column(String(32), unique=True)
    fault_id: Mapped[str] = mapped_column(String(32))
    command_type: Mapped[str] = mapped_column(String(50))
    script_path: Mapped[str | None] = mapped_column(String(255), nullable=True)
    command_content: Mapped[str] = mapped_column(Text)
    target_nodes: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    risk_level: Mapped[str] = mapped_column(String(20), default="medium")
    execution_status: Mapped[str] = mapped_column(String(20), default="pending")
    start_time: Mapped[datetime | None] = mapped_column(TIMESTAMP(timezone=True), nullable=True)
    end_time: Mapped[datetime | None] = mapped_column(TIMESTAMP(timezone=True), nullable=True)
    duration: Mapped[int | None] = mapped_column(Integer, nullable=True)
    stdout_log: Mapped[str | None] = mapped_column(Text, nullable=True)
    stderr_log: Mapped[str | None] = mapped_column(Text, nullable=True)
    exit_code: Mapped[int | None] = mapped_column(Integer, nullable=True)
    operator: Mapped[str] = mapped_column(String(50), default="system")
    created_at: Mapped[datetime] = mapped_column(TIMESTAMP(timezone=True))
    updated_at: Mapped[datetime] = mapped_column(TIMESTAMP(timezone=True))

    def to_dict(self) -> dict:
        """Convert the execution log to a serializable dict."""
        return {
            "exec_id": self.exec_id,
            "fault_id": self.fault_id,
            "command_type": self.command_type,
            "execution_status": self.execution_status,
            "start_time": self.start_time.isoformat() if self.start_time else None,
            "end_time": self.end_time.isoformat() if self.end_time else None,
            "exit_code": self.exit_code,
        }
@@ -0,0 +1,38 @@
from datetime import datetime
from sqlalchemy.orm import Mapped, mapped_column
from sqlalchemy import String, TIMESTAMP
from sqlalchemy.dialects.postgresql import JSONB
from app.models import Base

class FaultRecord(Base):
    __tablename__ = "fault_records"

    id: Mapped[int] = mapped_column(primary_key=True)
    fault_id: Mapped[str] = mapped_column(String(32), unique=True)
    cluster_id: Mapped[int | None] = mapped_column(nullable=True)
    fault_type: Mapped[str] = mapped_column(String(50))
    fault_level: Mapped[str] = mapped_column(String(20), default="medium")
    title: Mapped[str] = mapped_column(String(200))
    description: Mapped[str | None] = mapped_column(String, nullable=True)
    affected_nodes: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    affected_clusters: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    root_cause: Mapped[str | None] = mapped_column(String, nullable=True)
    repair_suggestion: Mapped[str | None] = mapped_column(String, nullable=True)
    status: Mapped[str] = mapped_column(String(20), default="detected")
    assignee: Mapped[str | None] = mapped_column(String(50), nullable=True)
    reporter: Mapped[str] = mapped_column(String(50), default="system")
    created_at: Mapped[datetime] = mapped_column(TIMESTAMP(timezone=True))
    updated_at: Mapped[datetime] = mapped_column(TIMESTAMP(timezone=True))
    resolved_at: Mapped[datetime | None] = mapped_column(TIMESTAMP(timezone=True), nullable=True)

    def to_dict(self) -> dict:
        """Convert the fault record to a serializable dict."""
        return {
            "fault_id": self.fault_id,
            "cluster_id": self.cluster_id,
            "fault_type": self.fault_type,
            "fault_level": self.fault_level,
            "title": self.title,
            "status": self.status,
            "created_at": self.created_at.isoformat() if self.created_at else None,
        }
@@ -0,0 +1,33 @@
from datetime import datetime
from sqlalchemy.orm import Mapped, mapped_column
from sqlalchemy import String, Boolean, TIMESTAMP, Text
from app.models import Base

class SystemLog(Base):
    __tablename__ = "system_logs"

    id: Mapped[int] = mapped_column(primary_key=True)
    log_id: Mapped[str] = mapped_column(String(32), unique=True)
    fault_id: Mapped[str | None] = mapped_column(String(32), nullable=True)
    cluster_id: Mapped[int | None] = mapped_column(nullable=True)
    timestamp: Mapped[datetime] = mapped_column(TIMESTAMP(timezone=True))
    host: Mapped[str] = mapped_column(String(100))
    service: Mapped[str] = mapped_column(String(50))
    log_level: Mapped[str] = mapped_column(String(10))
    message: Mapped[str] = mapped_column(Text)
    exception: Mapped[str | None] = mapped_column(Text, nullable=True)
    raw_log: Mapped[str | None] = mapped_column(Text, nullable=True)
    processed: Mapped[bool] = mapped_column(Boolean, default=False)
    created_at: Mapped[datetime] = mapped_column(TIMESTAMP(timezone=True))

    def to_dict(self) -> dict:
        """Convert the system log to a serializable dict."""
        return {
            "log_id": self.log_id,
            "cluster_id": self.cluster_id,
            "timestamp": self.timestamp.isoformat() if self.timestamp else None,
            "service": self.service,
            "log_level": self.log_level,
            "message": self.message,
            "processed": self.processed,
        }
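No migration appears in this commit, so presumably the tables are created straight from the declarative metadata. A minimal sketch (an assumption, not code from the commit) using the async engine:

import asyncio
from app.db import engine
from app.models import Base
# Importing the model modules registers their tables on Base.metadata.
from app.models import clusters, exec_logs, fault_records, system_logs  # noqa: F401

async def create_tables() -> None:
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)

asyncio.run(create_tables())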
@@ -0,0 +1 @@

@@ -0,0 +1,14 @@
from fastapi import APIRouter, Depends
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from app.db import get_db
from app.models.clusters import Cluster

router = APIRouter()

@router.get("/clusters")
async def list_clusters(db: AsyncSession = Depends(get_db)):
    """List clusters."""
    result = await db.execute(select(Cluster).limit(100))
    rows = result.scalars().all()
    return {"total": len(rows), "list": [c.to_dict() for c in rows]}
@@ -0,0 +1,14 @@
from fastapi import APIRouter, Depends
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from app.db import get_db
from app.models.fault_records import FaultRecord

router = APIRouter()

@router.get("/faults")
async def list_faults(db: AsyncSession = Depends(get_db)):
    """List fault records."""
    result = await db.execute(select(FaultRecord).limit(100))
    rows = result.scalars().all()
    return {"total": len(rows), "list": [f.to_dict() for f in rows]}
@@ -0,0 +1,12 @@
from fastapi import APIRouter, Depends
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from app.db import get_db

router = APIRouter()

@router.get("/health")
async def health_check(db: AsyncSession = Depends(get_db)):
    """Health check: verify database connectivity."""
    await db.execute(text("SELECT 1"))
    return {"status": "ok"}
@@ -0,0 +1,23 @@
from fastapi import APIRouter, Depends, Query
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from app.db import get_db
from app.models.system_logs import SystemLog

router = APIRouter()

@router.get("/logs")
async def list_logs(
    db: AsyncSession = Depends(get_db),
    level: str | None = Query(None),
    page: int = Query(1, ge=1),
    pageSize: int = Query(10, ge=1, le=100),
):
    """List system logs with optional level filtering and pagination."""
    stmt = select(SystemLog)
    if level:
        stmt = stmt.where(SystemLog.log_level == level)
    stmt = stmt.offset((page - 1) * pageSize).limit(pageSize)
    result = await db.execute(stmt)
    rows = result.scalars().all()
    return {"total": len(rows), "list": [log.to_dict() for log in rows]}
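A usage sketch for the paginated endpoint (assumes the server is on localhost:8000 and httpx is installed; neither is pinned by this commit). Note that "total" is computed as len(rows) of the current page, not the overall row count:

import httpx

resp = httpx.get(
    "http://localhost:8000/api/v1/logs",
    params={"level": "ERROR", "page": 1, "pageSize": 20},
)
resp.raise_for_status()
payload = resp.json()
# "total" and len(payload["list"]) are equal: the handler only counts this page.
print(payload["total"], len(payload["list"]))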
@@ -0,0 +1,9 @@
import os
from dotenv import load_dotenv

load_dotenv()

DATABASE_URL = os.getenv(
    "DATABASE_URL",
    "postgresql+asyncpg://postgres:password@localhost:5432/hadoop_fault_db",
)
@@ -0,0 +1,10 @@
from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker, AsyncSession
from .config import DATABASE_URL

engine = create_async_engine(DATABASE_URL, echo=False, pool_pre_ping=True)
SessionLocal = async_sessionmaker(engine, expire_on_commit=False, class_=AsyncSession)

async def get_db() -> AsyncSession:
    """Yield an async database session for dependency injection."""
    async with SessionLocal() as session:
        yield session
@@ -0,0 +1,18 @@
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from .routers import health, clusters, faults, logs

app = FastAPI(title="Hadoop Fault Detection API", version="v1")

app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

app.include_router(health.router, prefix="/api/v1")
app.include_router(clusters.router, prefix="/api/v1")
app.include_router(faults.router, prefix="/api/v1")
app.include_router(logs.router, prefix="/api/v1")
@@ -0,0 +1,4 @@
from sqlalchemy.orm import DeclarativeBase

class Base(DeclarativeBase):
    pass
@@ -0,0 +1,34 @@
from datetime import datetime
from sqlalchemy.orm import Mapped, mapped_column
from sqlalchemy import String, Integer, TIMESTAMP
from sqlalchemy.dialects.postgresql import UUID, JSONB
from . import Base

class Cluster(Base):
    __tablename__ = "clusters"

    id: Mapped[int] = mapped_column(primary_key=True)
    uuid: Mapped[str] = mapped_column(UUID(as_uuid=False), unique=True)
    name: Mapped[str] = mapped_column(String(100), unique=True)
    type: Mapped[str] = mapped_column(String(50))
    node_count: Mapped[int] = mapped_column(Integer, default=0)
    health_status: Mapped[str] = mapped_column(String(20), default="unknown")
    description: Mapped[str | None] = mapped_column(String, nullable=True)
    config_info: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    created_at: Mapped[datetime] = mapped_column(TIMESTAMP(timezone=True))
    updated_at: Mapped[datetime] = mapped_column(TIMESTAMP(timezone=True))

    def to_dict(self) -> dict:
        """Convert the cluster object to a serializable dict."""
        return {
            "id": self.id,
            "uuid": self.uuid,
            "name": self.name,
            "type": self.type,
            "node_count": self.node_count,
            "health_status": self.health_status,
            "description": self.description,
            "config_info": self.config_info,
            "created_at": self.created_at.isoformat() if self.created_at else None,
            "updated_at": self.updated_at.isoformat() if self.updated_at else None,
        }
@@ -0,0 +1,39 @@
from datetime import datetime
from sqlalchemy.orm import Mapped, mapped_column
from sqlalchemy import String, Integer, TIMESTAMP, Text
from sqlalchemy.dialects.postgresql import JSONB
from . import Base

class ExecLog(Base):
    __tablename__ = "exec_logs"

    id: Mapped[int] = mapped_column(primary_key=True)
    exec_id: Mapped[str] = mapped_column(String(32), unique=True)
    fault_id: Mapped[str] = mapped_column(String(32))
    command_type: Mapped[str] = mapped_column(String(50))
    script_path: Mapped[str | None] = mapped_column(String(255), nullable=True)
    command_content: Mapped[str] = mapped_column(Text)
    target_nodes: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    risk_level: Mapped[str] = mapped_column(String(20), default="medium")
    execution_status: Mapped[str] = mapped_column(String(20), default="pending")
    start_time: Mapped[datetime | None] = mapped_column(TIMESTAMP(timezone=True), nullable=True)
    end_time: Mapped[datetime | None] = mapped_column(TIMESTAMP(timezone=True), nullable=True)
    duration: Mapped[int | None] = mapped_column(Integer, nullable=True)
    stdout_log: Mapped[str | None] = mapped_column(Text, nullable=True)
    stderr_log: Mapped[str | None] = mapped_column(Text, nullable=True)
    exit_code: Mapped[int | None] = mapped_column(Integer, nullable=True)
    operator: Mapped[str] = mapped_column(String(50), default="system")
    created_at: Mapped[datetime] = mapped_column(TIMESTAMP(timezone=True))
    updated_at: Mapped[datetime] = mapped_column(TIMESTAMP(timezone=True))

    def to_dict(self) -> dict:
        """Convert the execution log to a serializable dict."""
        return {
            "exec_id": self.exec_id,
            "fault_id": self.fault_id,
            "command_type": self.command_type,
            "execution_status": self.execution_status,
            "start_time": self.start_time.isoformat() if self.start_time else None,
            "end_time": self.end_time.isoformat() if self.end_time else None,
            "exit_code": self.exit_code,
        }
@@ -0,0 +1,38 @@
from datetime import datetime
from sqlalchemy.orm import Mapped, mapped_column
from sqlalchemy import String, TIMESTAMP
from sqlalchemy.dialects.postgresql import JSONB
from . import Base

class FaultRecord(Base):
    __tablename__ = "fault_records"

    id: Mapped[int] = mapped_column(primary_key=True)
    fault_id: Mapped[str] = mapped_column(String(32), unique=True)
    cluster_id: Mapped[int | None] = mapped_column(nullable=True)
    fault_type: Mapped[str] = mapped_column(String(50))
    fault_level: Mapped[str] = mapped_column(String(20), default="medium")
    title: Mapped[str] = mapped_column(String(200))
    description: Mapped[str | None] = mapped_column(String, nullable=True)
    affected_nodes: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    affected_clusters: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    root_cause: Mapped[str | None] = mapped_column(String, nullable=True)
    repair_suggestion: Mapped[str | None] = mapped_column(String, nullable=True)
    status: Mapped[str] = mapped_column(String(20), default="detected")
    assignee: Mapped[str | None] = mapped_column(String(50), nullable=True)
    reporter: Mapped[str] = mapped_column(String(50), default="system")
    created_at: Mapped[datetime] = mapped_column(TIMESTAMP(timezone=True))
    updated_at: Mapped[datetime] = mapped_column(TIMESTAMP(timezone=True))
    resolved_at: Mapped[datetime | None] = mapped_column(TIMESTAMP(timezone=True), nullable=True)

    def to_dict(self) -> dict:
        """Convert the fault record to a serializable dict."""
        return {
            "fault_id": self.fault_id,
            "cluster_id": self.cluster_id,
            "fault_type": self.fault_type,
            "fault_level": self.fault_level,
            "title": self.title,
            "status": self.status,
            "created_at": self.created_at.isoformat() if self.created_at else None,
        }
@@ -0,0 +1,33 @@
from datetime import datetime
from sqlalchemy.orm import Mapped, mapped_column
from sqlalchemy import String, Boolean, TIMESTAMP, Text
from . import Base

class SystemLog(Base):
    __tablename__ = "system_logs"

    id: Mapped[int] = mapped_column(primary_key=True)
    log_id: Mapped[str] = mapped_column(String(32), unique=True)
    fault_id: Mapped[str | None] = mapped_column(String(32), nullable=True)
    cluster_id: Mapped[int | None] = mapped_column(nullable=True)
    timestamp: Mapped[datetime] = mapped_column(TIMESTAMP(timezone=True))
    host: Mapped[str] = mapped_column(String(100))
    service: Mapped[str] = mapped_column(String(50))
    log_level: Mapped[str] = mapped_column(String(10))
    message: Mapped[str] = mapped_column(Text)
    exception: Mapped[str | None] = mapped_column(Text, nullable=True)
    raw_log: Mapped[str | None] = mapped_column(Text, nullable=True)
    processed: Mapped[bool] = mapped_column(Boolean, default=False)
    created_at: Mapped[datetime] = mapped_column(TIMESTAMP(timezone=True))

    def to_dict(self) -> dict:
        """Convert the system log to a serializable dict."""
        return {
            "log_id": self.log_id,
            "cluster_id": self.cluster_id,
            "timestamp": self.timestamp.isoformat() if self.timestamp else None,
            "service": self.service,
            "log_level": self.log_level,
            "message": self.message,
            "processed": self.processed,
        }
@@ -0,0 +1,14 @@
from fastapi import APIRouter, Depends
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from ..db import get_db
from ..models.clusters import Cluster

router = APIRouter()

@router.get("/clusters")
async def list_clusters(db: AsyncSession = Depends(get_db)):
    """List clusters."""
    result = await db.execute(select(Cluster).limit(100))
    rows = result.scalars().all()
    return {"total": len(rows), "list": [c.to_dict() for c in rows]}
@@ -0,0 +1,14 @@
from fastapi import APIRouter, Depends
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from ..db import get_db
from ..models.fault_records import FaultRecord

router = APIRouter()

@router.get("/faults")
async def list_faults(db: AsyncSession = Depends(get_db)):
    """List fault records."""
    result = await db.execute(select(FaultRecord).limit(100))
    rows = result.scalars().all()
    return {"total": len(rows), "list": [f.to_dict() for f in rows]}
@@ -0,0 +1,12 @@
from fastapi import APIRouter, Depends
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from ..db import get_db

router = APIRouter()

@router.get("/health")
async def health_check(db: AsyncSession = Depends(get_db)):
    """Health check: verify database connectivity."""
    await db.execute(text("SELECT 1"))
    return {"status": "ok"}
@@ -0,0 +1,23 @@
from fastapi import APIRouter, Depends, Query
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from ..db import get_db
from ..models.system_logs import SystemLog

router = APIRouter()

@router.get("/logs")
async def list_logs(
    db: AsyncSession = Depends(get_db),
    level: str | None = Query(None),
    page: int = Query(1, ge=1),
    pageSize: int = Query(10, ge=1, le=100),
):
    """List system logs with optional level filtering and pagination."""
    stmt = select(SystemLog)
    if level:
        stmt = stmt.where(SystemLog.log_level == level)
    stmt = stmt.offset((page - 1) * pageSize).limit(pageSize)
    result = await db.execute(stmt)
    rows = result.scalars().all()
    return {"total": len(rows), "list": [log.to_dict() for log in rows]}
@@ -1,3 +1,6 @@
fastapi
uvicorn[standard]
psycopg2-binary
SQLAlchemy
asyncpg
python-dotenv