added_ai_proxy

This commit is contained in:
lusixing
2026-01-26 15:57:52 -08:00
parent 22117cf9e8
commit e3fa788318
5 changed files with 150 additions and 2 deletions

View File

@@ -47,4 +47,22 @@ async def init_db():
)
session.add(admin_user)
await session.commit()
print("✅ Default admin user created (username: admin, password: admin123)")
# 检查是否已存在 Gemini 配置
result = await session.execute(
select(models.AIConfig).where(models.AIConfig.provider_name == "gemini")
)
existing_gemini = result.scalars().first()
if not existing_gemini:
gemini_config = models.AIConfig(
provider_name="gemini",
api_key=os.getenv("GEMINI_API_KEY", "your-gemini-api-key"),
api_url="https://generativelanguage.googleapis.com/v1beta/openai/chat/completions",
default_model="gemini-3-flash-preview",
is_active=True
)
session.add(gemini_config)
await session.commit()
print("✅ Default Gemini AI configuration created")

View File

@@ -6,6 +6,7 @@ from . import models, schemas, auth, database
from passlib.context import CryptContext
from sqlalchemy.exc import IntegrityError
from contextlib import asynccontextmanager
import httpx
@asynccontextmanager
async def lifespan(app: FastAPI):
@@ -212,3 +213,53 @@ async def test1():
c = a+b
return {"msg": f"this is a msg {c}"}
@app.post("/ai/proxy", response_model=schemas.AIResponse)
async def ai_proxy(
    ai_request: schemas.AIRequest,
    current_user: models.User = Depends(auth.get_current_user),
    db: AsyncSession = Depends(database.get_db)
):
    """
    Proxy relay for AI chat-completion requests.

    Looks up the first active AI provider configuration in the database,
    forwards the client's request payload to that provider's API, and
    returns the provider's JSON response verbatim.

    Raises:
        HTTPException(500): no active AI configuration exists, or the
            provider could not be reached.
        HTTPException(<provider status>): the provider returned an HTTP
            error; its status code and body are propagated.
    """
    # Fetch the first active AI config (first matching row wins).
    result = await db.execute(
        select(models.AIConfig).where(models.AIConfig.is_active.is_(True))
    )
    config = result.scalars().first()
    if not config:
        raise HTTPException(status_code=500, detail="AI configuration not found")

    headers = {
        "Authorization": f"Bearer {config.api_key}",
        "Content-Type": "application/json",
    }

    # Build the upstream payload. Honor the model explicitly requested by
    # the client (AIRequest.model is an optional override); fall back to
    # the configured default only when the client did not specify one.
    payload = ai_request.model_dump()
    if not payload.get("model"):
        payload["model"] = config.default_model

    async with httpx.AsyncClient() as client:
        try:
            response = await client.post(
                config.api_url,
                json=payload,
                headers=headers,
                timeout=30.0,
            )
            response.raise_for_status()
            return response.json()
        except httpx.HTTPStatusError as e:
            # Propagate the provider's own status code so the client can
            # distinguish auth failures, rate limits, etc.
            raise HTTPException(
                status_code=e.response.status_code,
                detail=f"AI provider returned an error: {e.response.text}"
            )
        except httpx.RequestError as e:
            # Network-level failure (DNS, timeout, connection refused, ...).
            raise HTTPException(
                status_code=500,
                detail=f"An error occurred while requesting AI provider: {str(e)}"
            )

View File

@@ -59,4 +59,21 @@ class AssetAssign(BaseModel):
heir_name: str
class DeclareGuale(BaseModel):
username: str
# AI Proxy Schemas
class AIMessage(BaseModel):
    """A single chat message in the OpenAI-compatible request format."""

    # Message author role — typically "system", "user", or "assistant"
    # (not validated here; forwarded as-is to the provider).
    role: str
    # Text content of the message.
    content: str
class AIRequest(BaseModel):
    """Client payload accepted by the /ai/proxy endpoint."""

    # Conversation history to forward to the AI provider.
    messages: List[AIMessage]
    # Optional model name requested by the client; server-side
    # configuration may substitute its own default.
    model: Optional[str] = None
class AIResponse(BaseModel):
    """OpenAI-style chat-completion response relayed back to the client."""

    # Provider-assigned response identifier.
    id: str
    # Response object type string (e.g. "chat.completion" — per provider).
    object: str
    # Creation time as a Unix timestamp (provider-supplied).
    created: int
    # Model that actually served the request.
    model: str
    # Completion choices; kept as raw dicts so provider-specific fields
    # pass through unmodified.
    choices: List[dict]
    # Token accounting as returned by the provider.
    usage: dict