From e3fa788318834d10a1416d59d65a68eb9c4d3b0a Mon Sep 17 00:00:00 2001 From: lusixing <32328454+lusixing@users.noreply.github.com> Date: Mon, 26 Jan 2026 15:57:52 -0800 Subject: [PATCH] Add AI proxy endpoint --- app/database.py | 20 +++++++++++++- app/main.py | 51 ++++++++++++++++++++++++++++++++++++ app/schemas.py | 19 +++++++++++++- docker-compose.yml | 1 + test/test_ai_proxy.py | 61 +++++++++++++++++++++++++++++++++++++++++++ 5 files changed, 150 insertions(+), 2 deletions(-) create mode 100644 test/test_ai_proxy.py diff --git a/app/database.py b/app/database.py index ce12be0..aaaff19 100644 --- a/app/database.py +++ b/app/database.py @@ -47,4 +47,22 @@ async def init_db(): ) session.add(admin_user) await session.commit() - print("✅ Default admin user created (username: admin, password: admin123)") \ No newline at end of file + print("✅ Default admin user created (username: admin, password: admin123)") + + # Check whether a Gemini configuration already exists + result = await session.execute( + select(models.AIConfig).where(models.AIConfig.provider_name == "gemini") + ) + existing_gemini = result.scalars().first() + + if not existing_gemini: + gemini_config = models.AIConfig( + provider_name="gemini", + api_key=os.getenv("GEMINI_API_KEY", "your-gemini-api-key"), + api_url="https://generativelanguage.googleapis.com/v1beta/openai/chat/completions", + default_model="gemini-3-flash-preview", + is_active=True + ) + session.add(gemini_config) + await session.commit() + print("✅ Default Gemini AI configuration created") \ No newline at end of file diff --git a/app/main.py b/app/main.py index 42de947..1fd127c 100644 --- a/app/main.py +++ b/app/main.py @@ -6,6 +6,7 @@ from .
import models, schemas, auth, database from passlib.context import CryptContext from sqlalchemy.exc import IntegrityError from contextlib import asynccontextmanager +import httpx @asynccontextmanager async def lifespan(app: FastAPI): @@ -212,3 +213,53 @@ async def test1(): c = a+b return {"msg": f"this is a msg {c}"} + +@app.post("/ai/proxy", response_model=schemas.AIResponse) +async def ai_proxy( + ai_request: schemas.AIRequest, + current_user: models.User = Depends(auth.get_current_user), + db: AsyncSession = Depends(database.get_db) +): + """ + Proxy relay for AI requests. + Fetches AI configuration from the database. + """ + # Fetch active AI config + result = await db.execute( + select(models.AIConfig).where(models.AIConfig.is_active == True) + ) + config = result.scalars().first() + + if not config: + raise HTTPException(status_code=500, detail="AI configuration not found") + + headers = { + "Authorization": f"Bearer {config.api_key}", + "Content-Type": "application/json" + } + + # Prepare payload + payload = ai_request.model_dump() + payload["model"] = config.default_model + + async with httpx.AsyncClient() as client: + try: + response = await client.post( + config.api_url, + json=payload, + headers=headers, + timeout=30.0 + ) + response.raise_for_status() + return response.json() + except httpx.HTTPStatusError as e: + raise HTTPException( + status_code=e.response.status_code, + detail=f"AI provider returned an error: {e.response.text}" + ) + except httpx.RequestError as e: + raise HTTPException( + status_code=500, + detail=f"An error occurred while requesting AI provider: {str(e)}" + ) + diff --git a/app/schemas.py b/app/schemas.py index 394e111..2d3b854 100644 --- a/app/schemas.py +++ b/app/schemas.py @@ -59,4 +59,21 @@ class AssetAssign(BaseModel): heir_name: str class DeclareGuale(BaseModel): - username: str \ No newline at end of file + username: str + +# AI Proxy Schemas +class AIMessage(BaseModel): + role: str + content: str + +class 
AIRequest(BaseModel): + messages: List[AIMessage] + model: Optional[str] = None + +class AIResponse(BaseModel): + id: str + object: str + created: int + model: str + choices: List[dict] + usage: dict \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index fb020bd..12b21ae 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -24,5 +24,6 @@ services: - "5678:5678" # 暴露调试端口 environment: - DATABASE_URL=postgresql+asyncpg://user:password@db:5432/fastapi_db + - GEMINI_API_KEY=key_here depends_on: - db \ No newline at end of file diff --git a/test/test_ai_proxy.py b/test/test_ai_proxy.py new file mode 100644 index 0000000..d728ddc --- /dev/null +++ b/test/test_ai_proxy.py @@ -0,0 +1,61 @@ +import httpx +import asyncio +import time + +# Testing against the running service on localhost +BASE_URL = "http://localhost:8000" + +async def test_ai_proxy_integration(): + async with httpx.AsyncClient(base_url=BASE_URL, timeout=30.0) as client: + print("--- Starting AI Proxy Integration Test ---") + + # 1. Register a new user + username = f"user_{int(time.time())}" + print(f"1. Registering user: {username}") + reg_res = await client.post("/register", json={ + "username": username, + "password": "testpassword" + }) + if reg_res.status_code != 200: + print(f"Registration failed: {reg_res.text}") + return + + # 2. Login to get token + print("2. Logging in...") + login_res = await client.post("/token", json={ + "username": username, + "password": "testpassword" + }) + if login_res.status_code != 200: + print(f"Login failed: {login_res.text}") + return + token = login_res.json()["access_token"] + + # 3. Request AI Proxy + print("3. 
Sending AI Proxy request...") + headers = {"Authorization": f"Bearer {token}"} + ai_request = { + "messages": [ + {"role": "user", "content": "Tell me a joke."} + ], + "model": "some-model" + } + + try: + response = await client.post("/ai/proxy", json=ai_request, headers=headers) + print(f"Response Status Code: {response.status_code}") + print(f"Response Content: {response.text[:200]}...") # Print first 200 chars + + if response.status_code == 200: + print("✅ Success: AI Proxy returned 200 OK") + elif response.status_code in [400, 401]: + print("ℹ️ Proxy worked, but AI provider returned error (likely invalid/missing API key)") + else: + print(f"❌ Unexpected status code: {response.status_code}") + except Exception as e: + print(f"❌ Request failed: {str(e)}") + + print("--- Test Completed ---") + +if __name__ == "__main__": + asyncio.run(test_ai_proxy_integration())