diff --git a/api/main.py b/api/main.py
index c50a0a0..ed04c66 100644
--- a/api/main.py
+++ b/api/main.py
@@ -1,6 +1,7 @@
 import os
 import httpx
 import fastapi
+from keys import Keys
 from starlette.requests import Request
 from starlette.responses import StreamingResponse
 
@@ -31,33 +32,43 @@ async def startup_event():
     security.enable_proxy()
     security.ip_protection_check()
 
+    # Setup key cache
+    Keys()
+
 
 @app.get('/')
 async def root():
     """Returns the root endpoint."""
     return {
         'status': 'ok',
-        'discord': 'https://discord.gg/mX9BYdFeQF',
+        'discord': 'https://discord.gg/85gdcd57Xr',
         'github': 'https://github.com/Luna-OSS'
     }
 
 
 async def _reverse_proxy(request: Request):
     target_url = f'https://api.openai.com/v1/{request.url.path}'
-
+    key = Keys.get((await request.json())['model'])
+    if not key:
+        return fastapi.responses.JSONResponse(
+            status_code=400,
+            content={
+                'error': 'No API Key for model given, please try again with a valid model.'
+            }
+        )
     request_to_api = target_api_client.build_request(
         method=request.method,
         url=target_url,
         headers={
-            'Authorization': 'Bearer ' + os.getenv('OPENAI_KEY'),
+            'Authorization': 'Bearer ' + key,
            'Content-Type': 'application/json'
         },
         content=await request.body(),
     )
 
     api_response = await target_api_client.send(request_to_api, stream=True)
-
+    print(f'[{request.method}] {request.url.path} {api_response.status_code}')
-
+    Keys(key).unlock()
     return StreamingResponse(
         api_response.aiter_raw(),
         status_code=api_response.status_code,
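
The `keys` module imported above is not included in this diff, so the `Keys` interface the new code relies on has to be inferred from its call sites: `Keys()` at startup to warm a key cache, `Keys.get(model)` to pick a key for the requested model (returning a falsy value when none is available), and `Keys(key).unlock()` once the upstream request has been dispatched. The sketch below is only an assumption of what such a module might look like; the environment-variable names, model list, and locking scheme are hypothetical and not taken from the repository.

```python
# keys.py -- hypothetical sketch, not the actual module from this PR.
import os
import threading
from typing import Optional

_lock = threading.Lock()
_keys_by_model: dict[str, list[str]] = {}
_in_use: set[str] = set()


class Keys:
    """Minimal key cache matching the usage seen in api/main.py."""

    def __init__(self, key: Optional[str] = None):
        # Called with no argument at startup to populate the cache,
        # or with a specific key so it can be released via unlock().
        self.key = key
        if key is None:
            self._load()

    @staticmethod
    def _load() -> None:
        # Assumption: one comma-separated key pool per model, read from the environment.
        _keys_by_model['gpt-3.5-turbo'] = [
            k for k in os.getenv('OPENAI_KEYS_GPT35', '').split(',') if k
        ]
        _keys_by_model['gpt-4'] = [
            k for k in os.getenv('OPENAI_KEYS_GPT4', '').split(',') if k
        ]

    @classmethod
    def get(cls, model: str) -> Optional[str]:
        # Hand out a key for the requested model that is not currently in use.
        with _lock:
            for key in _keys_by_model.get(model, []):
                if key not in _in_use:
                    _in_use.add(key)
                    return key
        return None

    def unlock(self) -> None:
        # Release the key after the upstream request has been sent.
        with _lock:
            _in_use.discard(self.key)
```

Under this reading, locking a key while a request is in flight is what lets the proxy rotate across a pool of keys without handing the same key to two concurrent requests; `unlock()` returns it to the pool once the upstream call has been dispatched.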