First actually working version I guess (streaming support)

This commit is contained in:
nsde 2023-06-30 02:49:56 +02:00
parent c0a797599b
commit 23533b4aa3
5 changed files with 64 additions and 27 deletions

View file

@@ -26,8 +26,8 @@ app.add_middleware(
async def startup_event():
"""Read up the API server."""
security.enable_proxy()
security.ip_protection_check()
# security.enable_proxy()
# security.ip_protection_check()
@app.get('/')
async def root():
@@ -39,13 +39,4 @@ async def root():
'github': 'https://github.com/Luna-OSS'
}
async def _reverse_proxy(request: Request):
headers = {
name: value
for name, value in target_response.headers.items()
if name.lower() not in EXCLUDED_HEADERS
}
# ...
app.add_route('/{path:path}', _reverse_proxy, ['GET', 'POST', 'PUT', 'DELETE', 'PATCH'])
app.add_route('/{path:path}', transfer.transfer_streaming_response, ['GET', 'POST', 'PUT', 'DELETE', 'PATCH'])

View file

@@ -40,6 +40,7 @@ class Proxy:
"""
return self.proxy_type# + 'h' if self.proxy_type.startswith('socks') else self.proxy_type
@property
def proxies(self):
"""Returns a dictionary of proxies, ready to be used with the requests library or httpx.
"""
@@ -53,6 +54,11 @@ class Proxy:
return proxies_dict
@property
def url(self):
    """Returns the proxy URL."""
    # NOTE(review): `self.auth` is interpolated directly before the host, so it
    # presumably already carries its trailing '@' when credentials are set
    # (cf. __str__, which masks it character-for-character) — confirm in the
    # attribute definitions elsewhere in this class.
    return f'{self.protocol}://{self.auth}{self.host}:{self.port}'
def __str__(self):
    # Human-readable form with credentials masked: every character of
    # `self.auth` is replaced by '*' so the string is safe to log.
    return f'{self.proxy_type}://{len(self.auth) * "*"}{self.host}:{self.port}'
@@ -83,7 +89,7 @@ def check_proxy():
resp = httpx.get(
url='https://echo.hoppscotch.io/',
timeout=20,
proxies=active_proxy.proxies()
proxies=active_proxy.proxies
)
resp.raise_for_status()

View file

@@ -1,5 +1,10 @@
"""Module for transferring requests to ClosedAI API"""
import os
import httpx
import aiohttp
import aiohttp_socks
import proxies
from dotenv import load_dotenv
@@ -15,16 +20,48 @@ EXCLUDED_HEADERS = [
'connection'
]
async def stream_api_response(request, target_endpoint: str='https://api.openai.com/v1'):
async with httpx.AsyncClient(timeout=120) as client:
async with client.stream(
method=request.method,
url=f'{target_endpoint}/{request.url.path}',
headers={
'Authorization': 'Bearer ' + os.getenv('CLOSEDAI_KEY'),
'Content-Type': 'application/json'
},
data=await request.body(),
) as target_response:
target_response.raise_for_status()
# Module-level proxy configuration, resolved once at import time.
proxy = proxies.active_proxy
# SOCKS5 connector routing outbound traffic through the active proxy.
# NOTE(review): aiohttp closes a session's connector when the session exits,
# so sharing this single instance across multiple ClientSession objects may
# fail after the first use — confirm intended lifetime.
# NOTE(review): rdns=False means hostnames are resolved locally, not by the
# proxy — verify this is the desired leak behavior.
proxy_connector = aiohttp_socks.ProxyConnector(
    proxy_type=aiohttp_socks.ProxyType.SOCKS5,
    host=proxy.ip_address,
    port=proxy.port,
    rdns=False,
    username=proxy.username,
    password=proxy.password
)
async def transfer_streaming_response(incoming_request, target_endpoint: str='https://api.openai.com/v1'):
    """Relay an incoming API request to the target endpoint and stream back the reply.

    The incoming JSON body is forwarded upstream through a per-request SOCKS5
    connection (settings taken from the module-level ``proxy``), and upstream
    response chunks are re-emitted to the caller as a streaming response.
    """
    payload = await incoming_request.json()

    async def relay_upstream_chunks():
        # Build a fresh SOCKS5 connector per request; aiohttp closes the
        # connector together with its session.
        socks_connector = aiohttp_socks.ProxyConnector(
            proxy_type=aiohttp_socks.ProxyType.SOCKS5,
            host=proxy.ip_address,
            port=proxy.port,
            rdns=False,
            username=proxy.username,
            password=proxy.password
        )
        outbound_headers = {
            'Content-Type': 'application/json',
            'Authorization': f'Bearer {os.getenv("CLOSEDAI_KEY")}'
        }
        overall_timeout = aiohttp.ClientTimeout(total=120)

        async with aiohttp.ClientSession(
            connector=socks_connector,
            timeout=overall_timeout,
            raise_for_status=True  # non-2xx upstream replies surface as exceptions
        ) as session:
            async with session.request(
                method=incoming_request.method,
                url=f'{target_endpoint}/{incoming_request.url.path}',
                json=payload,
                headers=outbound_headers
            ) as upstream_response:
                # NOTE(review): decoding each chunk independently assumes a
                # multi-byte UTF-8 sequence never straddles a chunk boundary —
                # confirm against the upstream's framing.
                async for raw_chunk in upstream_response.content.iter_any():
                    yield f'{raw_chunk.decode("utf8")}\n\n'

    return StreamingResponse(
        content=relay_upstream_chunks()
    )

View file

@@ -5,3 +5,5 @@ python-dotenv
rich
starlette
win_inet_pton
aiohttp-socks
aiohttp

View file

@@ -35,12 +35,13 @@ def test_api(model: str=MODEL, messages: List[dict]=None) -> dict:
json_data = {
'model': model,
'messages': messages or MESSAGES,
'stream': True,
}
response = httpx.post(f'{ENDPOINT}/chat/completions', headers=headers, json=json_data, timeout=20)
response.raise_for_status()
return response.json()['choices'][0]
return response
def test_library():
"""Tests if the endpoint is working with the "Closed"AI library."""