Mirror of https://github.com/NovaOSS/nova-api.git
Synced 2024-11-25 20:33:58 +01:00

First actually working version I guess (streaming support)

This commit is contained in:
parent c0a797599b
commit 23533b4aa3

 api/main.py | 15
api/main.py

@@ -26,8 +26,8 @@ app.add_middleware(
 async def startup_event():
     """Read up the API server."""

-    security.enable_proxy()
-    security.ip_protection_check()
+    # security.enable_proxy()
+    # security.ip_protection_check()

 @app.get('/')
 async def root():
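Both security hooks are simply commented out at startup here. A hedged alternative sketch that keeps them switchable behind an environment flag; NOVA_ENABLE_PROXY and the stand-in functions are assumptions for illustration, not names from the repository:

# Sketch only: gate the startup security hooks on an assumed env flag
# (NOVA_ENABLE_PROXY) instead of commenting them out.
import os
from fastapi import FastAPI

app = FastAPI()

def enable_proxy():
    """Stand-in for security.enable_proxy() from the repository."""
    print('proxy enabled')

def ip_protection_check():
    """Stand-in for security.ip_protection_check() from the repository."""
    print('IP protection checked')

@app.on_event('startup')
async def startup_event():
    if os.getenv('NOVA_ENABLE_PROXY') == '1':
        enable_proxy()
        ip_protection_check()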
@@ -39,13 +39,4 @@ async def root():
         'github': 'https://github.com/Luna-OSS'
     }

-async def _reverse_proxy(request: Request):
-    headers = {
-        name: value
-        for name, value in target_response.headers.items()
-        if name.lower() not in EXCLUDED_HEADERS
-    }
-
-    # ...
-
-app.add_route('/{path:path}', _reverse_proxy, ['GET', 'POST', 'PUT', 'DELETE', 'PATCH'])
+app.add_route('/{path:path}', transfer.transfer_streaming_response, ['GET', 'POST', 'PUT', 'DELETE', 'PATCH'])
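The local _reverse_proxy stub (whose header filtering was never wired up) is dropped in favour of one catch-all route that hands every request to transfer.transfer_streaming_response. A minimal sketch of the same wiring pattern, with a hypothetical echo handler standing in for the real transfer module:

# Sketch only: demonstrates Starlette/FastAPI's add_route() catch-all pattern.
# echo_stream is hypothetical; the project routes to transfer.transfer_streaming_response.
from fastapi import FastAPI, Request
from starlette.responses import StreamingResponse

app = FastAPI()

async def echo_stream(request: Request) -> StreamingResponse:
    async def body():
        # Stream two small chunks back to the caller.
        yield f'path: {request.url.path}\n'.encode()
        yield b'done\n'
    return StreamingResponse(content=body())

# Forward every path and method to the streaming handler.
app.add_route('/{path:path}', echo_stream, ['GET', 'POST', 'PUT', 'DELETE', 'PATCH'])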
@@ -40,6 +40,7 @@ class Proxy:
         """
         return self.proxy_type# + 'h' if self.proxy_type.startswith('socks') else self.proxy_type

+    @property
     def proxies(self):
         """Returns a dictionary of proxies, ready to be used with the requests library or httpx.
         """
@@ -53,6 +54,11 @@ class Proxy:

         return proxies_dict

+    @property
+    def url(self):
+        """Returns the proxy URL."""
+        return f'{self.protocol}://{self.auth}{self.host}:{self.port}'
+
     def __str__(self):
         return f'{self.proxy_type}://{len(self.auth) * "*"}{self.host}:{self.port}'

@@ -83,7 +89,7 @@ def check_proxy():
     resp = httpx.get(
         url='https://echo.hoppscotch.io/',
         timeout=20,
-        proxies=active_proxy.proxies()
+        proxies=active_proxy.proxies
     )
     resp.raise_for_status()

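Because proxies (and the new url helper) are now properties on the Proxy class, call sites read them as plain attributes: check_proxy passes active_proxy.proxies without the call parentheses. A hedged sketch of how such a property-based proxy object plugs into httpx; _DemoProxy and its values are placeholders, not code from the repository (the project itself uses SOCKS5):

# Sketch only: _DemoProxy mimics the Proxy interface shown in the hunks above.
import httpx

class _DemoProxy:
    protocol = 'http'         # placeholder scheme; nova-api uses SOCKS5
    auth = ''                 # e.g. 'user:pass@' when credentials are configured
    host = '127.0.0.1'
    port = 8080

    @property
    def url(self):
        """Proxy URL, mirroring the new Proxy.url property."""
        return f'{self.protocol}://{self.auth}{self.host}:{self.port}'

    @property
    def proxies(self):
        """Mapping accepted by httpx: URL pattern -> proxy URL."""
        return {'http://': self.url, 'https://': self.url}

active_proxy = _DemoProxy()
resp = httpx.get('https://echo.hoppscotch.io/', timeout=20, proxies=active_proxy.proxies)
resp.raise_for_status()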
@@ -1,5 +1,10 @@
+"""Module for transferring requests to ClosedAI API"""
+
 import os
-import httpx
+import aiohttp
+import aiohttp_socks
+
+import proxies

 from dotenv import load_dotenv

@@ -15,16 +20,48 @@ EXCLUDED_HEADERS = [
     'connection'
 ]

-async def stream_api_response(request, target_endpoint: str='https://api.openai.com/v1'):
-    async with httpx.AsyncClient(timeout=120) as client:
-        async with client.stream(
-            method=request.method,
-            url=f'{target_endpoint}/{request.url.path}',
-            headers={
-                'Authorization': 'Bearer ' + os.getenv('CLOSEDAI_KEY'),
-                'Content-Type': 'application/json'
-            },
-            data=await request.body(),
-        ) as target_response:
-            target_response.raise_for_status()
+proxy = proxies.active_proxy
+proxy_connector = aiohttp_socks.ProxyConnector(
+    proxy_type=aiohttp_socks.ProxyType.SOCKS5,
+    host=proxy.ip_address,
+    port=proxy.port,
+    rdns=False,
+    username=proxy.username,
+    password=proxy.password
+)
+
+async def transfer_streaming_response(incoming_request, target_endpoint: str='https://api.openai.com/v1'):
+    """Transfer a streaming response from the incoming request to the target endpoint"""
+
+    incoming_json_payload = await incoming_request.json()
+
+    async def receive_target_stream():
+        connector = aiohttp_socks.ProxyConnector(
+            proxy_type=aiohttp_socks.ProxyType.SOCKS5,
+            host=proxy.ip_address,
+            port=proxy.port,
+            rdns=False,
+            username=proxy.username,
+            password=proxy.password
+        )
+        async with aiohttp.ClientSession(
+            connector=connector,
+            timeout=aiohttp.ClientTimeout(total=120),
+            raise_for_status=True
+        ) as session:
+            async with session.request(
+                method=incoming_request.method,
+                url=f'{target_endpoint}/{incoming_request.url.path}',
+                json=incoming_json_payload,
+                headers={
+                    'Content-Type': 'application/json',
+                    'Authorization': f'Bearer {os.getenv("CLOSEDAI_KEY")}'
+                }
+            ) as response:
+                async for chunk in response.content.iter_any():
+                    chunk = f'{chunk.decode("utf8")}\n\n'
+                    yield chunk
+
+    return StreamingResponse(
+        content=receive_target_stream()
+    )
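transfer_streaming_response builds a SOCKS5 connector per request and relays the upstream body chunk by chunk through Starlette's StreamingResponse (whose import sits outside these hunks). A hedged sketch of a client reading that stream end to end; the local address and key are placeholders, not values from the repository:

# Sketch only: consumes the proxied streaming endpoint with httpx.
# 'http://localhost:2332/v1' and 'demo-key' are assumed placeholders.
import httpx

payload = {
    'model': 'gpt-3.5-turbo',
    'messages': [{'role': 'user', 'content': 'Say hi'}],
    'stream': True,
}

with httpx.stream(
    'POST',
    'http://localhost:2332/v1/chat/completions',
    headers={'Authorization': 'Bearer demo-key'},
    json=payload,
    timeout=120,
) as response:
    response.raise_for_status()
    for line in response.iter_lines():
        if line.strip():
            print(line)   # each non-empty line is an SSE-style 'data: {...}' chunk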
@@ -5,3 +5,5 @@ python-dotenv
 rich
 starlette
 win_inet_pton
+aiohttp-socks
+aiohttp
@@ -35,12 +35,13 @@ def test_api(model: str=MODEL, messages: List[dict]=None) -> dict:
     json_data = {
         'model': model,
         'messages': messages or MESSAGES,
+        'stream': True,
     }

     response = httpx.post(f'{ENDPOINT}/chat/completions', headers=headers, json=json_data, timeout=20)
     response.raise_for_status()

-    return response.json()['choices'][0]
+    return response

 def test_library():
     """Tests if the endpoint is working with the "Closed"AI library."""
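With 'stream': True in the payload the body is a sequence of SSE-style data: lines rather than a single JSON object, which is why test_api now returns the raw response instead of response.json()['choices'][0]. A hedged helper for turning such a response back into plain text, assuming OpenAI-style delta events:

# Sketch only: assumes OpenAI-style streaming lines ('data: {...}', ending with 'data: [DONE]').
import json

def collect_stream_text(response) -> str:
    """Concatenate the delta contents from an SSE-style chat completion stream."""
    text = ''
    for line in response.iter_lines():
        line = line.strip()
        if not line.startswith('data: ') or line == 'data: [DONE]':
            continue
        event = json.loads(line[len('data: '):])
        delta = event['choices'][0].get('delta', {})
        text += delta.get('content', '')
    return text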