Work in progress

nsde 2023-08-03 03:50:04 +02:00
parent 08d31d7ad1
commit e76d675dc6
4 changed files with 54 additions and 42 deletions

View file

@@ -1,6 +1,5 @@
 import os
-import bson
-import datetime
+import time
 
 from dotenv import load_dotenv
 from motor.motor_asyncio import AsyncIOMotorClient
@@ -22,7 +21,7 @@ async def log_api_request(user, request, target_url):
     model = payload['model']
 
     new_log_item = {
-        'timestamp': bson.timestamp.Timestamp(datetime.datetime.now(), 0),
+        'timestamp': time.time(),
         'method': request.method,
         'path': request.url.path,
        'user_id': user['_id'],

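The first file swaps the request-log timestamp from a BSON Timestamp built out of datetime.datetime.now() to a plain Unix epoch float. A minimal sketch of the two variants for comparison; the surrounding Motor/MongoDB insert is unchanged, and the comment on BSON Timestamp reflects MongoDB's own guidance that the type is mainly meant for internal use:

import time
import bson
import datetime

# old: a BSON Timestamp (seconds + increment counter), a type MongoDB reserves
# mostly for internal things like the replication oplog
old_value = bson.timestamp.Timestamp(datetime.datetime.now(), 0)

# new: seconds since the epoch as a float; stored by MongoDB as a double and
# trivially sortable/comparable
new_value = time.time()

new_log_item = {
    'timestamp': new_value,
    # 'method', 'path', 'user_id', ... as in the diff
}
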
View file

@@ -1,5 +1,7 @@
 import os
-import requests
+import aiohttp
+import asyncio
+import aiohttp_socks
 
 from dotenv import load_dotenv
@@ -9,7 +11,7 @@ from helpers import exceptions
 load_dotenv()
 
-async def stream(request: dict):
+async def stream(request: dict, demo_mode: bool=False):
     headers = {
         'Content-Type': 'application/json'
     }
@@ -18,27 +20,42 @@ async def stream(request: dict):
         headers[k] = v
 
     for _ in range(3):
-        response = requests.request(
-            method=request.get('method', 'POST'),
-            url=request['url'],
-            json=request.get('payload', {}),
-            headers=headers,
-            timeout=int(os.getenv('TRANSFER_TIMEOUT', '120')),
-            proxies=proxies.default_proxy.urls,
-            stream=True
-        )
-
-        try:
-            response.raise_for_status()
-        except Exception as exc:
-            if str(exc) == '429 Client Error: Too Many Requests for url: https://api.openai.com/v1/chat/completions':
-                continue
-            else:
-                break
-
-    for chunk in response.iter_lines():
-        chunk = f'{chunk.decode("utf8")}\n\n'
-        yield chunk
+        async with aiohttp.ClientSession(connector=proxies.default_proxy.connector) as session:
+            async with session.get(
+                # 'GET',
+                'https://checkip.amazonaws.com/'
+            ) as response:
+                print(response.content)
+                print(type(response.content))
+
+                # html = await response.text()
+                # print(html)
+
+                # async with session.get(
+                #     method='GET',
+                #     url='https://checkip.amazonaws.com',
+                #     method=request.get('method', 'POST'),
+                #     url=request['url'],
+                #     json=request.get('payload', {}),
+                #     headers=headers,
+                #     timeout=aiohttp.ClientTimeout(total=float(os.getenv('TRANSFER_TIMEOUT', '120'))),
+                # ) as response:
+                #     try:
+                #         await response.raise_for_status()
+                #     except Exception as exc:
+                #         if 'Too Many Requests' in str(exc):
+                #             continue
+                #         else:
+                #             break
+
+                async for chunk in response.content.iter_chunks():
+                    # chunk = f'{chunk.decode("utf8")}\n\n'
+                    if demo_mode:
+                        print(chunk)
+                    yield chunk
 
 if __name__ == '__main__':
-    pass
+    asyncio.run(stream({'method': 'GET', 'url': 'https://checkip.amazonaws.com'}, True))

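The second file is the actual work in progress: the blocking requests.request(..., stream=True) call is replaced by an aiohttp session that runs through the proxy connector, but the real upstream request is still commented out and a hard-coded GET against checkip.amazonaws.com is used to verify the proxy. A minimal sketch of what the finished stream() presumably looks like once the commented block is restored; the request-dict layout, proxies.default_proxy.connector and TRANSFER_TIMEOUT come from the diff, while the header merge and the retry handling are assumptions:

import os
import aiohttp

import proxies  # local module from this repo, as imported by the original file


async def stream(request: dict, demo_mode: bool = False):
    headers = {'Content-Type': 'application/json'}
    headers.update(request.get('headers', {}))  # assumed source of the headers[k] = v loop

    for _ in range(3):  # retry loop kept from the requests-based version
        async with aiohttp.ClientSession(connector=proxies.default_proxy.connector) as session:
            async with session.request(
                method=request.get('method', 'POST'),
                url=request['url'],
                json=request.get('payload', {}),
                headers=headers,
                timeout=aiohttp.ClientTimeout(total=float(os.getenv('TRANSFER_TIMEOUT', '120'))),
            ) as response:
                if response.status == 429:  # rate limited, try the next attempt
                    continue
                response.raise_for_status()

                # iter_chunks() yields (bytes, end_of_http_chunk) tuples
                async for chunk, _ in response.content.iter_chunks():
                    if demo_mode:
                        print(chunk)
                    yield chunk
                return

Two aiohttp-specific details worth noting: raise_for_status() is a plain method here, so the commented-out await response.raise_for_status() in the diff would need the await dropped, and content.iter_chunks() yields (data, end_of_chunk) tuples rather than bare byte strings.
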
View file

@@ -52,7 +52,8 @@ class Proxy:
         print(f'Detected IP: {detected_ip}')
         return detected_ip.strip()
 
-    async def get_connector(self):
+    @property
+    def connector(self):
         proxy_types = {
             'http': aiohttp_socks.ProxyType.HTTP,
             'https': aiohttp_socks.ProxyType.HTTP,
@@ -60,7 +61,7 @@ class Proxy:
             'socks5': aiohttp_socks.ProxyType.SOCKS5
         }
 
-        connector = aiohttp_socks.ProxyConnector(
+        return aiohttp_socks.ProxyConnector(
             proxy_type=proxy_types[self.proxy_type],
             host=self.host,
             port=self.port,
@@ -69,10 +70,6 @@ class Proxy:
             password=self.password
         )
 
-        await self.initialize_connector(connector)
-
-        return connector
-
 default_proxy = Proxy(
     proxy_type=os.getenv('PROXY_TYPE', 'http'),
     host_or_ip=os.getenv('PROXY_HOST', '127.0.0.1'),
@@ -81,16 +78,6 @@ default_proxy = Proxy(
     password=os.getenv('PROXY_PASS')
 )
 
-def test_httpx():
-    import httpx
-
-    print(default_proxy.proxies)
-
-    with httpx.Client(
-        # proxies=default_proxy.proxies
-    ) as client:
-        return client.get('https://checkip.amazonaws.com').text.strip()
-
 def test_httpx_workaround():
     import httpx
@@ -113,6 +100,13 @@ def test_requests():
         proxies=default_proxy.urls
     ).text.strip()
 
+async def test_aiohttp_socks():
+    async with aiohttp.ClientSession(connector=default_proxy.connector) as session:
+        async with session.get('https://checkip.amazonaws.com/') as response:
+            html = await response.text()
+            return html.strip()
+
 if __name__ == '__main__':
-    print(test_httpx())
+    # print(test_httpx())
     # print(test_requests())
+    print(asyncio.run(test_aiohttp_socks()))

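Turning get_connector() into a connector property means every access constructs a fresh aiohttp_socks.ProxyConnector instead of reusing one stored instance. That fits how aiohttp works: a ClientSession owns and closes its connector by default, so a connector already used by a closed session cannot be handed to the next one. A short usage sketch under that assumption, hitting the same check endpoint as the new test_aiohttp_socks(); the import path is assumed:

import asyncio
import aiohttp

from proxies import default_proxy  # assumed import path for the module in this diff


async def detect_ip_twice():
    # each iteration gets its own ProxyConnector via the new property, so the
    # second session does not try to reuse the connector closed by the first one
    for _ in range(2):
        async with aiohttp.ClientSession(connector=default_proxy.connector) as session:
            async with session.get('https://checkip.amazonaws.com/') as response:
                print((await response.text()).strip())


if __name__ == '__main__':
    asyncio.run(detect_ip_twice())
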
View file

@@ -89,4 +89,6 @@ async def handle(incoming_request):
     print(target_request['url'])
 
+    return errors.error(500, 'Sorry, the API is currenly under maintainance.', 'Please try again later.')
+
     return starlette.responses.StreamingResponse(netclient.stream(target_request))
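
The last hunk puts the endpoint into maintenance: the new return fires unconditionally, so the StreamingResponse line below it stays unreachable until the early return is removed once the aiohttp rewrite above is finished. For reference, a minimal sketch of why netclient.stream() can be handed to Starlette directly; the fake generator is purely illustrative, and Starlette's StreamingResponse accepts any async iterable:

import asyncio
import starlette.responses


async def fake_stream():
    # stand-in for netclient.stream(target_request): an async generator of bytes
    for part in (b'data: hello\n\n', b'data: world\n\n'):
        yield part
        await asyncio.sleep(0)


# handle() can return this directly; Starlette iterates the generator and
# forwards each chunk to the client as it arrives
response = starlette.responses.StreamingResponse(fake_stream())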