I think I fixed the errors

This commit is contained in:
nsde 2023-08-23 23:26:43 +02:00
parent a7b2ce7aa5
commit 110f6a2acd

View file

@@ -106,6 +106,21 @@ async def stream(
# We haven't done any requests as of right now, everything until now was just preparation # We haven't done any requests as of right now, everything until now was just preparation
# Here, we process the request # Here, we process the request
async with aiohttp.ClientSession(connector=proxies.get_proxy().connector) as session: async with aiohttp.ClientSession(connector=proxies.get_proxy().connector) as session:
try:
async with session.get(
url='https://checkip.amazonaws.com',
timeout=aiohttp.ClientTimeout(
connect=3,
total=float(os.getenv('TRANSFER_TIMEOUT', '5'))
)
) as response:
for actual_ip in os.getenv('ACTUAL_IPS', '').split(' '):
if actual_ip in await response.text():
raise ValueError(f'Proxy {response.text()} is transparent!')
except Exception as exc:
continue
try: try:
async with session.request( async with session.request(
method=target_request.get('method', 'POST'), method=target_request.get('method', 'POST'),
@@ -120,6 +135,9 @@ async def stream(
total=float(os.getenv('TRANSFER_TIMEOUT', '120')) total=float(os.getenv('TRANSFER_TIMEOUT', '120'))
), ),
) as response: ) as response:
if response.status == 429:
continue
if response.content_type == 'application/json': if response.content_type == 'application/json':
data = await response.json() data = await response.json()
@@ -144,14 +162,21 @@ async def stream(
model=model, model=model,
target_request=target_request target_request=target_request
): ):
print(f'[STREAM] {chunk}')
yield chunk yield chunk
break break
except ProxyError as exc: except ProxyError as exc:
print('[!] Proxy error:', exc) print('[!] aiohttp came up with a dumb excuse to not work again ("pRoXy ErRor")')
continue continue
except ConnectionResetError as exc:
print('[!] aiohttp came up with a dumb excuse to not work again ("cOnNeCtIoN rEsEt")')
continue
print(f'[STREAM] {json_response}')
if is_chat and is_stream: if is_chat and is_stream:
yield await chat.create_chat_chunk(chat_id=chat_id, model=model, content=chat.CompletionStop) yield await chat.create_chat_chunk(chat_id=chat_id, model=model, content=chat.CompletionStop)
yield 'data: [DONE]\n\n' yield 'data: [DONE]\n\n'