Rework streaming chunk helper, client IP detection, provider selection, and proxy fallback; add /v1/usage test

nsde 2023-08-05 02:30:42 +02:00
parent bf7a6b565a
commit 160ceb5efd
7 changed files with 155 additions and 104 deletions

View file

@@ -1,3 +1,4 @@
import json
import string
import random
import asyncio
@@ -26,11 +27,14 @@ def create_chat_chunk(chat_id: str, model: str, content=None) -> dict:
'content': content
}
if not isinstance(content, str):
if content == CompletionStart:
delta = {
'role': 'assistant'
}
if content == CompletionStop:
delta = {}
chunk = {
'id': chat_id,
'object': 'chat.completion.chunk',
@@ -40,13 +44,12 @@ def create_chat_chunk(chat_id: str, model: str, content=None) -> dict:
{
'delta': delta,
'index': 0,
'finish_reason': None if not(isinstance(content, str)) else 'stop'
'finish_reason': 'stop' if content == CompletionStop else None
}
],
}
print(chunk)
return chunk
return f'data: {json.dumps(chunk)}\n\n'
if __name__ == '__main__':
demo_chat_id = asyncio.run(create_chat_id())
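
For reference, a minimal usage sketch of the reworked helper, assuming (as elsewhere in this commit) that it lives in the chat module next to the CompletionStart/CompletionStop marker classes and the async create_chat_id helper; each call now yields one ready-to-send SSE line:

import asyncio
import chat

async def demo() -> None:
    chat_id = await chat.create_chat_id()
    # Opening chunk: delta carries only the assistant role.
    print(chat.create_chat_chunk(chat_id=chat_id, model='gpt-3.5-turbo', content=chat.CompletionStart))
    # Content chunk: delta carries a piece of text.
    print(chat.create_chat_chunk(chat_id=chat_id, model='gpt-3.5-turbo', content='Hello'))
    # Closing chunk: empty delta, finish_reason set to 'stop'.
    print(chat.create_chat_chunk(chat_id=chat_id, model='gpt-3.5-turbo', content=chat.CompletionStop))

asyncio.run(demo())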

View file

@@ -2,7 +2,19 @@ import base64
import asyncio
async def get_ip(request) -> str:
return request.client.host
xff = None
if request.headers.get('x-forwarded-for'):
xff, *_ = request.headers['x-forwarded-for'].split(', ')
possible_ips = [
xff,
request.headers.get('cf-connecting-ip'),
request.client.host
]
detected_ip = next((i for i in possible_ips if i), None)
return detected_ip
async def add_proxy_auth_to_headers(username: str, password: str, headers: dict) -> dict:
proxy_auth = base64.b64encode(f'{username}:{password}'.encode()).decode()
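
A quick sketch of the new client-IP precedence in get_ip, using a stand-in request object (hypothetical, it only mimics the attributes the function reads) instead of a real FastAPI request; the first X-Forwarded-For hop wins, with cf-connecting-ip and the socket address as fallbacks:

import asyncio
from types import SimpleNamespace

import network  # this module, as it is imported elsewhere in the commit

fake_request = SimpleNamespace(
    headers={
        'x-forwarded-for': '203.0.113.7, 10.0.0.1',
        'cf-connecting-ip': '198.51.100.2',
    },
    client=SimpleNamespace(host='127.0.0.1'),
)

print(asyncio.run(network.get_ip(fake_request)))  # -> 203.0.113.7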

View file

@@ -4,11 +4,11 @@ import asyncio
import chat_providers
provider_modules = [
chat_providers.twa,
# chat_providers.twa,
# chat_providers.quantum,
# chat_providers.churchless,
# chat_providers.closed,
# chat_providers.closed4
chat_providers.churchless,
chat_providers.closed,
chat_providers.closed4
]
def _get_module_name(module) -> str:
@@ -29,6 +29,9 @@ async def balance_chat_request(payload: dict) -> dict:
providers_available.append(provider_module)
if not providers_available:
raise NotImplementedError('This model does not exist.')
provider = random.choice(providers_available)
target = provider.chat_completion(**payload)
target['module'] = _get_module_name(provider)
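
In usage terms, the balancer raises when no provider serves the requested model and otherwise picks one at random, tagging the result with the provider's module name; a hedged sketch, with the payload shape assumed from the rest of this commit:

import asyncio
import load_balancing

async def demo() -> None:
    target = await load_balancing.balance_chat_request({
        'model': 'gpt-3.5-turbo',
        'messages': [{'role': 'user', 'content': 'Hi'}],
        'stream': True,
    })
    # 'module' records which provider module was picked, e.g. 'closed4'.
    print(target['module'])

asyncio.run(demo())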

View file

@@ -2,9 +2,9 @@
import fastapi
from fastapi.middleware.cors import CORSMiddleware
from rich import print
from dotenv import load_dotenv
from fastapi.middleware.cors import CORSMiddleware
import core
import transfer

View file

@@ -7,6 +7,7 @@ import asyncio
import aiohttp
import aiohttp_socks
from rich import print
from dotenv import load_dotenv
load_dotenv()
@@ -71,7 +72,8 @@ class Proxy:
proxies_in_files = []
for proxy_type in ['http', 'socks4', 'socks5']:
try:
for proxy_type in ['http', 'socks4', 'socks5']:
with open(f'secret/proxies/{proxy_type}.txt') as f:
for line in f.readlines():
if line.strip() and not line.strip().startswith('#'):
@@ -79,6 +81,8 @@ for proxy_type in ['http', 'socks4', 'socks5']:
line = line.split('#')[0]
proxies_in_files.append(f'{proxy_type}://{line.strip()}')
except FileNotFoundError:
pass
class ProxyChain:
def __init__(self):
@@ -87,7 +91,11 @@ class ProxyChain:
self.get_random = Proxy(url=random_proxy)
self.connector = aiohttp_socks.ChainProxyConnector.from_urls(proxies_in_files)
default_chain = ProxyChain()
try:
default_chain = ProxyChain()
random_proxy = ProxyChain().get_random
except IndexError:
pass
default_proxy = Proxy(
proxy_type=os.getenv('PROXY_TYPE', 'http'),
@@ -97,7 +105,6 @@ default_proxy = Proxy(
password=os.getenv('PROXY_PASS')
)
random_proxy = ProxyChain().get_random
def test_httpx_workaround():
import httpx
@@ -129,24 +136,11 @@ async def test_aiohttp_socks():
async def streaming_aiohttp_socks():
async with aiohttp.ClientSession(connector=default_proxy.connector) as session:
async with session.post(
'https://free.churchless.tech/v1/chat/completions',
json={
"model": "gpt-3.5-turbo",
"messages": [
{
"role": "user",
"content": "Hi"
}
],
"stream": True
},
# headers={
# 'Authorization': 'Bearer MyDiscord'
# }
) as response:
html = await response.text()
return html.strip()
async with session.get('https://httpbin.org/get', headers={
'Authorization': 'x'
}) as response:
json = await response.json()
return json
async def text_httpx_socks():
import httpx
@@ -163,5 +157,5 @@ if __name__ == '__main__':
# print(test_httpx())
# print(test_requests())
# print(asyncio.run(test_aiohttp_socks()))
# print(asyncio.run(streaming_aiohttp_socks()))
print(asyncio.run(text_httpx_socks()))
print(asyncio.run(streaming_aiohttp_socks()))
# print(asyncio.run(text_httpx_socks()))
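
The new try/except around ProxyChain() above is there presumably because picking a random entry from an empty proxy list raises IndexError when no secret/proxies/*.txt files exist; a tiny self-contained sketch of the same fallback pattern (values illustrative):

import random

proxies_in_files = []  # what you end up with when no proxy list files are present

try:
    proxy_url = random.choice(proxies_in_files)  # random.choice([]) raises IndexError
except IndexError:
    proxy_url = None  # no chain available; callers fall back to the env-configured default_proxy

print(proxy_url)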

View file

@@ -7,6 +7,7 @@ import starlette
from rich import print
from dotenv import load_dotenv
from python_socks._errors import ProxyError
import proxies
import load_balancing
@@ -38,6 +39,7 @@ async def stream(
input_tokens: int=0,
incoming_request: starlette.requests.Request=None,
):
payload = payload or DEMO_PAYLOAD
is_chat = False
@@ -46,31 +48,38 @@
chat_id = await chat.create_chat_id()
model = payload['model']
chat_chunk = chat.create_chat_chunk(
yield chat.create_chat_chunk(
chat_id=chat_id,
model=model,
content=chat.CompletionStart
)
data = json.dumps(chat_chunk)
chunk = f'data: {data}'
yield chunk
yield chat.create_chat_chunk(
chat_id=chat_id,
model=model,
content=None
)
for _ in range(5):
if is_chat:
target_request = await load_balancing.balance_chat_request(payload)
else:
target_request = await load_balancing.balance_organic_request(payload)
headers = {
'Content-Type': 'application/json'
}
if is_chat:
target_request = await load_balancing.balance_chat_request(payload)
else:
target_request = await load_balancing.balance_organic_request({
'path': path,
'payload': payload,
'headers': headers
})
for k, v in target_request.get('headers', {}).items():
headers[k] = v
async with aiohttp.ClientSession(connector=proxies.random_proxy.connector) as session:
async with aiohttp.ClientSession(connector=proxies.default_proxy.connector) as session:
try:
async with session.request(
method=target_request.get('method', 'POST'),
url=target_request['url'],
@@ -85,11 +94,13 @@ async def stream(
timeout=aiohttp.ClientTimeout(total=float(os.getenv('TRANSFER_TIMEOUT', '120'))),
) as response:
try:
await response.raise_for_status()
response.raise_for_status()
except Exception as exc:
if 'Too Many Requests' in str(exc):
print(429)
continue
# if 'Too Many Requests' in str(exc):
if user and incoming_request:
await logs.log_api_request(
@@ -103,6 +114,29 @@ async def stream(
'$inc': {'credits': -credits_cost}
})
try:
async for chunk in response.content.iter_any():
chunk = f'{chunk.decode("utf8")}\n\n'
if chunk.strip():
if is_chat:
if target_request['module'] == 'twa':
data = json.loads(chunk.split('data: ')[1])
if data.get('text'):
chunk = chat.create_chat_chunk(
chat_id=chat_id,
model=model,
content=data['text']
)
yield chunk
except Exception as exc:
if 'Connection closed' in str(exc):
print('connection closed')
continue
if not demo_mode:
ip_address = await network.get_ip(incoming_request)
@@ -115,26 +149,13 @@ async def stream(
await stats.add_model(model)
await stats.add_tokens(input_tokens, model)
async for chunk in response.content.iter_any():
chunk = f'{chunk.decode("utf8")}\n\n'
if chunk.strip():
if is_chat:
if target_request['module'] == 'twa':
data = json.loads(chunk.split('data: ')[1])
if data.get('text'):
chat_chunk = chat.create_chat_chunk(
chat_id=chat_id,
model=model,
content=['text']
)
data = json.dumps(chat_chunk)
chunk = f'data: {data}'
yield chunk
break
except ProxyError:
print('proxy error')
continue
print(3)
if is_chat:
chat_chunk = chat.create_chat_chunk(
chat_id=chat_id,
@@ -143,8 +164,7 @@ async def stream(
)
data = json.dumps(chat_chunk)
yield f'data: {data}'
yield 'data: [DONE]'
yield 'data: [DONE]\n\n'
if __name__ == '__main__':
asyncio.run(stream())
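
On the wire, stream() now yields Server-Sent-Events lines and terminates with 'data: [DONE]'; a small self-contained sketch of how a client would parse them (the chunk contents below are illustrative, not captured output):

import json

sse_lines = [
    'data: {"id": "chatcmpl-demo", "object": "chat.completion.chunk", '
    '"choices": [{"delta": {"content": "Hello"}, "index": 0, "finish_reason": null}]}\n\n',
    'data: [DONE]\n\n',
]

for line in sse_lines:
    body = line.strip().removeprefix('data: ')
    if body == '[DONE]':  # sentinel that closes the stream
        break
    delta = json.loads(body)['choices'][0]['delta']
    print(delta.get('content', ''), end='')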

View file

@@ -86,8 +86,27 @@ def test_all():
# print(test_api())
print(test_library())
def test_api(model: str=MODEL, messages: List[dict]=None) -> dict:
"""Tests an API api_endpoint."""
headers = {
'Authorization': 'Bearer ' + api_key
}
response = httpx.get(
url=f'{api_endpoint}/v1/usage',
headers=headers,
timeout=20
)
response.raise_for_status()
return response.text
if __name__ == '__main__':
# api_endpoint = 'https://api.nova-oss.com'
api_endpoint = 'http://localhost:2332'
api_endpoint = 'https://alpha-api.nova-oss.com'
api_key = os.getenv('TEST_NOVA_KEY')
test_all()
# test_all()
print(test_api())