Compare commits

..

2 commits

Author SHA1 Message Date
nsde 21331874db We're FULLY open source on GitHub now! 2023-10-05 15:06:33 +02:00
nsde 6ef8441681 Improved errors, checking and fixed ratelimit retrying 2023-10-05 14:17:53 +02:00
12 changed files with 241 additions and 39 deletions

3
.gitignore vendored
View file

@@ -22,9 +22,6 @@ last_update.txt
.log
*.log.*
providers/*
providers/
secret/*
secret/
/secret

View file

@@ -12,7 +12,7 @@ class KeyManager:
self.conn = AsyncIOMotorClient(os.environ['MONGO_URI'])
async def _get_collection(self, collection_name: str):
return self.conn[os.getenv('MONGO_NAME', 'nova-test')][collection_name]
return self.conn['nova-core'][collection_name]
async def add_key(self, provider: str, key: str, source: str='?'):
db = await self._get_collection('providerkeys')
@@ -36,7 +36,7 @@ class KeyManager:
})
if key is None:
return ValueError('No keys available for this provider!')
return '--NO_KEY--'
return key['key']
@@ -87,4 +87,4 @@ class KeyManager:
manager = KeyManager()
if __name__ == '__main__':
asyncio.run(manager.delete_empty_keys())
asyncio.run(manager.import_all())

View file

@@ -8,8 +8,7 @@ async def error(code: int, message: str, tip: str) -> starlette.responses.Respon
'code': code,
'message': message,
'tip': tip,
'website': 'https://nova-oss.com',
'by': 'NovaOSS/Nova-API'
'powered_by': 'nova-api'
}}
return starlette.responses.Response(status_code=code, content=json.dumps(info))
@@ -20,5 +19,6 @@ async def yield_error(code: int, message: str, tip: str) -> str:
return json.dumps({
'code': code,
'message': message,
'tip': tip
'tip': tip,
'powered_by': 'nova-api'
})

View file

@@ -35,9 +35,9 @@ limiter = Limiter(
swallow_errors=True,
key_func=get_remote_address,
default_limits=[
'1/second',
'20/minute',
'300/hour'
'2/second',
'30/minute',
'400/hour'
])
app.state.limiter = limiter

10
api/providers/__init__.py Normal file
View file

@@ -0,0 +1,10 @@
# Provider registry for the API core; closed432 stays commented out
# until that provider is re-enabled.
from . import closed, closed4  # closed432

# Modules the router iterates over when selecting a provider.
MODULES = [
    closed,
    closed4,
    # closed432,
]

52
api/providers/__main__.py Normal file
View file

@@ -0,0 +1,52 @@
import os
import sys
from rich import print
def remove_duplicate_keys(file):
    """Remove duplicate lines from *file* in place.

    Uses dict.fromkeys so the first occurrence of each line keeps its
    position — the original set() version scrambled the output order
    nondeterministically on every run.
    """
    with open(file, 'r', encoding='utf8') as f:
        unique_lines = dict.fromkeys(f.readlines())

    with open(file, 'w', encoding='utf8') as f:
        f.writelines(unique_lines)
def _print_providers() -> None:
    """List every provider module (non-underscore .py file) in this package."""
    for file_name in os.listdir(os.path.dirname(__file__)):
        if file_name.endswith('.py') and not file_name.startswith('_'):
            print(file_name.split('.')[0])

try:
    provider_name = sys.argv[1]

    if provider_name == '--clear':
        # "--clear" de-duplicates every stored key file instead of
        # running a provider test request.
        for file in os.listdir('secret/'):
            if file.endswith('.txt'):
                remove_duplicate_keys(f'secret/{file}')
        sys.exit(0)  # was exit(); sys.exit is consistent with the other branches

except IndexError:
    # No provider given on the command line — show what is available.
    print('List of available providers:')
    _print_providers()
    sys.exit(0)

try:
    # NOTE(review): bare __import__ resolves against sys.path, not this
    # package — confirm this works when invoked as `python -m ...`.
    provider = __import__(provider_name)
except ModuleNotFoundError:
    print(f'Provider "{provider_name}" not found.')
    print('Available providers:')
    _print_providers()
    sys.exit(1)

# Optional second CLI argument picks the model; default to the provider's
# last (typically most capable) model.
model = sys.argv[2] if len(sys.argv) > 2 else provider.MODELS[-1]

print(f'{provider_name} @ {model}')
comp = provider.chat_completion(model=model)
print(comp)

35
api/providers/closed.py Normal file
View file

@@ -0,0 +1,35 @@
from .helpers import utils
# Capability flags read by the core API when routing requests.
AUTH = True
ORGANIC = True
CONTEXT = True
STREAMING = True
MODERATIONS = True

ENDPOINT = 'https://api.openai.com'
MODELS = utils.GPT_3


async def get_key() -> str:
    """Fetch a stored secret key for the 'closed' provider."""
    return await utils.random_secret_for('closed')


async def chat_completion(**kwargs):
    """Describe an upstream chat-completion request for this provider."""
    key = await get_key()

    return {
        'method': 'POST',
        'url': f'{ENDPOINT}/v1/chat/completions',
        'payload': kwargs,
        'headers': {'Authorization': f'Bearer {key}'},
        'provider_auth': f'closed>{key}',
    }


async def organify(request: dict) -> dict:
    """Point an organic (pass-through) request at this provider's endpoint."""
    key = await get_key()

    request['url'] = ENDPOINT + request['path']
    request['headers']['Authorization'] = f'Bearer {key}'
    request['provider_auth'] = f'closed>{key}'

    return request

35
api/providers/closed4.py Normal file
View file

@@ -0,0 +1,35 @@
from .helpers import utils
# Capability flags read by the core API when routing requests.
AUTH = True
ORGANIC = False
CONTEXT = True
STREAMING = True
MODERATIONS = True

ENDPOINT = 'https://api.openai.com'
MODELS = utils.GPT_4


async def get_key() -> str:
    """Fetch a stored secret key for the 'closed4' provider."""
    return await utils.random_secret_for('closed4')


async def chat_completion(**kwargs):
    """Describe an upstream chat-completion request for this provider."""
    key = await get_key()

    return {
        'method': 'POST',
        'url': f'{ENDPOINT}/v1/chat/completions',
        'payload': kwargs,
        'headers': {'Authorization': f'Bearer {key}'},
        'provider_auth': f'closed4>{key}',
    }


async def organify(request: dict) -> dict:
    """Point an organic (pass-through) request at this provider's endpoint."""
    key = await get_key()

    request['url'] = ENDPOINT + request['path']
    request['headers']['Authorization'] = f'Bearer {key}'
    request['provider_auth'] = f'closed4>{key}'

    return request

View file

@@ -0,0 +1,35 @@
from .helpers import utils
# Capability flags read by the core API when routing requests.
AUTH = True
ORGANIC = False
CONTEXT = True
STREAMING = True
MODERATIONS = False

ENDPOINT = 'https://api.openai.com'
MODELS = utils.GPT_4_32K


async def get_key() -> str:
    """Fetch a stored secret key for the 'closed432' provider."""
    return await utils.random_secret_for('closed432')


async def chat_completion(**kwargs):
    """Describe an upstream chat-completion request for this provider."""
    key = await get_key()

    return {
        'method': 'POST',
        'url': f'{ENDPOINT}/v1/chat/completions',
        'payload': kwargs,
        'headers': {'Authorization': f'Bearer {key}'},
        'provider_auth': f'closed432>{key}',
    }


async def organify(request: dict) -> dict:
    """Point an organic (pass-through) request at this provider's endpoint."""
    key = await get_key()

    request['url'] = ENDPOINT + request['path']
    request['headers']['Authorization'] = f'Bearer {key}'
    request['provider_auth'] = f'closed432>{key}'

    return request

View file

@@ -0,0 +1,37 @@
from db import providerkeys
# Model identifiers accepted for the GPT-3.5 tier.
GPT_3 = [
    'gpt-3.5-turbo',
    'gpt-3.5-turbo-16k',
    'gpt-3.5-turbo-0613',
    'gpt-3.5-turbo-0301',
    'gpt-3.5-turbo-16k-0613',
]

# GPT-4 tier: everything in GPT_3 plus the 8k-context GPT-4 models.
GPT_4 = GPT_3 + [
    'gpt-4',
    'gpt-4-0314',
    'gpt-4-0613',
]

# 32k-context tier: GPT_4 plus the gpt-4-32k variants.
GPT_4_32K = GPT_4 + [
    'gpt-4-32k',
    'gpt-4-32k-0314',
    'gpt-4-32k-0613',
]
async def conversation_to_prompt(conversation: list) -> str:
    """Flatten an OpenAI-style message list into a single prompt string,
    ending with an open assistant turn."""
    turns = [f'<|{msg["role"]}|>: {msg["content"]}\n' for msg in conversation]
    return ''.join(turns) + '<|assistant|>:'
async def random_secret_for(name: str) -> str:
    """Return a stored API key for provider *name* via the key manager.

    Raises:
        ValueError: when no keys are stored for the provider; the original
            lookup error is chained as ``__cause__`` so it is not lost.
    """
    try:
        return await providerkeys.manager.get_key(name)
    except ValueError as exc:
        # Chain the underlying error instead of discarding it.
        raise ValueError(f'Keys missing for "{name}" <no_keys>') from exc

View file

@@ -38,7 +38,6 @@ async def respond(
is_chat = False
model = None
is_stream = False
if 'chat/completions' in path:
is_chat = True
@@ -73,6 +72,13 @@ async def respond(
provider_name = provider_auth.split('>')[0]
provider_key = provider_auth.split('>')[1]
if provider_key == '--NO_KEY--':
yield await errors.yield_error(500,
'Sorry, our API seems to have issues connecting to our provider(s).',
'This most likely isn\'t your fault. Please try again later.'
)
return
target_request['headers'].update(target_request.get('headers', {}))
if target_request['method'] == 'GET' and not payload:
@@ -91,12 +97,13 @@ async def respond(
timeout=aiohttp.ClientTimeout(
connect=1.0,
total=float(os.getenv('TRANSFER_TIMEOUT', '500'))
),
)
) as response:
is_stream = response.content_type == 'text/event-stream'
if response.status == 429:
await keymanager.rate_limit_key(provider_name, provider_key)
print('[!] rate limit')
# await keymanager.rate_limit_key(provider_name, provider_key)
continue
if response.content_type == 'application/json':
@@ -112,12 +119,14 @@ async def respond(
critical_error = True
if critical_error:
print('[!] critical error')
continue
if response.ok:
server_json_response = client_json_response
else:
print('[!] non-ok response', client_json_response)
continue
if is_stream:
@@ -136,10 +145,6 @@ async def respond(
except Exception as exc:
print('[!] exception', exc)
if 'too many requests' in str(exc):
#!TODO
pass
continue
else:

View file

@@ -26,6 +26,12 @@ MESSAGES = [
api_endpoint = os.getenv('CHECKS_ENDPOINT', 'http://localhost:2332/v1')
async def _response_base_check(response: httpx.Response) -> None:
    """Raise ConnectionError carrying the API's JSON error body for non-2xx responses."""
    try:
        response.raise_for_status()
    except httpx.HTTPStatusError as exc:
        # Surface the response payload instead of a bare status error;
        # chain the original so the status context is preserved.
        raise ConnectionError(f'API returned an error: {response.json()}') from exc
async def test_server():
"""Tests if the API server is running."""
@@ -36,7 +42,7 @@ async def test_server():
url=f'{api_endpoint.replace("/v1", "")}',
timeout=3
)
response.raise_for_status()
await _response_base_check(response)
assert response.json()['ping'] == 'pong', 'The API did not return a correct response.'
except httpx.ConnectError as exc:
@@ -63,7 +69,7 @@ async def test_chat_non_stream_gpt4() -> float:
json=json_data,
timeout=10,
)
response.raise_for_status()
await _response_base_check(response)
assert '1337' in response.json()['choices'][0]['message']['content'], 'The API did not return a correct response.'
return time.perf_counter() - request_start
@@ -86,7 +92,7 @@ async def test_chat_stream_gpt3() -> float:
json=json_data,
timeout=10,
)
response.raise_for_status()
await _response_base_check(response)
chunks = []
resulting_text = ''
@@ -128,7 +134,7 @@ async def test_image_generation() -> float:
json=json_data,
timeout=10,
)
response.raise_for_status()
await _response_base_check(response)
assert '://' in response.json()['data'][0]['url']
return time.perf_counter() - request_start
@@ -166,7 +172,7 @@ async def test_function_calling():
json=json_data,
timeout=15,
)
response.raise_for_status()
await _response_base_check(response)
res = response.json()
output = json.loads(res['choices'][0]['message']['function_call']['arguments'])
@@ -185,7 +191,7 @@ async def test_models():
headers=HEADERS,
timeout=3
)
response.raise_for_status()
await _response_base_check(response)
res = response.json()
all_models = [model['id'] for model in res['data']]
@@ -208,20 +214,10 @@ async def demo():
else:
raise ConnectionError('API Server is not running.')
# print('[lightblue]Checking if function calling works...')
# print(await test_function_calling())
print('Checking non-streamed chat completions...')
print(await test_chat_non_stream_gpt4())
print('Checking streamed chat completions...')
print(await test_chat_stream_gpt3())
# print('[lightblue]Checking if image generation works...')
# print(await test_image_generation())
# print('Checking the models endpoint...')
# print(await test_models())
for func in [test_chat_non_stream_gpt4, test_chat_stream_gpt3]:
print(f'[*] {func.__name__}')
result = await func()
print(f'[+] {func.__name__} - {result}')
except Exception as exc:
print('[red]Error: ' + str(exc))