Compare commits

...

2 commits

Author SHA1 Message Date
nsde d1c7180d5d Small fixes 2023-10-14 01:23:25 +02:00
nsde d1085b00c3 Made added azure providers simpler 2023-10-14 01:15:59 +02:00
13 changed files with 69 additions and 71 deletions

19 api/db/azurenodes.py Normal file
View file

@@ -0,0 +1,19 @@
+import os
+import asyncio
+
+from dotenv import load_dotenv
+from motor.motor_asyncio import AsyncIOMotorClient
+
+load_dotenv()
+
+class AzureManager:
+    def __init__(self):
+        self.conn = AsyncIOMotorClient(os.environ['MONGO_URI'])
+
+    async def _get_collection(self, collection_name: str):
+        azure_db = conn[os.getenv('MONGO_NAME', 'nova-test')][collection_name]
+
+manager = AzureManager()
+
+if __name__ == '__main__':
+    print(asyncio.run(manager.get_entire_financial_history()))
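
Note: the new file appears truncated in this view: _get_collection references conn instead of self.conn and returns nothing, and get_entire_financial_history() is called but never shown. A minimal sketch of how the manager plausibly fits together with Motor; the return statement, the method body, and the 'financials' collection name are assumptions, not part of the commit:

import os
import asyncio

from dotenv import load_dotenv
from motor.motor_asyncio import AsyncIOMotorClient

load_dotenv()

class AzureManager:
    def __init__(self):
        # One client per manager; Motor clients can be shared across coroutines.
        self.conn = AsyncIOMotorClient(os.environ['MONGO_URI'])

    async def _get_collection(self, collection_name: str):
        # Assumption: the real code uses self.conn and returns the collection handle.
        return self.conn[os.getenv('MONGO_NAME', 'nova-test')][collection_name]

    async def get_entire_financial_history(self) -> list:
        # Hypothetical body; the method is referenced below but not shown in the hunk.
        # 'financials' is a placeholder collection name.
        collection = await self._get_collection('financials')
        return await collection.find().to_list(length=None)

manager = AzureManager()

if __name__ == '__main__':
    print(asyncio.run(manager.get_entire_financial_history()))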

View file

@@ -9,4 +9,4 @@ def find_project_root():
 root = find_project_root()

 if __name__ == '__main__':
     print(find_project_root())

View file

@@ -85,7 +85,7 @@ class KeyManager:
        for filename in os.listdir(os.path.join('api', 'secret')):
            if filename.endswith('.txt'):
-                async open(os.path.join('api', 'secret', filename)) as f:
+                with open(os.path.join('api', 'secret', filename)) as f:
                    async for line in f:
                        if not line.strip():
                            continue

View file

@@ -21,17 +21,7 @@ with open(os.path.join(helpers.root, 'api', 'config', 'config.yml'), encoding='u
 class UserManager:
     """
-    ### Manager of all users in the database.
-
-    Following methods are available:
-    - `_get_collection(collection_name)`
-    - `create(discord_id)`
-    - `user_by_id(user_id)`
-    - `user_by_discord_id(discord_id)`
-    - `user_by_api_key(api_key)`
-    - `update_by_id(user_id, new_obj)`
-    - `update_by_filter(filter_object, new_obj)`
-    - `delete(user_id)`
+    Manager of all users in the database.
     """

     def __init__(self):
@@ -45,31 +35,35 @@ class UserManager:
         return collection#.find()

     async def create(self, discord_id: str = '') -> dict:
+        db = await self._get_collection('users')
+
         chars = string.ascii_letters + string.digits
         infix = os.getenv('KEYGEN_INFIX', 'S3LFH0ST')
         suffix = ''.join(random.choices(chars, k=20))
         prefix = ''.join(random.choices(chars, k=20))

-        new_api_key = f'nv-{prefix}{infix}{suffix}'
+        new_api_key = f'nv2-{prefix}{infix}{suffix}'

-        new_user = {
-            'api_key': new_api_key,
-            'credits': credits_config['start-credits'],
-            'role': '',
-            'level': '',
-            'status': {
-                'active': True,
-                'ban_reason': '',
-            },
-            'auth': {
-                'discord': str(discord_id),
-                'github': None
-            }
-        }
-
-        db = await self._get_collection('users')
-        await db.insert_one(new_user)
+        existing_user = await self.user_by_discord_id(discord_id)
+        if existing_user: # just change api key
+            await db.update_one({'auth.discord': str(int(discord_id))}, {'$set': {'api_key': new_api_key}})
+        else:
+            new_user = {
+                'api_key': new_api_key,
+                'credits': credits_config['start-credits'],
+                'role': '',
+                'level': '',
+                'status': {
+                    'active': True,
+                    'ban_reason': '',
+                },
+                'auth': {
+                    'discord': str(discord_id),
+                    'github': None
+                }
+            }
+
+            await db.insert_one(new_user)

         user = await db.find_one({'api_key': new_api_key})
         return user
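
Note: after this change create() doubles as a key rotation: an existing user (matched by Discord ID) only gets a fresh nv2- API key, while an unknown ID gets a new document. A hypothetical usage sketch, assuming a reachable MongoDB and that UserManager is importable from this module:

import asyncio

async def demo():
    manager = UserManager()

    first = await manager.create(discord_id='1234567890')   # inserts a new user
    second = await manager.create(discord_id='1234567890')  # same user, key rotated

    assert first['_id'] == second['_id']
    assert first['api_key'] != second['api_key']
    assert second['api_key'].startswith('nv2-')

asyncio.run(demo())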

View file

@@ -12,15 +12,11 @@ async def _get_module_name(module) -> str:
 async def balance_chat_request(payload: dict) -> dict:
     """
     Load balance the chat completion request between chat providers.
-    Providers are sorted by streaming and models. Target (provider.chat_completion) is returned
     """

     providers_available = []

     for provider_module in providers.MODULES:
-        if payload['stream'] and not provider_module.STREAMING:
-            continue
-
         if payload['model'] not in provider_module.MODELS:
             continue
@@ -53,10 +49,6 @@ async def balance_organic_request(request: dict) -> dict:
         if not provider_module.ORGANIC:
             continue

-        if '/moderations' in request['path']:
-            if not provider_module.MODERATIONS:
-                continue
-
         providers_available.append(provider_module)

     provider = random.choice(providers_available)
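
Note: with the STREAMING and MODERATIONS checks removed, provider selection reduces to filtering modules by the requested model (plus the ORGANIC flag on the organic route) and drawing one at random. A minimal sketch of that selection step, with illustrative names:

import random

def pick_chat_provider(modules, model: str):
    # Keep only modules that serve the requested model, mirroring the filter above.
    candidates = [m for m in modules if model in m.MODELS]

    if not candidates:
        raise ValueError(f'no provider module serves {model!r}')

    # A uniform random choice spreads load across the remaining providers.
    return random.choice(candidates)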

View file

@@ -49,7 +49,10 @@ async def main():
                headers=req['headers'],
                json=req['payload'],
            ) as response:
-                res_json = await response.json()
+                try:
+                    res_json = await response.json()
+                except aiohttp.ContentTypeError:
+                    res_json = await response.text()
                print(response.status, res_json)

 asyncio.run(main())
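
Note: the added try/except covers providers that reply with a non-JSON content type: aiohttp's response.json() raises aiohttp.ContentTypeError in that case, so the checker falls back to the raw body text. A standalone sketch of the same pattern; the URL is a placeholder:

import asyncio
import aiohttp

async def fetch_json_or_text(url: str):
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            try:
                body = await response.json()
            except aiohttp.ContentTypeError:
                # The server did not declare application/json; keep the raw text instead.
                body = await response.text()
            return response.status, body

if __name__ == '__main__':
    print(asyncio.run(fetch_json_or_text('https://example.com/')))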

View file

@@ -1,25 +1,23 @@
 from .helpers import utils

-AUTH = True
 ORGANIC = False
-STREAMING = True

 MODELS = [
-    'gpt-3.5-turbo-0613',
+    'gpt-3.5-turbo',
     'gpt-3.5-turbo-0301',
     'gpt-3.5-turbo-16k-0613'
 ]

 async def chat_completion(**kwargs):
     payload = kwargs
-    key = await utils.random_secret_for('webraft')
+    key = await utils.random_secret_for('ai.ls')

     return {
         'method': 'POST',
-        'url': 'https://thirdparty.webraft.in/v1/chat/completions',
+        'url': 'https://api.caipacity.com/v1/chat/completions',
         'payload': payload,
         'headers': {
             'Content-Type': 'application/json',
             'Authorization': f'Bearer {key}'
         },
-        'provider_auth': f'webraft>{key}'
+        'provider_auth': f'ai.ls>{key}'
     }

View file

@@ -1,8 +1,6 @@
 from .helpers import utils

-AUTH = True # If the provider requires an API key
-ORGANIC = False # If all OpenAI endpoints are available on the provider. If false, only a chat completions are available.
-STREAMING = True # If the provider supports streaming completions
+ORGANIC = False # If all OpenAI endpoints should be used for the provider. If false, only a chat completions are used for this provider.

 ENDPOINT = 'https://nova-00001.openai.azure.com' # (Important: read below) The endpoint for the provider.
 #! IMPORTANT: If this is an ORGANIC provider, this should be the endpoint for the API with anything BEFORE the "/v1".
@@ -11,21 +9,6 @@ MODELS = [
     'gpt-4',
     'gpt-4-32k'
 ]
-MODELS += [f'{model}-azure' for model in MODELS]
-
-AZURE_API = '2023-08-01-preview'

 async def chat_completion(**payload):
-    key = await utils.random_secret_for('azure-nva1')
-    deployment = payload['model'].replace('.', '').replace('-azure', '')
-
-    return {
-        'method': 'POST',
-        'url': f'{ENDPOINT}/openai/deployments/{deployment}/chat/completions?api-version={AZURE_API}',
-        'payload': payload,
-        'headers': {
-            'api-key': key
-        },
-        'provider_auth': f'azure-nva1>{key}'
-    }
+    return await utils.azure_chat_completion(ENDPOINT, 'azure-nva1', payload)
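
Note: the inline comments in this file describe the simplified provider-module contract: ORGANIC marks whether every OpenAI endpoint is proxied or only chat completions, ENDPOINT is the base URL (everything before /v1 for organic providers), MODELS lists the served model ids, and chat_completion builds a request dict. A minimal sketch of a new non-Azure provider under that contract; the provider name 'example', its URL, and the Bearer-auth scheme are placeholders, not part of this diff:

from .helpers import utils

ORGANIC = False  # Only chat completions are routed to this provider.

ENDPOINT = 'https://example-provider.invalid'  # placeholder endpoint, not a real provider
MODELS = ['gpt-3.5-turbo']

async def chat_completion(**payload):
    # Hypothetical provider named 'example'; the key store and request-dict shape
    # follow the other provider modules in this diff.
    key = await utils.random_secret_for('example')

    return {
        'method': 'POST',
        'url': f'{ENDPOINT}/v1/chat/completions',
        'payload': payload,
        'headers': {
            'Content-Type': 'application/json',
            'Authorization': f'Bearer {key}'
        },
        'provider_auth': f'example>{key}'
    }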

View file

@@ -1,8 +1,6 @@
 from .helpers import utils

-AUTH = True
 ORGANIC = True
-STREAMING = True

 ENDPOINT = 'https://api.openai.com'
 MODELS = utils.GPT_3

View file

@@ -1,8 +1,6 @@
 from .helpers import utils

-AUTH = True
 ORGANIC = False
-STREAMING = True

 ENDPOINT = 'https://api.openai.com'
 MODELS = utils.GPT_4

View file

@@ -1,8 +1,6 @@
 from .helpers import utils

-AUTH = True
 ORGANIC = False
-STREAMING = True

 ENDPOINT = 'https://api.openai.com'
 MODELS = utils.GPT_4_32K

View file

@@ -23,5 +23,22 @@ GPT_4_32K = GPT_4 + [
     'gpt-4-32k-0613',
 ]

+AZURE_API = '2023-08-01-preview'
+
 async def random_secret_for(name: str) -> str:
     return await providerkeys.manager.get_key(name)
+
+async def azure_chat_completion(endpoint: str, provider: str, payload: dict) -> dict:
+    key = await random_secret_for(provider)
+    model = payload['model']
+    deployment = model.replace('.', '').replace('-azure', '')
+
+    return {
+        'method': 'POST',
+        'url': f'{endpoint}/openai/deployments/{deployment}/chat/completions?api-version={AZURE_API}',
+        'payload': payload,
+        'headers': {
+            'api-key': key
+        },
+        'provider_auth': f'{provider}>{key}'
+    }
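
Note: azure_chat_completion derives the Azure deployment name from the OpenAI-style model id by stripping dots and the '-azure' suffix, then targets the deployments endpoint with the pinned api-version. A self-contained illustration of the URL this produces for one of the listed models; ENDPOINT is the azure-nva1 value from this diff:

AZURE_API = '2023-08-01-preview'
ENDPOINT = 'https://nova-00001.openai.azure.com'

model = 'gpt-3.5-turbo-azure'
deployment = model.replace('.', '').replace('-azure', '')  # -> 'gpt-35-turbo'

print(f'{ENDPOINT}/openai/deployments/{deployment}/chat/completions?api-version={AZURE_API}')
# https://nova-00001.openai.azure.com/openai/deployments/gpt-35-turbo/chat/completions?api-version=2023-08-01-preview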

View file

@@ -1,8 +1,6 @@
 from .helpers import utils

-AUTH = True
 ORGANIC = False
-STREAMING = True

 MODELS = ['llama-2-7b-chat']

 async def chat_completion(**kwargs):