Mirror of https://github.com/NovaOSS/nova-api.git, synced 2024-11-25 12:33:58 +01:00
Made the added Azure providers simpler
This commit is contained in:
parent eb6768cae5
commit d1085b00c3
api/db/azurenodes.py (new file, 19 lines)
@@ -0,0 +1,19 @@
+import os
+import asyncio
+
+from dotenv import load_dotenv
+from motor.motor_asyncio import AsyncIOMotorClient
+
+load_dotenv()
+
+class AzureManager:
+    def __init__(self):
+        self.conn = AsyncIOMotorClient(os.environ['MONGO_URI'])
+
+    async def _get_collection(self, collection_name: str):
+        azure_db = conn[os.getenv('MONGO_NAME', 'nova-test')][collection_name]
+
+manager = AzureManager()
+
+if __name__ == '__main__':
+    print(asyncio.run(manager.get_entire_financial_history()))
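As committed, _get_collection assigns the collection through a bare conn name (the client is stored on self.conn) and returns nothing, and the __main__ block calls a get_entire_financial_history method that does not appear in this file. A hedged sketch of what a working accessor could look like, assuming the same environment variables and database layout as the committed file:

# Hedged sketch, not part of the commit: corrected collection accessor.
import os
from motor.motor_asyncio import AsyncIOMotorClient

class AzureManagerSketch:
    def __init__(self):
        self.conn = AsyncIOMotorClient(os.environ['MONGO_URI'])

    async def _get_collection(self, collection_name: str):
        # Use the client stored on the instance and return the collection handle.
        return self.conn[os.getenv('MONGO_NAME', 'nova-test')][collection_name]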
(deleted file, 12 lines)
@@ -1,12 +0,0 @@
-import os
-
-def find_project_root():
-    current_path = os.getcwd()
-    while not os.path.isfile(os.path.join(current_path, 'LICENSE')):
-        current_path = os.path.dirname(current_path)
-    return current_path
-
-root = find_project_root()
-
-if __name__ == '__main__':
-    print(find_project_root())
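The deleted helper located the repository root by walking up from the current working directory until a LICENSE file was found. A roughly equivalent pathlib sketch (hypothetical, not taken from the repo) with an explicit stop at the filesystem root:

# Hypothetical pathlib equivalent of the removed helper; assumes a LICENSE
# file still marks the project root.
from pathlib import Path

def find_project_root() -> Path:
    path = Path.cwd()
    while not (path / 'LICENSE').is_file():
        if path.parent == path:  # reached the filesystem root without a match
            raise FileNotFoundError('LICENSE not found in any parent directory')
        path = path.parent
    return path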
@@ -85,7 +85,7 @@ class KeyManager:
 
         for filename in os.listdir(os.path.join('api', 'secret')):
             if filename.endswith('.txt'):
-                async with open(os.path.join('api', 'secret', filename)) as f:
+                with open(os.path.join('api', 'secret', filename)) as f:
                     async for line in f:
                         if not line.strip():
                             continue
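The loop above loads provider keys from plain-text files under api/secret, one key per line, skipping blanks. A minimal standalone sketch of that pattern; the directory layout is taken from the hunk, the function name is hypothetical:

import os

def load_keys(secret_dir: str = os.path.join('api', 'secret')) -> dict:
    # Map provider name (file name without .txt) to its list of keys.
    keys = {}
    for filename in os.listdir(secret_dir):
        if not filename.endswith('.txt'):
            continue
        with open(os.path.join(secret_dir, filename)) as f:
            lines = [line.strip() for line in f if line.strip()]
        keys[filename[:-4]] = lines
    return keys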
@@ -21,7 +21,7 @@ with open(os.path.join(helpers.root, 'api', 'config', 'config.yml'), encoding='u
 
 class UserManager:
     """
-    ### Manager of all users in the database.
+    Manager of all users in the database.
     Following methods are available:
 
     - `_get_collection(collection_name)`
@@ -45,31 +45,35 @@ class UserManager:
         return collection#.find()
 
     async def create(self, discord_id: str = '') -> dict:
         db = await self._get_collection('users')
         chars = string.ascii_letters + string.digits
 
         infix = os.getenv('KEYGEN_INFIX', 'S3LFH0ST')
         suffix = ''.join(random.choices(chars, k=20))
         prefix = ''.join(random.choices(chars, k=20))
 
-        new_api_key = f'nv-{prefix}{infix}{suffix}'
+        new_api_key = f'nv2-{prefix}{infix}{suffix}'
 
-        new_user = {
-            'api_key': new_api_key,
-            'credits': credits_config['start-credits'],
-            'role': '',
-            'level': '',
-            'status': {
-                'active': True,
-                'ban_reason': '',
-            },
-            'auth': {
-                'discord': str(discord_id),
-                'github': None
-            }
-        }
+        existing_user = await self.user_by_discord_id(discord_id)
+        if existing_user: # just change api key
+            await db.update_one({'auth.discord': str(int(discord_id))}, {'$set': {'api_key': new_api_key}})
+        else:
+            new_user = {
+                'api_key': new_api_key,
+                'credits': credits_config['start-credits'],
+                'role': '',
+                'level': '',
+                'status': {
+                    'active': True,
+                    'ban_reason': '',
+                },
+                'auth': {
+                    'discord': str(discord_id),
+                    'github': None
+                }
+            }
 
-        db = await self._get_collection('users')
-        await db.insert_one(new_user)
+            await db.insert_one(new_user)
         user = await db.find_one({'api_key': new_api_key})
         return user
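The create() change switches new keys to an nv2- prefix and only inserts a user document when no record exists for the Discord ID; otherwise it just rotates the key. The key itself is a random 20-character prefix, the KEYGEN_INFIX environment value (default 'S3LFH0ST'), and a random 20-character suffix. A small standalone sketch of just that key construction:

import os
import random
import string

def generate_api_key() -> str:
    # Same construction as UserManager.create: nv2-<20 chars><infix><20 chars>.
    chars = string.ascii_letters + string.digits
    infix = os.getenv('KEYGEN_INFIX', 'S3LFH0ST')
    prefix = ''.join(random.choices(chars, k=20))
    suffix = ''.join(random.choices(chars, k=20))
    return f'nv2-{prefix}{infix}{suffix}'

print(generate_api_key())  # e.g. nv2-<random>S3LFH0ST<random>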
@@ -12,15 +12,11 @@ async def _get_module_name(module) -> str:
async def balance_chat_request(payload: dict) -> dict:
    """
    Load balance the chat completion request between chat providers.
    Providers are sorted by streaming and models. Target (provider.chat_completion) is returned
    """

    providers_available = []

    for provider_module in providers.MODULES:
        if payload['stream'] and not provider_module.STREAMING:
            continue

        if payload['model'] not in provider_module.MODELS:
            continue
@@ -53,10 +49,6 @@ async def balance_organic_request(request: dict) -> dict:
        if not provider_module.ORGANIC:
            continue

        if '/moderations' in request['path']:
            if not provider_module.MODERATIONS:
                continue

        providers_available.append(provider_module)

    provider = random.choice(providers_available)
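Both balancer functions follow the same pattern: filter providers.MODULES down to the modules that can serve the request, then pick one at random. A condensed, self-contained sketch of that selection logic, with hypothetical stand-in provider objects (the real ones are modules in api/providers):

import random

# Hypothetical stand-ins for provider modules; only the attributes used by the
# balancer are modelled here.
class Provider:
    def __init__(self, name: str, streaming: bool, models: list):
        self.name = name
        self.STREAMING = streaming
        self.MODELS = models

MODULES = [
    Provider('azure-nva1', True, ['gpt-4', 'gpt-4-azure']),
    Provider('ai.ls', True, ['gpt-3.5-turbo']),
]

def pick_chat_provider(payload: dict) -> Provider:
    # Same filtering as balance_chat_request: drop providers that cannot stream
    # (when streaming was requested) or do not offer the requested model.
    available = [
        m for m in MODULES
        if (not payload['stream'] or m.STREAMING) and payload['model'] in m.MODELS
    ]
    return random.choice(available)

print(pick_chat_provider({'model': 'gpt-4', 'stream': True}).name)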
@@ -49,7 +49,10 @@ async def main():
            headers=req['headers'],
            json=req['payload'],
        ) as response:
-            res_json = await response.json()
+            try:
+                res_json = await response.json()
+            except aiohttp.ContentTypeError:
+                res_json = await response.text()
            print(response.status, res_json)

asyncio.run(main())
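The added try/except covers providers that answer with non-JSON bodies (for example an HTML error page), where aiohttp's response.json() raises ContentTypeError. A minimal isolated sketch of the same pattern; the URL is a placeholder:

import asyncio
import aiohttp

async def fetch_json_or_text(url: str):
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            try:
                body = await response.json()   # normal JSON reply
            except aiohttp.ContentTypeError:
                body = await response.text()   # fall back to the raw body
            return response.status, body

print(asyncio.run(fetch_json_or_text('https://example.com/')))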
@@ -1,25 +1,23 @@
 from .helpers import utils
 
 AUTH = True
 ORGANIC = False
 STREAMING = True
 MODELS = [
     'gpt-3.5-turbo-0613',
     'gpt-3.5-turbo',
     'gpt-3.5-turbo-0301',
     'gpt-3.5-turbo-16k-0613'
 ]
 
 async def chat_completion(**kwargs):
     payload = kwargs
-    key = await utils.random_secret_for('webraft')
+    key = await utils.random_secret_for('ai.ls')
 
     return {
         'method': 'POST',
-        'url': 'https://thirdparty.webraft.in/v1/chat/completions',
+        'url': 'https://api.caipacity.com/v1/chat/completions',
         'payload': payload,
         'headers': {
             'Content-Type': 'application/json',
             'Authorization': f'Bearer {key}'
         },
-        'provider_auth': f'webraft>{key}'
+        'provider_auth': f'ai.ls>{key}'
     }
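Each provider module's chat_completion returns a plain request description that the gateway can pass to its HTTP client: method, URL, JSON payload, headers, and a provider_auth tag recording which provider and key were used. A hedged sketch of that shape as a TypedDict; the type itself is illustrative, only the keys come from the diff:

from typing import Any, Dict, TypedDict

class ProviderRequest(TypedDict):
    method: str              # e.g. 'POST'
    url: str                 # full completions endpoint
    payload: Dict[str, Any]  # OpenAI-style request body
    headers: Dict[str, str]  # auth and content-type headers
    provider_auth: str       # '<provider>><key>', used to track which key served the call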
@@ -1,8 +1,6 @@
 from .helpers import utils
 
 AUTH = True # If the provider requires an API key
-ORGANIC = False # If all OpenAI endpoints are available on the provider. If false, only a chat completions are available.
 STREAMING = True # If the provider supports streaming completions
+ORGANIC = False # If all OpenAI endpoints should be used for the provider. If false, only a chat completions are used for this provider.
 ENDPOINT = 'https://nova-00001.openai.azure.com' # (Important: read below) The endpoint for the provider.
-#! IMPORTANT: If this is an ORGANIC provider, this should be the endpoint for the API with anything BEFORE the "/v1".
 MODELS = [
@@ -11,21 +9,6 @@ MODELS = [
     'gpt-4',
     'gpt-4-32k'
 ]
 MODELS += [f'{model}-azure' for model in MODELS]
 
-AZURE_API = '2023-08-01-preview'
-
 async def chat_completion(**payload):
-    key = await utils.random_secret_for('azure-nva1')
-
-    deployment = payload['model'].replace('.', '').replace('-azure', '')
-
-    return {
-        'method': 'POST',
-        'url': f'{ENDPOINT}/openai/deployments/{deployment}/chat/completions?api-version={AZURE_API}',
-        'payload': payload,
-        'headers': {
-            'api-key': key
-        },
-        'provider_auth': f'azure-nva1>{key}'
-    }
+    return await utils.azure_chat_completion(ENDPOINT, 'azure-nva1', payload)
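MODELS += [f'{model}-azure' for model in MODELS] registers an '-azure' alias for every base model, and the shared helper later strips dots and the '-azure' suffix to derive the Azure deployment name. A small worked example of that mapping:

MODELS = ['gpt-3.5-turbo', 'gpt-4', 'gpt-4-32k']
MODELS += [f'{model}-azure' for model in MODELS]
# MODELS now also contains 'gpt-3.5-turbo-azure', 'gpt-4-azure', 'gpt-4-32k-azure'

def deployment_name(model: str) -> str:
    # Same derivation as in utils.azure_chat_completion.
    return model.replace('.', '').replace('-azure', '')

print(deployment_name('gpt-3.5-turbo-azure'))  # -> 'gpt-35-turbo'
print(deployment_name('gpt-4-32k'))            # -> 'gpt-4-32k'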
@@ -1,8 +1,6 @@
from .helpers import utils

AUTH = True
ORGANIC = True
STREAMING = True
ENDPOINT = 'https://api.openai.com'
MODELS = utils.GPT_3
@@ -1,8 +1,6 @@
from .helpers import utils

AUTH = True
ORGANIC = False
STREAMING = True
ENDPOINT = 'https://api.openai.com'
MODELS = utils.GPT_4
@@ -1,8 +1,6 @@
from .helpers import utils

AUTH = True
ORGANIC = False
STREAMING = True
ENDPOINT = 'https://api.openai.com'
MODELS = utils.GPT_4_32K
@@ -23,5 +23,22 @@ GPT_4_32K = GPT_4 + [
     'gpt-4-32k-0613',
 ]
 
+AZURE_API = '2023-08-01-preview'
+
 async def random_secret_for(name: str) -> str:
     return await providerkeys.manager.get_key(name)
+
+async def azure_chat_completion(endpoint: str, provider: str, payload: dict) -> dict:
+    key = await random_secret_for(provider)
+    model = payload['model']
+    deployment = model.replace('.', '').replace('-azure', '')
+
+    return {
+        'method': 'POST',
+        'url': f'{endpoint}/openai/deployments/{deployment}/chat/completions?api-version={AZURE_API}',
+        'payload': payload,
+        'headers': {
+            'api-key': key
+        },
+        'provider_auth': f'{provider}>{key}'
+    }
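For a concrete picture of what the new shared helper returns, here is a hypothetical result for one call; the endpoint comes from the azure-nva1 module above, while the key value and message list are made up:

# Hypothetical result of
#   await azure_chat_completion('https://nova-00001.openai.azure.com', 'azure-nva1',
#                               {'model': 'gpt-3.5-turbo-azure', 'messages': []})
# assuming random_secret_for('azure-nva1') returned the made-up key 'abc123':
example_request = {
    'method': 'POST',
    'url': ('https://nova-00001.openai.azure.com/openai/deployments/'
            'gpt-35-turbo/chat/completions?api-version=2023-08-01-preview'),
    'payload': {'model': 'gpt-3.5-turbo-azure', 'messages': []},
    'headers': {'api-key': 'abc123'},
    'provider_auth': 'azure-nva1>abc123',
}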
@@ -1,8 +1,6 @@
from .helpers import utils

AUTH = True
ORGANIC = False
STREAMING = True
MODELS = ['llama-2-7b-chat']

async def chat_completion(**kwargs):