Compare commits


No commits in common. "45c6a14dc05fa9a4dfbbe95b6b5dd2ab93b27fba" and "d25574274328ce10627764ee4987d8ceaa6b561a" have entirely different histories.

9 changed files with 29 additions and 59 deletions


@@ -65,31 +65,24 @@ This one's code can be found in the following repository: [github.com/novaoss/no
 # Setup
 ## Requirements
-- newest **Python** version
-- newest Python **pip** version
-- **MongoDB** database
-- `uvicorn` in your system package manager
+- Python 3.9+
+- pip
+- MongoDB database
+- `uvicorn`
 ## Recommended
 - Setup of the other infrastructure
 - `git` (for updates)
 - `screen` (for production)
 - Cloudflare (for security, anti-DDoS, etc.) - we fully support Cloudflare
-- proxies, in case you need to protect your privacy from authorities (China, Iran, ...)
 ## Staging System
 This repository has an integrated staging system. It's a simple system that allows you to test the API server before deploying it to production.
 You should definitely set up two databases on MongoDB: `nova-core` and `nova-test`. Please note that `nova-core` is always used for `providerkeys`.
-Put your production env in `env/.prod.env` and modify the values from the test `.env` to your liking:
-- Set `MONGO_NAME` to `nova-core`, which is your database name for the production mode.
-- Set `CHECKS_ENDPOINT` to `http://localhost:2333` (or the production port you set for `nova-api`)
-**Warning -** always make sure to update your production `.env` (`env/.prod.env`), too!
-Your test `.env` file should be placed in here.
+Put your production `.env` file in `env/.prod.env`. Your test `.env` file should be in `.env`.
 Running `PUSH_TO_PRODUCTION.sh` will:
 - kill port `2333` (production)
 - remove all contents of the production directory, set to `/home/nova-prod/` (feel free to change it)
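To make the staging setup described in this hunk concrete, here is a minimal sketch of selecting between the test and production env files; the `NOVA_ENV` switch and the use of `python-dotenv` are assumptions for illustration, not code from the repository.

```python
# Hedged sketch: pick the env file described above, then read the documented values.
# NOVA_ENV and python-dotenv are illustrative assumptions, not part of this repo.
import os
from dotenv import load_dotenv

env_file = 'env/.prod.env' if os.getenv('NOVA_ENV') == 'prod' else '.env'
load_dotenv(env_file)

mongo_name = os.getenv('MONGO_NAME', 'nova-test')                      # 'nova-core' in production
checks_endpoint = os.getenv('CHECKS_ENDPOINT', 'http://localhost:2333')
```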
@@ -137,8 +130,6 @@ Create a `.env` file, make sure not to reveal any of its contents to anyone, and
 ### Database
 Set up a MongoDB database and set `MONGO_URI` to the MongoDB database connection URI. Quotation marks are definitely recommended here!
-Then set `MONGO_NAME` to `nova-test`, which is your database name for the tests.
 ### Proxy (optional)
 - `PROXY_TYPE` (optional, defaults to `socks.PROXY_TYPE_HTTP`): the type of proxy - can be `http`, `https`, `socks4`, `socks5`, `4` or `5`, etc...
 - `PROXY_HOST`: the proxy host (host domain or IP address), without port!
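As a quick illustration of the database settings in this hunk, a hedged sketch of opening the configured database with an async MongoDB client; the choice of `motor` and the `users` collection name are assumptions, not taken from the repository.

```python
# Hedged sketch: connect to MongoDB using the MONGO_URI / MONGO_NAME values described above.
# motor and the 'users' collection name are illustrative assumptions.
import os
from typing import Optional
from motor.motor_asyncio import AsyncIOMotorClient

client = AsyncIOMotorClient(os.environ['MONGO_URI'])
db = client[os.getenv('MONGO_NAME', 'nova-test')]   # 'nova-core' in production

async def find_user(api_key: str) -> Optional[dict]:
    return await db['users'].find_one({'api_key': api_key})
```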
@@ -191,17 +182,13 @@ You can also just add the *beginning* of an API address, like `12.123.` (without
 ### Core Keys
 `CORE_API_KEY` specifies the **very secret key** which is needed to access the entire user database etc.
-### Checks
 `NOVA_KEY` is the API key which is used in tests. It should be one with tons of credits.
-`CHECKS_ENDPOINT` is the endpoint the checks are run against.
 ### Webhooks
 `DISCORD_WEBHOOK__USER_CREATED` is the Discord webhook URL for when a user is created.
 `DISCORD_WEBHOOK__API_ISSUE` is the Discord webhook URL for when an API issue occurs.
 ### Other
-`MODERATION_DEBUG_KEY` can be almost any string (avoid spaces or special characters) - users can add `#` + this key to their API key (e.g. `Bearer nv-123#modkey` as the `Authorization` header) to bypass the moderation checks. This is especially useful if the moderation is too sensitive and can be disabled for certain trusted users.
 `KEYGEN_INFIX` can be almost any string (avoid spaces or special characters) - this string will be put in the middle of every NovaAI API key which is generated. This is useful for identifying the source of the key using e.g. RegEx.
 ## Misc
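For the `MODERATION_DEBUG_KEY` mechanism mentioned above (removed on the right-hand side), a minimal sketch of how a `#`-tagged key can be split into the actual key and its tags; the helper name and return shape are illustrative, not taken from the codebase.

```python
# Hedged sketch: split 'nv-123#modkey' into the API key and its tags,
# then decide whether moderation can be skipped. Names are illustrative only.
import os

def split_key_tags(raw_key: str):
    key, *key_tags = raw_key.split('#')
    return key, key_tags

moderation_debug_key = os.getenv('MODERATION_DEBUG_KEY')

key, key_tags = split_key_tags('nv-123#modkey')
skip_moderation = bool(moderation_debug_key) and moderation_debug_key in key_tags
```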


@@ -25,7 +25,7 @@ models = [model['id'] for model in models_list['data']]
 with open(os.path.join('config', 'config.yml'), encoding='utf8') as f:
     config = yaml.safe_load(f)
-moderation_debug_key = os.getenv('MODERATION_DEBUG_KEY')
+moderation_debug_key_key = os.getenv('MODERATION_DEBUG_KEY')
 async def handle(incoming_request: fastapi.Request):
     """Transfer a streaming response
@@ -71,10 +71,10 @@ async def handle(incoming_request: fastapi.Request):
     if ban_reason:
         return await errors.error(403, f'Your NovaAI account has been banned. Reason: \'{ban_reason}\'.', 'Contact the staff for an appeal.')
-    is_enterprise_key = 'enterprise' in user.get('role', 'default')  # Checking for enterprise status
-    if path.startswith('/enterprise/v1') and not is_enterprise_key:
-        return await errors.error(403, 'Enterprise API is not available for your API key.', 'Contact the staff for an upgrade.')
+    enterprise_keys = os.environ.get('ENTERPRISE_KEYS')
+    if path.startswith('/enterprise/v1') and user.get('api_key') not in enterprise_keys.split():
+        return await errors.error(403, 'Enterprise API is not available.', 'Contact the staff for an upgrade.')
     if 'account/credits' in path:
         return fastapi.responses.JSONResponse({'credits': user['credits']})
@@ -87,13 +87,10 @@ async def handle(incoming_request: fastapi.Request):
     role = user.get('role', 'default')
-    if 'enterprise' in role:
-        role_cost_multiplier = 0.1
-    else:
-        try:
-            role_cost_multiplier = config['roles'][role]['bonus']
-        except KeyError:
-            role_cost_multiplier = 1
+    try:
+        role_cost_multiplier = config['roles'][role]['bonus']
+    except KeyError:
+        role_cost_multiplier = 1
     cost = round(cost * role_cost_multiplier)
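For readers following the cost logic above, a hedged sketch of the kind of `roles` mapping in `config.yml` that the `bonus` lookup assumes; the role names and multiplier values are illustrative, not taken from this diff.

```python
# Hedged sketch: the shape of config['roles'] the bonus lookup above expects.
# Role names and multipliers are illustrative only.
import yaml

config = yaml.safe_load("""
roles:
  enterprise:
    bonus: 0.1
  default:
    bonus: 1
""")

role = 'enterprise'
try:
    role_cost_multiplier = config['roles'][role]['bonus']
except KeyError:
    role_cost_multiplier = 1   # unknown roles pay the full cost
```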
@@ -127,7 +124,7 @@ async def handle(incoming_request: fastapi.Request):
     policy_violation = False
-    if not (moderation_debug_key and moderation_debug_key in key_tags and 'gpt-3' in payload.get('model', '')):
+    if not (moderation_debug_key_key and moderation_debug_key_key in key_tags and 'gpt-3' in payload.get('model', '')):
         if '/moderations' not in path:
             inp = ''


@@ -1,2 +1,2 @@
-from . import ails, closed, closed4
-MODULES = [ails, closed, closed4]
+from . import azure
+MODULES = [azure]
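A hedged sketch of how a `MODULES` registry like this is typically consumed by the load balancer referenced later in this diff; the selection logic, import path, and function name below are assumptions, not code from `load_balancing`.

```python
# Hedged sketch: pick a provider module that offers the requested model.
# The import path and the random selection are illustrative assumptions.
import random
from providers import MODULES

async def balance_chat_request(payload: dict) -> dict:
    candidates = [module for module in MODULES if payload['model'] in module.MODELS]
    if not candidates:
        raise ValueError(f"no provider offers model {payload['model']!r}")
    provider = random.choice(candidates)
    # Each provider's chat_completion returns a prepared request dict (method, url, ...).
    return await provider.chat_completion(**payload)
```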


@@ -3,10 +3,8 @@ from .helpers import utils
 ORGANIC = False
 MODELS = [
     'gpt-3.5-turbo',
-    'gpt-3.5-turbo-0613',
-    'gpt-4',
-    'gpt-3.5-turbo-16k',
-    'gpt-4-0613'
+    'gpt-3.5-turbo-0301',
+    'gpt-3.5-turbo-16k-0613'
 ]
 async def chat_completion(**kwargs):


@@ -1,14 +1,13 @@
 from .helpers import utils
 ORGANIC = False  # If all OpenAI endpoints should be used for the provider. If false, only chat completions are used for this provider.
-ENDPOINT = 'https://nova-00003.openai.azure.com'  # (Important: read below) The endpoint for the provider.
+ENDPOINT = 'https://nova-00001.openai.azure.com'  # (Important: read below) The endpoint for the provider.
 #! IMPORTANT: If this is an ORGANIC provider, this should be the endpoint for the API with anything BEFORE the "/v1".
 MODELS = [
     'gpt-3.5-turbo',
-    # 'gpt-3.5-turbo-16k',
-    # 'gpt-3.5-turbo-instruct'
-    # 'gpt-4',
-    # 'gpt-4-32k'
+    'gpt-3.5-turbo-16k',
+    'gpt-4',
+    'gpt-4-32k'
 ]
 async def chat_completion(**payload):


@@ -31,8 +31,7 @@ async def random_secret_for(name: str) -> str:
 async def azure_chat_completion(endpoint: str, provider: str, payload: dict) -> dict:
     key = await random_secret_for(provider)
     model = payload['model']
-    del payload['model']
-    deployment = model.replace('.', '')
+    deployment = model.replace('.', '').replace('-azure', '')
     return {
         'method': 'POST',
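For context on the deployment name computed above, a hedged sketch of how a deployment-based Azure OpenAI chat-completions request is usually addressed; the URL path, `api-version` value, and header layout are assumptions about the Azure API, not taken from this file.

```python
# Hedged sketch: an Azure OpenAI request built from the endpoint, deployment and key above.
# The api-version is illustrative; use the version your Azure resource actually supports.
def build_azure_request(endpoint: str, deployment: str, key: str, payload: dict) -> dict:
    return {
        'method': 'POST',
        'url': f'{endpoint}/openai/deployments/{deployment}/chat/completions?api-version=2023-05-15',
        'headers': {'api-key': key, 'Content-Type': 'application/json'},
        'payload': payload,
    }
```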


@@ -122,7 +122,7 @@ def get_proxy() -> Proxy:
     return Proxy(
         proxy_type=os.getenv('PROXY_TYPE', 'http'),
-        host_or_ip=os.environ['PROXY_HOST'],
+        host_or_ip=os.getenv('PROXY_HOST', '127.0.0.1'),
         port=int(os.getenv('PROXY_PORT', '8080')),
         username=os.getenv('PROXY_USER'),
         password=os.getenv('PROXY_PASS')
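As a usage note for `get_proxy()`, a hedged sketch of how proxy settings like these can be turned into an aiohttp connector; `aiohttp_socks` is an assumed dependency, and the repository's actual `Proxy.connector` property may be implemented differently.

```python
# Hedged sketch: build an aiohttp-compatible connector from the proxy env variables above.
# aiohttp_socks is an illustrative choice, not necessarily what this repo uses.
import os
import aiohttp
from aiohttp_socks import ProxyConnector, ProxyType

def connector_from_env() -> ProxyConnector:
    types = {'http': ProxyType.HTTP, 'socks4': ProxyType.SOCKS4, 'socks5': ProxyType.SOCKS5}
    return ProxyConnector(
        proxy_type=types.get(os.getenv('PROXY_TYPE', 'http'), ProxyType.HTTP),
        host=os.getenv('PROXY_HOST', '127.0.0.1'),
        port=int(os.getenv('PROXY_PORT', '8080')),
        username=os.getenv('PROXY_USER'),
        password=os.getenv('PROXY_PASS'),
    )

async def fetch_status(url: str) -> int:
    async with aiohttp.ClientSession(connector=connector_from_env()) as session:
        async with session.get(url) as response:
            return response.status
```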


@@ -71,7 +71,7 @@ async def respond(
         'timeout': 0
     }
-    for _ in range(10):
+    for _ in range(5):
         try:
             if is_chat:
                 target_request = await load_balancing.balance_chat_request(payload)
@@ -107,12 +107,7 @@ async def respond(
     if target_request['method'] == 'GET' and not payload:
         target_request['payload'] = None
-    connector = None
-    if os.getenv('PROXY_HOST') or os.getenv('USE_PROXY_LIST', 'False').lower() == 'true':
-        connector = proxies.get_proxy().connector
-    async with aiohttp.ClientSession(connector=connector) as session:
+    async with aiohttp.ClientSession(connector=proxies.get_proxy().connector) as session:
         try:
             async with session.request(
                 method=target_request.get('method', 'POST'),


@@ -72,11 +72,7 @@ async def test_chat_non_stream_gpt4() -> float:
     )
     await _response_base_check(response)
-    try:
-        assert '1337' in response.json()['choices'][0]['message']['content'], 'The API did not return a correct response.'
-    except json.decoder.JSONDecodeError:
-        return response.status_code
+    assert '1337' in response.json()['choices'][0]['message']['content'], 'The API did not return a correct response.'
     return time.perf_counter() - request_start
 async def test_chat_stream_gpt3() -> float:
@@ -220,9 +216,8 @@ async def demo():
         raise ConnectionError('API Server is not running.')
     for func in [
-        test_chat_stream_gpt3,
         test_chat_non_stream_gpt4,
-        test_function_calling,
+        test_chat_stream_gpt3
     ]:
         print(f'[*] {func.__name__}')
         result = await func()