Mirror of https://github.com/NovaOSS/nova-api.git (synced 2024-11-25 23:53:57 +01:00)

Compare commits: f7f37ddd59 ... 8fe14135fd (5 commits)
Commits in this range:
8fe14135fd
0877645981
7ccd93c423
ce2cd9469e
474723c6dd
.github/workflows/tests.yml (vendored): 2 lines changed
@@ -16,7 +16,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v2
         with:
-          python-version: 3.x
+          python-version: 3.10

       - name: Install dependencies
         run: |
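A side note on the version pin: generic YAML parsers read an unquoted 3.10 as the float 3.1, which is why pinned minor versions are commonly written as quoted strings in workflow files. A minimal sketch using PyYAML (an assumption for illustration only, not a dependency of this repo) shows the difference:

```python
import yaml  # PyYAML, assumed installed purely for this illustration

# Unquoted, YAML treats 3.10 as a float and the trailing zero is lost.
print(yaml.safe_load('python-version: 3.10'))    # {'python-version': 3.1}

# Quoted, the full version string is preserved.
print(yaml.safe_load("python-version: '3.10'"))  # {'python-version': '3.10'}
```

Whether the unquoted form matters here depends on how actions/setup-python normalises the value, so quoting it as '3.10' is the conservative choice.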
api/models.json: 3054 lines changed (file diff suppressed because it is too large)
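The models_list object returned by the request handler in the next diff is presumably built from this file. A hypothetical sketch of how api/models.json could be loaded at startup (the actual loading code is not part of this compare view):

```python
import json

# Hypothetical: read the (large) models manifest once at module import so the
# handler can serve it directly without re-reading it on every request.
with open('api/models.json', encoding='utf8') as models_file:
    models_list = json.load(models_file)
```

Loading it once keeps the /models route cheap, which matters now that it is answered before any other work in the handler (see below).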
@@ -28,6 +28,9 @@ async def handle(incoming_request):
     users = UserManager()
     path = incoming_request.url.path.replace('v1/v1/', 'v1/')

+    if '/models' in path:
+        return fastapi.responses.JSONResponse(content=models_list)
+
     try:
         payload = await incoming_request.json()
     except json.decoder.JSONDecodeError:

@@ -52,10 +55,6 @@ async def handle(incoming_request):
     if ban_reason:
         return await errors.error(403, f'Your NovaAI account has been banned. Reason: "{ban_reason}".', 'Contact the staff for an appeal.')

-    path_contains_models = '/models' in path
-    if path_contains_models:
-        return fastapi.responses.JSONResponse(content=models_list)
-
     costs = config['costs']
     cost = costs['other']
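Read together, the two hunks move the /models early return from after the JSON body parse and the ban check to directly after path normalisation. The practical effect is that a body-less GET to /v1/models is answered before incoming_request.json() can raise json.decoder.JSONDecodeError and before any account lookup. A condensed, self-contained sketch of the resulting control flow; models_list, the error helper, and the trailing placeholder response are stand-ins, not the repository's real code:

```python
import json
import fastapi

# Stand-in for the contents of api/models.json (see the loading sketch above).
models_list = {'object': 'list', 'data': []}

async def error(status: int, message: str, tip: str) -> fastapi.responses.JSONResponse:
    # Stand-in for the errors.error(...) helper visible in the diff context.
    return fastapi.responses.JSONResponse(status_code=status,
                                          content={'error': message, 'tip': tip})

async def handle(incoming_request: fastapi.Request):
    path = incoming_request.url.path.replace('v1/v1/', 'v1/')

    # The added early return: /models is served before the body parse and the
    # key/ban checks, so a body-less GET no longer trips over json() below.
    if '/models' in path:
        return fastapi.responses.JSONResponse(content=models_list)

    try:
        payload = await incoming_request.json()
    except json.decoder.JSONDecodeError:
        return await error(400, 'Invalid or missing JSON body.', 'Send a JSON payload.')

    # ... key validation, ban check, cost accounting and proxying would follow,
    # now without the duplicate path_contains_models check removed by hunk two.
    return fastapi.responses.JSONResponse(content={'ok': True, 'payload': payload})
```

In the old ordering, a /models request first had to get past the body parse and the key/ban checks before reaching the path_contains_models branch; hoisting the check makes the models listing reachable without either, and the later duplicate branch can be dropped.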