Compare commits

2 commits

Author    SHA1        Message            Date
monosans  de2710539f  Add missing await  2023-10-08 23:05:11 +03:00
nsde      a6af7bd1a4  aight              2023-10-08 21:53:27 +02:00
5 changed files with 9 additions and 8 deletions

View file

@@ -7,9 +7,9 @@
     "**/.DS_Store": true,
     "**/Thumbs.db": true,
     "**/__pycache__": true,
-    "**/*.css.map": true,
     "**/.vscode": true,
     "**/*.map": true,
+    "**/*.css.map": true,
     "tests/__pycache__": true
   },
   "hide-files.files": [

View file

@@ -6,10 +6,9 @@ costs:
   other: 5
   chat-models:
-    gpt-4-32k-azure: 100
+    gpt-4-32k: 200
     gpt-4: 50
-    gpt-4-azure: 10
-    gpt-3: 5
+    gpt-3: 10
 ## Roles Explanation
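If this block is plain YAML loaded into a dict (an assumption; the project's real loader and key layout may differ), a per-model cost lookup with a fallback to the generic "other" price could look like the following hypothetical sketch.

import yaml

# Hypothetical copy of the costs section above; the real file and nesting may differ.
config = yaml.safe_load("""
costs:
  other: 5
  chat-models:
    gpt-4-32k: 200
    gpt-4: 50
    gpt-3: 10
""")

def cost_for(model: str) -> int:
    # Illustrative helper (not in the repository): models with an explicit entry
    # get their listed cost, anything else falls back to the generic "other" cost.
    return config['costs']['chat-models'].get(model, config['costs']['other'])

print(cost_for('gpt-4'))          # 50
print(cost_for('gpt-3.5-turbo'))  # 5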

View file

@@ -38,10 +38,10 @@ async def count_for_messages(messages: list, model: str='gpt-3.5-turbo-0613') ->
         tokens_per_name = -1  # if there's a name, the role is omitted
     elif 'gpt-3.5-turbo' in model:
-        return count_for_messages(messages, model='gpt-3.5-turbo-0613')
+        return await count_for_messages(messages, model='gpt-3.5-turbo-0613')
     elif 'gpt-4' in model:
-        return count_for_messages(messages, model='gpt-4-0613')
+        return await count_for_messages(messages, model='gpt-4-0613')
     else:
         raise NotImplementedError(f"""count_for_messages() is not implemented for model {model}.
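This is the fix from the "Add missing await" commit: since count_for_messages is an async function, calling it without await only creates a coroutine object and never produces the token count. A minimal, self-contained sketch of the difference, using a hypothetical count_tokens helper rather than the repository's code:

import asyncio

async def count_tokens(text: str) -> int:
    # Hypothetical stand-in for an async token counter like count_for_messages.
    return len(text.split())

async def broken():
    # Missing await: this returns a coroutine object, not an int.
    return count_tokens('hello world')

async def fixed() -> int:
    # With await, the coroutine runs and the actual count is returned.
    return await count_tokens('hello world')

async def main():
    print(await broken())  # <coroutine object count_tokens at 0x...> plus a "never awaited" warning
    print(await fixed())   # 2

asyncio.run(main())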

View file

@@ -147,13 +147,15 @@ async def respond(
                 print('[!] too many requests')
                 continue
+            chunk_no = 0
             async for chunk in response.content.iter_any():
+                chunk_no += 1
                 chunk = chunk.decode('utf8').strip()
                 if 'azure' in provider_name:
                     chunk = chunk.strip().replace('data: ', '')
-                if not chunk or 'prompt_filter_results' in chunk:
+                if not chunk or chunk_no == 1:
                     continue
                 yield chunk + '\n\n'
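The second change replaces the content check for 'prompt_filter_results' with a positional one: a chunk_no counter makes the generator drop the first chunk of the stream (and any empty chunk) regardless of what it contains. A minimal sketch of that pattern, with a fake stream standing in for response.content.iter_any():

import asyncio

async def fake_stream():
    # Stand-in for response.content.iter_any(): the first chunk is metadata.
    for raw in (b'prompt_filter_results ...', b'data: {"n": 1}', b'data: {"n": 2}'):
        yield raw

async def respond_sketch():
    chunk_no = 0
    async for chunk in fake_stream():
        chunk_no += 1
        chunk = chunk.decode('utf8').strip()
        if not chunk or chunk_no == 1:
            continue  # skip empty chunks and the first (metadata) chunk
        yield chunk + '\n\n'

async def main():
    async for part in respond_sketch():
        print(repr(part))  # only the second and third chunks are yielded

asyncio.run(main())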

View file

@@ -216,7 +216,7 @@ async def demo():
         raise ConnectionError('API Server is not running.')
     for func in [
-        # test_chat_non_stream_gpt4,
+        test_chat_non_stream_gpt4,
         test_chat_stream_gpt3
     ]:
         print(f'[*] {func.__name__}')
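Re-enabling test_chat_non_stream_gpt4 simply adds it back to the list the demo iterates over. Assuming the loop awaits each entry (the lines after this hunk are not shown), the overall pattern is roughly the following sketch with stub test functions:

import asyncio

async def test_chat_non_stream_gpt4():
    ...  # stub

async def test_chat_stream_gpt3():
    ...  # stub

async def demo():
    for func in [
        test_chat_non_stream_gpt4,
        test_chat_stream_gpt3
    ]:
        print(f'[*] {func.__name__}')
        await func()  # assumption: each test coroutine is awaited in turn

asyncio.run(demo())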