feat: model max_tokens customisation

Fixes #92
Jing Hua 2023-03-19 12:46:31 +08:00
parent 84ae0ae8df
commit bdc95cb305
9 changed files with 113 additions and 6 deletions
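This change threads a user-configurable max_tokens value through the app: new locale strings for the slider label and description, a token badge in the chat title, a MaxTokenSlider in the config menu, a modelMaxToken map plus a defaultUserMaxToken constant, a required max_tokens field on ConfigInterface, and a v5-to-v6 storage migration that stamps existing chats with the default.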

View file

@@ -1,6 +1,10 @@
{
"configuration": "Configuration",
"model": "Model",
"token": {
"label": "Max Token",
"description": "The maximum number of tokens to generate in the chat completion. The total length of input tokens and generated tokens is limited by the model's context length."
},
"default": "Default",
"temperature": {
"label": "Temperature",

View file

@@ -1,6 +1,10 @@
{
"configuration": "配置",
"model": "模型",
"token": {
"label": "最大 Token",
"description": "assistant 生成一条信息可以包含的最大 token 数。最大 token 数也受到模型的总长度限制,上文的 token 数和生成的 token 数之和不能超过模型的 token 总数(例如 gpt-3.5-turbo 的 token 总数是 4096。"
},
"default": "默认",
"temperature": {
"label": "采样温度",

View file

@@ -1,6 +1,10 @@
{
"configuration": "配置",
"model": "模型",
"token": {
"label": "最大 Token",
"description": "控制 assistant 一條 msg 最多可以 gen 幾多 token。最大 token 數仲受到模型總長度嘅限制,上文的 token 數同生成嘅 token 數加埋一齊唔可以超過模型嘅 token 總數(譬如 gpt-3.5-turbo 嘅 token 總數係 4096。"
},
"default": "預設",
"temperature": {
"label": "採樣温度",

View file

@@ -51,6 +51,9 @@ const ChatTitle = React.memo(() => {
<div className='text-center p-1 rounded-md bg-gray-300/20 dark:bg-gray-900/10 hover:bg-gray-300/50 dark:hover:bg-gray-900/50'>
{t('model')}: {config.model}
</div>
<div className='text-center p-1 rounded-md bg-gray-300/20 dark:bg-gray-900/10 hover:bg-gray-300/50 dark:hover:bg-gray-900/50'>
{t('token.label')}: {config.max_tokens}
</div>
<div className='text-center p-1 rounded-md bg-gray-300/20 dark:bg-gray-900/10 hover:bg-gray-300/50 dark:hover:bg-gray-900/50'>
{t('temperature.label')}: {config.temperature}
</div>

View file

@@ -1,10 +1,10 @@
-import React, { useState } from 'react';
+import React, { useEffect, useRef, useState } from 'react';
import useStore from '@store/store';
import { useTranslation } from 'react-i18next';
import PopupModal from '@components/PopupModal';
import { ConfigInterface, ModelOptions } from '@type/chat';
import DownChevronArrow from '@icon/DownChevronArrow';
-import { modelOptions } from '@constants/chat';
+import { modelMaxToken, modelOptions } from '@constants/chat';
const ConfigMenu = ({
setIsModalOpen,
@@ -15,6 +15,7 @@ const ConfigMenu = ({
config: ConfigInterface;
setConfig: (config: ConfigInterface) => void;
}) => {
const [_maxToken, _setMaxToken] = useState<number>(config.max_tokens);
const [_model, _setModel] = useState<ModelOptions>(config.model);
const [_temperature, _setTemperature] = useState<number>(config.temperature);
const [_presencePenalty, _setPresencePenalty] = useState<number>(
@@ -28,6 +29,7 @@ const ConfigMenu = ({
const handleConfirm = () => {
setConfig({
max_tokens: _maxToken,
model: _model,
temperature: _temperature,
presence_penalty: _presencePenalty,
@@ -45,7 +47,12 @@
>
<div className='p-6 border-b border-gray-200 dark:border-gray-600'>
<ModelSelector _model={_model} _setModel={_setModel} />
-<div>
+<MaxTokenSlider
+_maxToken={_maxToken}
+_setMaxToken={_setMaxToken}
+_model={_model}
+/>
+<div className='mt-5 pt-5 border-t border-gray-500'>
<label className='block text-sm font-medium text-gray-900 dark:text-white'>
{t('temperature.label')}: {_temperature}
</label>
@@ -177,4 +184,46 @@ const ModelSelector = ({
);
};
const MaxTokenSlider = ({
_maxToken,
_setMaxToken,
_model,
}: {
_maxToken: number;
_setMaxToken: React.Dispatch<React.SetStateAction<number>>;
_model: ModelOptions;
}) => {
const { t } = useTranslation('model');
const inputRef = useRef<HTMLInputElement>(null);
useEffect(() => {
inputRef &&
inputRef.current &&
_setMaxToken(Number(inputRef.current.value));
}, [_model]);
return (
<div>
<label className='block text-sm font-medium text-gray-900 dark:text-white'>
{t('token.label')}: {_maxToken}
</label>
<input
type='range'
ref={inputRef}
value={_maxToken}
onChange={(e) => {
_setMaxToken(Number(e.target.value));
}}
min={0}
max={modelMaxToken[_model]}
step={1}
className='w-full h-2 bg-gray-200 rounded-lg appearance-none cursor-pointer'
/>
<div className='min-w-fit text-gray-500 dark:text-gray-300 text-sm mt-2'>
{t('token.description')}
</div>
</div>
);
};
export default ConfigMenu;
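A note on MaxTokenSlider: the useEffect keyed on _model is what re-clamps the value when the user switches models. The slider's max drops to modelMaxToken[_model], the browser clamps the range input's DOM value to that new max, and the effect copies the clamped value back into state. A minimal sketch of the same clamp done explicitly in state (clampMaxToken is hypothetical, not part of this commit):

import { modelMaxToken } from '@constants/chat';
import { ModelOptions } from '@type/chat';

// Cap a requested max_tokens at the selected model's context length.
const clampMaxToken = (requested: number, model: ModelOptions): number =>
  Math.min(requested, modelMaxToken[model]);

// e.g. switching from 'gpt-4-32k' to 'gpt-3.5-turbo' with 32768 selected:
// clampMaxToken(32768, 'gpt-3.5-turbo') === 4096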

View file

@@ -18,14 +18,26 @@ export const modelOptions: ModelOptions[] = [
// 'gpt-3.5-turbo-0301',
'gpt-4',
// 'gpt-4-0314',
-// 'gpt-4-32k',
+'gpt-4-32k',
// 'gpt-4-32k-0314',
];
export const defaultModel = 'gpt-3.5-turbo';
export const modelMaxToken = {
'gpt-3.5-turbo': 4096,
'gpt-3.5-turbo-0301': 4096,
'gpt-4': 8192,
'gpt-4-0314': 8192,
'gpt-4-32k': 32768,
'gpt-4-32k-0314': 32768,
};
export const defaultUserMaxToken = 4000;
export const defaultChatConfig: ConfigInterface = {
model: defaultModel,
max_tokens: defaultUserMaxToken,
temperature: 1,
presence_penalty: 0,
top_p: 1,
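modelMaxToken records each model's total context length, while max_tokens caps only the completion; prompt tokens and generated tokens together must fit inside the context window, so defaultUserMaxToken (4000) leaves a little headroom under the 4096-token context of the default gpt-3.5-turbo. A sketch of the resulting completion budget (completionBudget is hypothetical, not part of this commit):

import { modelMaxToken } from '@constants/chat';
import { ModelOptions } from '@type/chat';

// Largest completion the API can honour once the prompt is accounted for.
const completionBudget = (
  model: ModelOptions,
  promptTokens: number,
  requested: number
): number =>
  Math.max(0, Math.min(requested, modelMaxToken[model] - promptTokens));

// completionBudget('gpt-3.5-turbo', 500, 4000) === 3596  (4096 - 500 = 3596)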

View file

@@ -4,8 +4,13 @@ import {
LocalStorageInterfaceV2ToV3,
LocalStorageInterfaceV3ToV4,
LocalStorageInterfaceV4ToV5,
LocalStorageInterfaceV5ToV6,
} from '@type/chat';
-import { defaultChatConfig, defaultModel } from '@constants/chat';
+import {
+defaultChatConfig,
+defaultModel,
+defaultUserMaxToken,
+} from '@constants/chat';
import { officialAPIEndpoint } from '@constants/auth';
import defaultPrompts from '@constants/prompt';
@@ -47,3 +52,12 @@ export const migrateV4 = (persistedState: LocalStorageInterfaceV4ToV5) => {
};
});
};
export const migrateV5 = (persistedState: LocalStorageInterfaceV5ToV6) => {
persistedState.chats.forEach((chat) => {
chat.config = {
...chat.config,
max_tokens: defaultUserMaxToken,
};
});
};
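migrateV5 mutates the persisted state in place: each existing chat's config is re-spread with max_tokens set to defaultUserMaxToken, so chats saved before this commit satisfy the new required field. A minimal illustration with hypothetical data:

// Hypothetical pre-v6 state: the chat config lacks max_tokens.
const persisted = {
  chats: [
    { config: { model: 'gpt-3.5-turbo', temperature: 1, presence_penalty: 0, top_p: 1 } },
  ],
} as unknown as LocalStorageInterfaceV5ToV6;

migrateV5(persisted);
// persisted.chats[0].config.max_tokens === 4000 (defaultUserMaxToken)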

View file

@@ -11,6 +11,7 @@ import {
LocalStorageInterfaceV2ToV3,
LocalStorageInterfaceV3ToV4,
LocalStorageInterfaceV4ToV5,
LocalStorageInterfaceV5ToV6,
} from '@type/chat';
import {
migrateV0,
@@ -18,6 +19,7 @@ import {
migrateV2,
migrateV3,
migrateV4,
migrateV5,
} from './migrate';
export type StoreState = ChatSlice &
@@ -52,7 +54,7 @@ const useStore = create<StoreState>()(
autoTitle: state.autoTitle,
prompts: state.prompts,
}),
-version: 5,
+version: 6,
migrate: (persistedState, version) => {
switch (version) {
case 0:
@@ -65,6 +67,8 @@ const useStore = create<StoreState>()(
migrateV3(persistedState as LocalStorageInterfaceV3ToV4);
case 4:
migrateV4(persistedState as LocalStorageInterfaceV4ToV5);
case 5:
migrateV5(persistedState as LocalStorageInterfaceV5ToV6);
break;
}
return persistedState as StoreState;
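Note the deliberate fall-through in the migrate switch: no case ends with a break until the last one, so a store persisted at version 4 runs migrateV4 and then migrateV5, and still older versions run every later migration in order before the state is returned.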

View file

@@ -18,6 +18,7 @@ export interface ChatInterface {
export interface ConfigInterface {
model: ModelOptions;
max_tokens: number;
temperature: number;
presence_penalty: number;
top_p: number;
@@ -84,3 +85,15 @@ export interface LocalStorageInterfaceV4ToV5 {
autoTitle: boolean;
prompts: Prompt[];
}
export interface LocalStorageInterfaceV5ToV6 {
chats: ChatInterface[];
currentChatIndex: number;
apiKey: string;
apiFree: boolean;
apiFreeEndpoint: string;
apiEndpoint?: string;
theme: Theme;
autoTitle: boolean;
prompts: Prompt[];
}
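Aside from its name, LocalStorageInterfaceV5ToV6 carries the same top-level fields as its predecessor; the new type and version bump exist because the ConfigInterface nested in each chat gained the required max_tokens field, not because the top-level shape changed.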