diff --git a/public/locales/en/model.json b/public/locales/en/model.json
index 7f44717..606b645 100644
--- a/public/locales/en/model.json
+++ b/public/locales/en/model.json
@@ -1,6 +1,10 @@
{
"configuration": "Configuration",
"model": "Model",
+ "token": {
+ "label": "Max Token",
+ "description": "The maximum number of tokens to generate in the chat completion. The total length of input tokens and generated tokens is limited by the model's context length."
+ },
"default": "Default",
"temperature": {
"label": "Temperature",
diff --git a/public/locales/zh-CN/model.json b/public/locales/zh-CN/model.json
index 9677cb5..297c722 100644
--- a/public/locales/zh-CN/model.json
+++ b/public/locales/zh-CN/model.json
@@ -1,6 +1,10 @@
{
"configuration": "配置",
"model": "模型",
+ "token": {
+ "label": "最大 Token",
+ "description": "assistant 生成一条信息可以包含的最大 token 数。最大 token 数也受到模型的总长度限制,上文的 token 数和生成的 token 数之和不能超过模型的 token 总数(例如 gpt-3.5-turbo 的 token 总数是 4096)。"
+ },
"default": "默认",
"temperature": {
"label": "采样温度",
diff --git a/public/locales/zh-HK/model.json b/public/locales/zh-HK/model.json
index 7ecfa84..317a1ec 100644
--- a/public/locales/zh-HK/model.json
+++ b/public/locales/zh-HK/model.json
@@ -1,6 +1,10 @@
{
"configuration": "配置",
"model": "模型",
+ "token": {
+ "label": "最大 Token",
+ "description": "控制 assistant 一條 msg 最多可以 gen 幾多 token。最大 token 數仲受到模型總長度嘅限制,上文的 token 數同生成嘅 token 數加埋一齊唔可以超過模型嘅 token 總數(譬如 gpt-3.5-turbo 嘅 token 總數係 4096)。"
+ },
"default": "預設",
"temperature": {
"label": "採樣温度",
diff --git a/src/components/Chat/ChatContent/ChatTitle.tsx b/src/components/Chat/ChatContent/ChatTitle.tsx
index 4267b8a..3c972c8 100644
--- a/src/components/Chat/ChatContent/ChatTitle.tsx
+++ b/src/components/Chat/ChatContent/ChatTitle.tsx
@@ -51,6 +51,9 @@ const ChatTitle = React.memo(() => {
         <div>
           {t('model')}: {config.model}
         </div>
+        <div>
+          {t('token.label')}: {config.max_tokens}
+        </div>
         <div>
           {t('temperature.label')}: {config.temperature}
         </div>
diff --git a/src/components/ConfigMenu/ConfigMenu.tsx b/src/components/ConfigMenu/ConfigMenu.tsx
index ee115de..e77ca75 100644
--- a/src/components/ConfigMenu/ConfigMenu.tsx
+++ b/src/components/ConfigMenu/ConfigMenu.tsx
@@ -1,10 +1,10 @@
-import React, { useState } from 'react';
+import React, { useEffect, useRef, useState } from 'react';
import useStore from '@store/store';
import { useTranslation } from 'react-i18next';
import PopupModal from '@components/PopupModal';
import { ConfigInterface, ModelOptions } from '@type/chat';
import DownChevronArrow from '@icon/DownChevronArrow';
-import { modelOptions } from '@constants/chat';
+import { modelMaxToken, modelOptions } from '@constants/chat';
const ConfigMenu = ({
setIsModalOpen,
@@ -15,6 +15,7 @@ const ConfigMenu = ({
config: ConfigInterface;
setConfig: (config: ConfigInterface) => void;
}) => {
+ const [_maxToken, _setMaxToken] = useState(config.max_tokens);
const [_model, _setModel] = useState(config.model);
const [_temperature, _setTemperature] = useState(config.temperature);
const [_presencePenalty, _setPresencePenalty] = useState(
@@ -28,6 +29,7 @@ const ConfigMenu = ({
const handleConfirm = () => {
setConfig({
+ max_tokens: _maxToken,
model: _model,
temperature: _temperature,
presence_penalty: _presencePenalty,
@@ -45,7 +47,12 @@ const ConfigMenu = ({
       >
-        <ModelSelector _model={_model} _setModel={_setModel} />
+        <ModelSelector _model={_model} _setModel={_setModel} />
+        <MaxTokenSlider
+          _maxToken={_maxToken}
+          _setMaxToken={_setMaxToken}
+          _model={_model}
+        />
@@ -177,4 +184,46 @@ const ModelSelector = ({
);
};
+const MaxTokenSlider = ({
+  _maxToken,
+  _setMaxToken,
+  _model,
+}: {
+  _maxToken: number;
+  _setMaxToken: React.Dispatch<React.SetStateAction<number>>;
+  _model: ModelOptions;
+}) => {
+  const { t } = useTranslation('model');
+  const inputRef = useRef<HTMLInputElement>(null);
+
+  // When the model changes, re-read the slider's value: the browser clamps
+  // a range input to its max, so _maxToken can never exceed the newly
+  // selected model's limit.
+  useEffect(() => {
+    inputRef &&
+      inputRef.current &&
+      _setMaxToken(Number(inputRef.current.value));
+  }, [_model]);
+
+  return (
+    <div>
+      <label className='block text-sm font-medium text-gray-900 dark:text-white'>
+        {t('token.label')}: {_maxToken}
+      </label>
+      <input
+        type='range'
+        ref={inputRef}
+        value={_maxToken}
+        onChange={(e) => {
+          _setMaxToken(Number(e.target.value));
+        }}
+        min={0}
+        max={modelMaxToken[_model]}
+        step={1}
+        className='w-full h-2 bg-gray-200 rounded-lg appearance-none cursor-pointer'
+      />
+      <div className='min-w-fit text-gray-500 dark:text-gray-300 text-sm mt-2'>
+        {t('token.description')}
+      </div>
+    </div>
+  );
+};
+
export default ConfigMenu;
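
This diff wires `max_tokens` through the UI and storage layers; the request side is out of scope here. As a sketch of how the saved config could be forwarded, assuming a hypothetical `getChatCompletion` helper that is not part of this PR (`model`, `max_tokens`, `temperature`, `presence_penalty`, and `top_p` are all genuine OpenAI chat-completions parameters, so the config object can be spread straight into the request body):

```ts
import { ConfigInterface } from '@type/chat';

// Hypothetical call site (not part of this diff).
const getChatCompletion = async (
  apiKey: string,
  messages: { role: 'user' | 'assistant' | 'system'; content: string }[],
  config: ConfigInterface
) => {
  const response = await fetch('https://api.openai.com/v1/chat/completions', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${apiKey}`,
    },
    // max_tokens caps only the generated tokens; prompt tokens still count
    // against the model's context window.
    body: JSON.stringify({ ...config, messages }),
  });
  return response.json();
};
```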
diff --git a/src/constants/chat.ts b/src/constants/chat.ts
index d07b66f..35f2e5f 100644
--- a/src/constants/chat.ts
+++ b/src/constants/chat.ts
@@ -18,14 +18,26 @@ export const modelOptions: ModelOptions[] = [
// 'gpt-3.5-turbo-0301',
'gpt-4',
// 'gpt-4-0314',
- // 'gpt-4-32k',
+ 'gpt-4-32k',
// 'gpt-4-32k-0314',
];
export const defaultModel = 'gpt-3.5-turbo';
+export const modelMaxToken = {
+ 'gpt-3.5-turbo': 4096,
+ 'gpt-3.5-turbo-0301': 4096,
+ 'gpt-4': 8192,
+ 'gpt-4-0314': 8192,
+ 'gpt-4-32k': 32768,
+ 'gpt-4-32k-0314': 32768,
+};
+
+export const defaultUserMaxToken = 4000;
+
export const defaultChatConfig: ConfigInterface = {
model: defaultModel,
+ max_tokens: defaultUserMaxToken,
temperature: 1,
presence_penalty: 0,
top_p: 1,
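
`modelMaxToken` becomes the single source of truth for per-model output ceilings. A minimal sketch of the kind of guard it enables (`clampMaxToken` is a hypothetical helper, not part of this PR, and assumes every `ModelOptions` member has an entry in the map):

```ts
import { modelMaxToken } from '@constants/chat';
import { ModelOptions } from '@type/chat';

// Hypothetical helper: keep a stored max_tokens inside the hard limit of
// whichever model is currently selected.
const clampMaxToken = (maxToken: number, model: ModelOptions): number =>
  Math.min(maxToken, modelMaxToken[model]);

clampMaxToken(32000, 'gpt-4'); // 8192
clampMaxToken(32000, 'gpt-4-32k'); // 32000
```

In the slider itself this clamping happens implicitly: the browser limits a range input to its `max`, and the `useEffect` in `MaxTokenSlider` re-reads that clamped value whenever the model changes.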
diff --git a/src/store/migrate.ts b/src/store/migrate.ts
index 71adbc0..e2cb9f2 100644
--- a/src/store/migrate.ts
+++ b/src/store/migrate.ts
@@ -4,8 +4,13 @@ import {
LocalStorageInterfaceV2ToV3,
LocalStorageInterfaceV3ToV4,
LocalStorageInterfaceV4ToV5,
+ LocalStorageInterfaceV5ToV6,
} from '@type/chat';
-import { defaultChatConfig, defaultModel } from '@constants/chat';
+import {
+ defaultChatConfig,
+ defaultModel,
+ defaultUserMaxToken,
+} from '@constants/chat';
import { officialAPIEndpoint } from '@constants/auth';
import defaultPrompts from '@constants/prompt';
@@ -47,3 +52,12 @@ export const migrateV4 = (persistedState: LocalStorageInterfaceV4ToV5) => {
};
});
};
+
+export const migrateV5 = (persistedState: LocalStorageInterfaceV5ToV6) => {
+ persistedState.chats.forEach((chat) => {
+ chat.config = {
+ ...chat.config,
+ max_tokens: defaultUserMaxToken,
+ };
+ });
+};
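
`migrateV5` mutates the persisted state in place, backfilling the new field with `defaultUserMaxToken`. An illustrative before/after (the chat object is a pared-down stand-in for a real persisted v5 state, hence the cast; the `@store` alias is assumed to resolve like the ones used elsewhere in this diff):

```ts
import { migrateV5 } from '@store/migrate';
import { LocalStorageInterfaceV5ToV6 } from '@type/chat';

// Pared-down stand-in for a real persisted v5 state (hence the cast).
const state = {
  chats: [{ config: { model: 'gpt-3.5-turbo', temperature: 1 } }],
} as unknown as LocalStorageInterfaceV5ToV6;

migrateV5(state);
// Each chat's config now also carries max_tokens: 4000 (defaultUserMaxToken);
// the spread preserves every field that was already there.
```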
diff --git a/src/store/store.ts b/src/store/store.ts
index 294a0d3..e7e90c6 100644
--- a/src/store/store.ts
+++ b/src/store/store.ts
@@ -11,6 +11,7 @@ import {
LocalStorageInterfaceV2ToV3,
LocalStorageInterfaceV3ToV4,
LocalStorageInterfaceV4ToV5,
+ LocalStorageInterfaceV5ToV6,
} from '@type/chat';
import {
migrateV0,
@@ -18,6 +19,7 @@ import {
migrateV2,
migrateV3,
migrateV4,
+ migrateV5,
} from './migrate';
export type StoreState = ChatSlice &
@@ -52,7 +54,7 @@ const useStore = create<StoreState>()(
autoTitle: state.autoTitle,
prompts: state.prompts,
}),
- version: 5,
+ version: 6,
migrate: (persistedState, version) => {
switch (version) {
case 0:
@@ -65,6 +67,8 @@ const useStore = create()(
migrateV3(persistedState as LocalStorageInterfaceV3ToV4);
case 4:
migrateV4(persistedState as LocalStorageInterfaceV4ToV5);
+ case 5:
+ migrateV5(persistedState as LocalStorageInterfaceV5ToV6);
break;
}
return persistedState as StoreState;
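
One subtlety worth calling out: there is intentionally no `break` between the cases, so the switch falls through and a store persisted at any older version is brought forward one step at a time. Sketched for a user still on version 3 (illustrative; these are exactly the calls from the switch above):

```ts
// Effective call sequence when the persisted version is 3:
migrateV3(persistedState as LocalStorageInterfaceV3ToV4); // case 3 matches
migrateV4(persistedState as LocalStorageInterfaceV4ToV5); // falls through
migrateV5(persistedState as LocalStorageInterfaceV5ToV6); // falls through, then break
```

This is why `migrateV5` only has to handle the v5 to v6 step; earlier cases bring older states forward first.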
diff --git a/src/types/chat.ts b/src/types/chat.ts
index 99985d5..f7b161d 100644
--- a/src/types/chat.ts
+++ b/src/types/chat.ts
@@ -18,6 +18,7 @@ export interface ChatInterface {
export interface ConfigInterface {
model: ModelOptions;
+ max_tokens: number;
temperature: number;
presence_penalty: number;
top_p: number;
@@ -84,3 +85,15 @@ export interface LocalStorageInterfaceV4ToV5 {
autoTitle: boolean;
prompts: Prompt[];
}
+
+export interface LocalStorageInterfaceV5ToV6 {
+ chats: ChatInterface[];
+ currentChatIndex: number;
+ apiKey: string;
+ apiFree: boolean;
+ apiFreeEndpoint: string;
+ apiEndpoint?: string;
+ theme: Theme;
+ autoTitle: boolean;
+ prompts: Prompt[];
+}
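
`LocalStorageInterfaceV5ToV6` appears shape-identical to `LocalStorageInterfaceV4ToV5`; what changes between v5 and v6 is the `ConfigInterface` inside each chat, where `max_tokens` is now a required field. A trivial illustrative check:

```ts
import { defaultChatConfig } from '@constants/chat';
import { ConfigInterface } from '@type/chat';

// Would become a type error if defaultChatConfig ever dropped max_tokens,
// since ConfigInterface now requires it.
const config: ConfigInterface = { ...defaultChatConfig };
console.log(config.max_tokens); // 4000
```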