Mirror of https://github.com/NovaOSS/nova-betterchat.git
parent 209173ef5c
commit 84ae0ae8df

@@ -15,7 +15,6 @@ export const getChatCompletion = async (
     method: 'POST',
     headers,
     body: JSON.stringify({
-      model: 'gpt-3.5-turbo',
       messages,
       ...config,
     }),

@@ -41,16 +40,24 @@ export const getChatCompletionStream = async (
     method: 'POST',
     headers,
     body: JSON.stringify({
-      model: 'gpt-3.5-turbo',
       messages,
       ...config,
       stream: true,
     }),
   });
-  if (response.status === 404 || response.status === 405)
-    throw new Error(
-      'Message from freechatgpt.chat:\nInvalid API endpoint! We recommend you to check your free API endpoint.'
-    );
+  if (response.status === 404 || response.status === 405) {
+    const text = await response.text();
+    if (text.includes('model_not_found')) {
+      throw new Error(
+        text +
+          '\nMessage from freechatgpt.chat:\nPlease ensure that you have access to the GPT-4 API!'
+      );
+    } else {
+      throw new Error(
+        'Message from freechatgpt.chat:\nInvalid API endpoint! We recommend you to check your free API endpoint.'
+      );
+    }
+  }

   if (response.status === 429 || !response.ok) {
     const text = await response.text();
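
A rough sketch of the new 404/405 handling above, written as a standalone helper for clarity. The helper name is hypothetical; the messages are copied from the diff.

// Hypothetical helper mirroring the branching added in the 404/405 block.
const endpointErrorMessage = async (response: Response): Promise<string> => {
  const text = await response.text();
  if (text.includes('model_not_found')) {
    // e.g. the endpoint rejects 'gpt-4' for accounts without GPT-4 access
    return (
      text +
      '\nMessage from freechatgpt.chat:\nPlease ensure that you have access to the GPT-4 API!'
    );
  }
  return 'Message from freechatgpt.chat:\nInvalid API endpoint! We recommend you to check your free API endpoint.';
};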

@@ -33,7 +33,6 @@ export const getChatCompletion = async (
       Authorization: `Bearer ${apiKey}`,
     },
     body: JSON.stringify({
-      model: 'gpt-3.5-turbo',
       messages,
       ...config,
     }),

@@ -56,7 +55,6 @@ export const getChatCompletionStream = async (
       Authorization: `Bearer ${apiKey}`,
     },
     body: JSON.stringify({
-      model: 'gpt-3.5-turbo',
       messages,
       ...config,
       stream: true,

@@ -11,7 +11,6 @@ export const getChatCompletion = async (
       'Content-Type': 'application/json',
     },
     body: JSON.stringify({
-      model: 'gpt-3.5-turbo',
       messages,
       ...config,
     }),

@@ -33,7 +32,6 @@ export const getChatCompletionStream = async (
       'Content-Type': 'application/json',
     },
     body: JSON.stringify({
-      model: 'gpt-3.5-turbo',
       messages,
       ...config,
       stream: true,
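
With the hard-coded model removed from every request body, the spread of config now supplies model (along with the sampling parameters), so the model chosen in the chat's config is what gets sent. A minimal sketch; the buildBody helper is hypothetical and the message typing is simplified.

import { ConfigInterface } from '@type/chat';

// Hypothetical helper mirroring the JSON.stringify calls above.
const buildBody = (
  messages: { role: string; content: string }[],
  config: ConfigInterface
) =>
  JSON.stringify({
    messages,
    ...config, // config.model now decides which model the endpoint receives
  });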

@@ -49,7 +49,7 @@ const ChatTitle = React.memo(() => {
      }}
    >
      <div className='text-center p-1 rounded-md bg-gray-300/20 dark:bg-gray-900/10 hover:bg-gray-300/50 dark:hover:bg-gray-900/50'>
-        {t('model')}: {t('default')}
+        {t('model')}: {config.model}
      </div>
      <div className='text-center p-1 rounded-md bg-gray-300/20 dark:bg-gray-900/10 hover:bg-gray-300/50 dark:hover:bg-gray-900/50'>
        {t('temperature.label')}: {config.temperature}

@@ -1,7 +1,10 @@
 import React, { useState } from 'react';
+import useStore from '@store/store';
 import { useTranslation } from 'react-i18next';
 import PopupModal from '@components/PopupModal';
-import { ConfigInterface } from '@type/chat';
+import { ConfigInterface, ModelOptions } from '@type/chat';
+import DownChevronArrow from '@icon/DownChevronArrow';
+import { modelOptions } from '@constants/chat';

 const ConfigMenu = ({
   setIsModalOpen,

@@ -12,18 +15,24 @@ const ConfigMenu = ({
   config: ConfigInterface;
   setConfig: (config: ConfigInterface) => void;
 }) => {
+  const [_model, _setModel] = useState<ModelOptions>(config.model);
   const [_temperature, _setTemperature] = useState<number>(config.temperature);
-  const [_presencePenalty, _setPresencePenalty] = useState<number>(config.presence_penalty);
+  const [_presencePenalty, _setPresencePenalty] = useState<number>(
+    config.presence_penalty
+  );
   const [_topP, _setTopP] = useState<number>(config.top_p);
-  const [_frequencyPenalty, _setFrequencyPenalty] = useState<number>(config.frequency_penalty);
+  const [_frequencyPenalty, _setFrequencyPenalty] = useState<number>(
+    config.frequency_penalty
+  );
   const { t } = useTranslation('model');

   const handleConfirm = () => {
     setConfig({
+      model: _model,
       temperature: _temperature,
       presence_penalty: _presencePenalty,
       top_p: _topP,
-      frequency_penalty: _frequencyPenalty
+      frequency_penalty: _frequencyPenalty,
     });
     setIsModalOpen(false);
   };

@@ -35,6 +44,7 @@ const ConfigMenu = ({
       handleConfirm={handleConfirm}
     >
       <div className='p-6 border-b border-gray-200 dark:border-gray-600'>
+        <ModelSelector _model={_model} _setModel={_setModel} />
         <div>
           <label className='block text-sm font-medium text-gray-900 dark:text-white'>
             {t('temperature.label')}: {_temperature}

@@ -120,4 +130,51 @@ const ConfigMenu = ({
   );
 };

+const ModelSelector = ({
+  _model,
+  _setModel,
+}: {
+  _model: ModelOptions;
+  _setModel: React.Dispatch<React.SetStateAction<ModelOptions>>;
+}) => {
+  const [dropDown, setDropDown] = useState<boolean>(false);
+
+  return (
+    <div className='mb-4'>
+      <button
+        className='btn btn-neutral btn-small flex gap-1'
+        type='button'
+        onClick={() => setDropDown((prev) => !prev)}
+      >
+        {_model}
+        <DownChevronArrow />
+      </button>
+      <div
+        id='dropdown'
+        className={`${
+          dropDown ? '' : 'hidden'
+        } absolute top-100 bottom-100 z-10 bg-white rounded-lg shadow-xl border-b border-black/10 dark:border-gray-900/50 text-gray-800 dark:text-gray-100 group dark:bg-gray-800 opacity-90`}
+      >
+        <ul
+          className='text-sm text-gray-700 dark:text-gray-200 p-0 m-0'
+          aria-labelledby='dropdownDefaultButton'
+        >
+          {modelOptions.map((m) => (
+            <li
+              className='px-4 py-2 hover:bg-gray-100 dark:hover:bg-gray-600 dark:hover:text-white cursor-pointer'
+              onClick={() => {
+                _setModel(m);
+                setDropDown(false);
+              }}
+              key={m}
+            >
+              {m}
+            </li>
+          ))}
+        </ul>
+      </div>
+    </div>
+  );
+};
+
 export default ConfigMenu;
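
ModelSelector is a controlled dropdown kept local to this file: the parent owns the ModelOptions state, the entries come from modelOptions, and clicking one calls _setModel(m) and closes the menu. A hypothetical in-file sketch of the props contract (ConfigMenu above is the real caller):

// Hypothetical demo component; it would have to live in the same file,
// since ModelSelector is not exported.
const ModelPickerDemo = () => {
  const [_model, _setModel] = useState<ModelOptions>('gpt-3.5-turbo');
  // Selecting 'gpt-4' here updates _model; ConfigMenu commits it via setConfig.
  return <ModelSelector _model={_model} _setModel={_setModel} />;
};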

@@ -1,4 +1,4 @@
-import { ChatInterface, ConfigInterface } from '@type/chat';
+import { ChatInterface, ConfigInterface, ModelOptions } from '@type/chat';

 const date = new Date();
 const dateString =

@@ -13,11 +13,23 @@ export const defaultSystemMessage = `You are ChatGPT, a large language model tra
 Knowledge cutoff: 2021-09
 Current date: ${dateString}`;

+export const modelOptions: ModelOptions[] = [
+  'gpt-3.5-turbo',
+  // 'gpt-3.5-turbo-0301',
+  'gpt-4',
+  // 'gpt-4-0314',
+  // 'gpt-4-32k',
+  // 'gpt-4-32k-0314',
+];
+
+export const defaultModel = 'gpt-3.5-turbo';
+
 export const defaultChatConfig: ConfigInterface = {
+  model: defaultModel,
   temperature: 1,
   presence_penalty: 0,
   top_p: 1,
-  frequency_penalty: 0
+  frequency_penalty: 0,
 };

 export const generateDefaultChat = (title?: string): ChatInterface => ({
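
Only 'gpt-3.5-turbo' and 'gpt-4' are exposed in the dropdown for now (the dated and 32k variants stay commented out), and every freshly generated chat config carries a model. A sketch of the resulting default, with values copied from the diff:

import { ConfigInterface } from '@type/chat';

// What defaultChatConfig resolves to after this change.
const exampleDefault: ConfigInterface = {
  model: 'gpt-3.5-turbo', // defaultModel
  temperature: 1,
  presence_penalty: 0,
  top_p: 1,
  frequency_penalty: 0,
};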

@@ -3,8 +3,9 @@ import {
   LocalStorageInterfaceV1ToV2,
   LocalStorageInterfaceV2ToV3,
   LocalStorageInterfaceV3ToV4,
+  LocalStorageInterfaceV4ToV5,
 } from '@type/chat';
-import { defaultChatConfig } from '@constants/chat';
+import { defaultChatConfig, defaultModel } from '@constants/chat';
 import { officialAPIEndpoint } from '@constants/auth';
 import defaultPrompts from '@constants/prompt';


@@ -37,3 +38,12 @@ export const migrateV2 = (persistedState: LocalStorageInterfaceV2ToV3) => {
 export const migrateV3 = (persistedState: LocalStorageInterfaceV3ToV4) => {
   persistedState.prompts = defaultPrompts;
 };
+
+export const migrateV4 = (persistedState: LocalStorageInterfaceV4ToV5) => {
+  persistedState.chats.forEach((chat) => {
+    chat.config = {
+      ...chat.config,
+      model: defaultModel,
+    };
+  });
+};
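
migrateV4 backfills the new field on every persisted chat: the existing config values are kept and model is set to defaultModel. A before/after sketch with a made-up stored chat:

import { defaultModel } from '@constants/chat';

// Hypothetical config persisted under storage version 4 (no model field yet).
const storedConfig = {
  temperature: 0.7,
  presence_penalty: 0,
  top_p: 1,
  frequency_penalty: 0,
};

// migrateV4 produces the equivalent of:
const migratedConfig = { ...storedConfig, model: defaultModel };
// -> { temperature: 0.7, ..., model: 'gpt-3.5-turbo' }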

@@ -10,8 +10,15 @@ import {
   LocalStorageInterfaceV1ToV2,
   LocalStorageInterfaceV2ToV3,
   LocalStorageInterfaceV3ToV4,
+  LocalStorageInterfaceV4ToV5,
 } from '@type/chat';
-import { migrateV0, migrateV1, migrateV2, migrateV3 } from './migrate';
+import {
+  migrateV0,
+  migrateV1,
+  migrateV2,
+  migrateV3,
+  migrateV4,
+} from './migrate';

 export type StoreState = ChatSlice &
   InputSlice &

@@ -45,7 +52,7 @@ const useStore = create<StoreState>()(
        autoTitle: state.autoTitle,
        prompts: state.prompts,
      }),
-      version: 4,
+      version: 5,
      migrate: (persistedState, version) => {
        switch (version) {
          case 0:

@@ -56,6 +63,8 @@ const useStore = create<StoreState>()(
            migrateV2(persistedState as LocalStorageInterfaceV2ToV3);
          case 3:
            migrateV3(persistedState as LocalStorageInterfaceV3ToV4);
+          case 4:
+            migrateV4(persistedState as LocalStorageInterfaceV4ToV5);
            break;
        }
        return persistedState as StoreState;
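
The switch is intentionally fall-through: there is a single break, so a store saved at an older version replays every later migration in order before being returned. A sketch of the cascade for a version-3 store, per the migrate functions above:

// version === 3:
//   case 3: migrateV3(persistedState);  // adds default prompts
//   case 4: migrateV4(persistedState);  // adds model to every chat config
//   break;
// A version-4 store skips straight to migrateV4.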

@@ -17,12 +17,21 @@ export interface ChatInterface {
 }

 export interface ConfigInterface {
+  model: ModelOptions;
   temperature: number;
   presence_penalty: number;
   top_p: number;
   frequency_penalty: number;
 }

+export type ModelOptions =
+  | 'gpt-4'
+  | 'gpt-4-0314'
+  | 'gpt-4-32k'
+  | 'gpt-4-32k-0314'
+  | 'gpt-3.5-turbo'
+  | 'gpt-3.5-turbo-0301';
+
 export interface LocalStorageInterfaceV0ToV1 {
   chats: ChatInterface[];
   currentChatIndex: number;
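
Because ConfigInterface.model is typed as the ModelOptions union rather than a plain string, unsupported model ids are rejected at compile time. A small illustration:

import { ModelOptions } from '@type/chat';

const ok: ModelOptions = 'gpt-4'; // compiles
// const bad: ModelOptions = 'gpt-5-ultra'; // type error: not part of the union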

@@ -63,3 +72,15 @@ export interface LocalStorageInterfaceV3ToV4 {
   autoTitle: boolean;
   prompts: Prompt[];
 }
+
+export interface LocalStorageInterfaceV4ToV5 {
+  chats: ChatInterface[];
+  currentChatIndex: number;
+  apiKey: string;
+  apiFree: boolean;
+  apiFreeEndpoint: string;
+  apiEndpoint?: string;
+  theme: Theme;
+  autoTitle: boolean;
+  prompts: Prompt[];
+}