diff --git a/src/api/api.ts b/src/api/api.ts
index 43322e4..ecc14ca 100644
--- a/src/api/api.ts
+++ b/src/api/api.ts
@@ -15,7 +15,6 @@ export const getChatCompletion = async (
method: 'POST',
headers,
body: JSON.stringify({
- model: 'gpt-3.5-turbo',
messages,
...config,
}),
@@ -41,16 +40,24 @@ export const getChatCompletionStream = async (
method: 'POST',
headers,
body: JSON.stringify({
- model: 'gpt-3.5-turbo',
messages,
...config,
stream: true,
}),
});
- if (response.status === 404 || response.status === 405)
- throw new Error(
- 'Message from freechatgpt.chat:\nInvalid API endpoint! We recommend you to check your free API endpoint.'
- );
+ if (response.status === 404 || response.status === 405) {
+ const text = await response.text();
+ if (text.includes('model_not_found')) {
+ throw new Error(
+ text +
+ '\nMessage from freechatgpt.chat:\nPlease ensure that you have access to the GPT-4 API!'
+ );
+ } else {
+ throw new Error(
+ 'Message from freechatgpt.chat:\nInvalid API endpoint! We recommend you to check your free API endpoint.'
+ );
+ }
+ }
if (response.status === 429 || !response.ok) {
const text = await response.text();
diff --git a/src/api/customApi.ts b/src/api/customApi.ts
index 96a6cde..65fcccf 100644
--- a/src/api/customApi.ts
+++ b/src/api/customApi.ts
@@ -33,7 +33,6 @@ export const getChatCompletion = async (
Authorization: `Bearer ${apiKey}`,
},
body: JSON.stringify({
- model: 'gpt-3.5-turbo',
messages,
...config,
}),
@@ -56,7 +55,6 @@ export const getChatCompletionStream = async (
Authorization: `Bearer ${apiKey}`,
},
body: JSON.stringify({
- model: 'gpt-3.5-turbo',
messages,
...config,
stream: true,
diff --git a/src/api/freeApi.ts b/src/api/freeApi.ts
index 25e6e7a..ac8a754 100644
--- a/src/api/freeApi.ts
+++ b/src/api/freeApi.ts
@@ -11,7 +11,6 @@ export const getChatCompletion = async (
'Content-Type': 'application/json',
},
body: JSON.stringify({
- model: 'gpt-3.5-turbo',
messages,
...config,
}),
@@ -33,7 +32,6 @@ export const getChatCompletionStream = async (
'Content-Type': 'application/json',
},
body: JSON.stringify({
- model: 'gpt-3.5-turbo',
messages,
...config,
stream: true,
diff --git a/src/components/Chat/ChatContent/ChatTitle.tsx b/src/components/Chat/ChatContent/ChatTitle.tsx
index 2cdb3fa..4267b8a 100644
--- a/src/components/Chat/ChatContent/ChatTitle.tsx
+++ b/src/components/Chat/ChatContent/ChatTitle.tsx
@@ -49,7 +49,7 @@ const ChatTitle = React.memo(() => {
}}
>
- {t('model')}: {t('default')}
+ {t('model')}: {config.model}
{t('temperature.label')}: {config.temperature}
diff --git a/src/components/ConfigMenu/ConfigMenu.tsx b/src/components/ConfigMenu/ConfigMenu.tsx
index a12c600..ee115de 100644
--- a/src/components/ConfigMenu/ConfigMenu.tsx
+++ b/src/components/ConfigMenu/ConfigMenu.tsx
@@ -1,7 +1,10 @@
import React, { useState } from 'react';
+import useStore from '@store/store';
import { useTranslation } from 'react-i18next';
import PopupModal from '@components/PopupModal';
-import { ConfigInterface } from '@type/chat';
+import { ConfigInterface, ModelOptions } from '@type/chat';
+import DownChevronArrow from '@icon/DownChevronArrow';
+import { modelOptions } from '@constants/chat';
const ConfigMenu = ({
setIsModalOpen,
@@ -12,18 +15,24 @@ const ConfigMenu = ({
config: ConfigInterface;
setConfig: (config: ConfigInterface) => void;
}) => {
+ const [_model, _setModel] = useState<ModelOptions>(config.model);
const [_temperature, _setTemperature] = useState(config.temperature);
- const [_presencePenalty, _setPresencePenalty] = useState(config.presence_penalty);
+ const [_presencePenalty, _setPresencePenalty] = useState(
+ config.presence_penalty
+ );
const [_topP, _setTopP] = useState(config.top_p);
- const [_frequencyPenalty, _setFrequencyPenalty] = useState(config.frequency_penalty);
+ const [_frequencyPenalty, _setFrequencyPenalty] = useState(
+ config.frequency_penalty
+ );
const { t } = useTranslation('model');
const handleConfirm = () => {
setConfig({
+ model: _model,
temperature: _temperature,
presence_penalty: _presencePenalty,
top_p: _topP,
- frequency_penalty: _frequencyPenalty
+ frequency_penalty: _frequencyPenalty,
});
setIsModalOpen(false);
};
@@ -35,6 +44,7 @@ const ConfigMenu = ({
handleConfirm={handleConfirm}
>
+