Mirror of https://github.com/NovaOSS/nova-betterchat.git (synced 2024-11-29 10:14:00 +01:00)
error handling

This commit is contained in:
parent 5f673691b1
commit b9a8f05176
@@ -25,45 +25,41 @@ export const getChatCompletion = async (
   apiKey: string,
   messages: MessageInterface[]
 ) => {
-  try {
-    const response = await fetch(endpoint, {
-      method: 'POST',
-      headers: {
-        'Content-Type': 'application/json',
-        Authorization: `Bearer ${apiKey}`,
-      },
-      body: JSON.stringify({
-        model: 'gpt-3.5-turbo',
-        messages,
-      }),
-    });
-    const data = await response.json();
-    return data;
-  } catch (error) {
-    console.error('Error:', error);
-  }
+  const response = await fetch(endpoint, {
+    method: 'POST',
+    headers: {
+      'Content-Type': 'application/json',
+      Authorization: `Bearer ${apiKey}`,
+    },
+    body: JSON.stringify({
+      model: 'gpt-3.5-turbo',
+      messages,
+    }),
+  });
+  if (!response.ok) throw new Error(await response.text());
+
+  const data = await response.json();
+  return data;
 };
 
 export const getChatCompletionStream = async (
   apiKey: string,
   messages: MessageInterface[]
 ) => {
-  try {
-    const response = await fetch(endpoint, {
-      method: 'POST',
-      headers: {
-        'Content-Type': 'application/json',
-        Authorization: `Bearer ${apiKey}`,
-      },
-      body: JSON.stringify({
-        model: 'gpt-3.5-turbo',
-        messages,
-        stream: true,
-      }),
-    });
-    const stream = response.body;
-    return stream;
-  } catch (error) {
-    console.error('Error:', error);
-  }
+  const response = await fetch(endpoint, {
+    method: 'POST',
+    headers: {
+      'Content-Type': 'application/json',
+      Authorization: `Bearer ${apiKey}`,
+    },
+    body: JSON.stringify({
+      model: 'gpt-3.5-turbo',
+      messages,
+      stream: true,
+    }),
+  });
+  if (!response.ok) throw new Error(await response.text());
+
+  const stream = response.body;
+  return stream;
 };
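The rewritten functions no longer swallow failures in a try/catch: a rejected fetch or a non-OK status now surfaces as a thrown Error whose message is the raw response body, and the caller decides how to report it. Below is a minimal caller sketch, not part of this commit; the '@api/customApi' module path and the reportError callback are assumptions (only '@api/freeApi' appears verbatim in this diff).

import { MessageInterface } from '@type/chat';
// Module path assumed for illustration; it is not shown in this diff.
import { getChatCompletion } from '@api/customApi';

// reportError is a hypothetical callback, e.g. the store's setError added later in this commit.
export const requestCompletion = async (
  apiKey: string,
  messages: MessageInterface[],
  reportError: (message: string) => void
) => {
  try {
    // Rejects on network failure; throws on a non-OK status per the change above.
    return await getChatCompletion(apiKey, messages);
  } catch (err) {
    // The thrown Error carries the response text as its message.
    reportError(err instanceof Error ? err.message : String(err));
  }
};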
@@ -3,40 +3,36 @@ import { MessageInterface } from '@type/chat';
 export const endpoint = 'https://chatgpt-api.shn.hk/v1/';
 
 export const getChatCompletion = async (messages: MessageInterface[]) => {
-  try {
-    const response = await fetch(endpoint, {
-      method: 'POST',
-      headers: {
-        'Content-Type': 'application/json',
-      },
-      body: JSON.stringify({
-        model: 'gpt-3.5-turbo',
-        messages,
-      }),
-    });
-    const data = await response.json();
-    return data;
-  } catch (error) {
-    console.error('Error:', error);
-  }
+  const response = await fetch(endpoint, {
+    method: 'POST',
+    headers: {
+      'Content-Type': 'application/json',
+    },
+    body: JSON.stringify({
+      model: 'gpt-3.5-turbo',
+      messages,
+    }),
+  });
+  if (!response.ok) throw new Error(await response.text());
+
+  const data = await response.json();
+  return data;
 };
 
 export const getChatCompletionStream = async (messages: MessageInterface[]) => {
-  try {
-    const response = await fetch(endpoint, {
-      method: 'POST',
-      headers: {
-        'Content-Type': 'application/json',
-      },
-      body: JSON.stringify({
-        model: 'gpt-3.5-turbo',
-        messages,
-        stream: true,
-      }),
-    });
-    const stream = response.body;
-    return stream;
-  } catch (error) {
-    console.error('Error:', error);
-  }
+  const response = await fetch(endpoint, {
+    method: 'POST',
+    headers: {
+      'Content-Type': 'application/json',
+    },
+    body: JSON.stringify({
+      model: 'gpt-3.5-turbo',
+      messages,
+      stream: true,
+    }),
+  });
+  if (!response.ok) throw new Error(await response.text());
+
+  const stream = response.body;
+  return stream;
 };
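The stream variants now return response.body, a ReadableStream that may be null, after the same non-OK check. The sketch below is not part of this commit; it drains such a stream with the standard reader and TextDecoder APIs, while in the app itself the parseEventSource helper imported by useSubmit (its signature is not shown in this diff) handles the actual SSE parsing.

import { MessageInterface } from '@type/chat';
import { getChatCompletionStream } from '@api/freeApi';

// Sketch only: reads the raw SSE chunks; the chunk handling here is illustrative.
export const readCompletionStream = async (messages: MessageInterface[]) => {
  const stream = await getChatCompletionStream(messages);
  if (!stream) return; // response.body can be null

  const reader = stream.getReader();
  const decoder = new TextDecoder();

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    // Each chunk is a UTF-8 slice of the server-sent event payload.
    console.log(decoder.decode(value, { stream: true }));
  }
};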
@@ -39,8 +39,8 @@ const ChatContent = () => {
         <Message role={inputRole} content='' messageIndex={-1} sticky />
 
         {error !== '' && (
-          <div className='bg-red-600/50 p-2 rounded-sm w-3/5 mt-3 text-gray-900 dark:text-gray-300 text-sm'>
-            Invalid API key!
+          <div className='bg-red-600/50 p-2 rounded-sm w-3/5 mt-3 text-gray-900 dark:text-gray-300 text-sm break-words'>
+            {error}
           </div>
         )}
@@ -1,4 +1,4 @@
-import React, { useState } from 'react';
+import React from 'react';
 import useStore from '@store/store';
 import { MessageInterface } from '@type/chat';
 import { getChatCompletionStream as getChatCompletionStreamFree } from '@api/freeApi';
@@ -6,16 +6,23 @@ import { getChatCompletionStream as getChatCompletionStreamCustom } from '@api/c
 import { parseEventSource } from '@api/helper';
 
 const useSubmit = () => {
-  const [error, setError] = useState<string>('');
-  const [apiFree, apiKey, setMessages, setGenerating, generating] = useStore(
-    (state) => [
-      state.apiFree,
-      state.apiKey,
-      state.setMessages,
-      state.setGenerating,
-      state.generating,
-    ]
-  );
+  const [
+    error,
+    setError,
+    apiFree,
+    apiKey,
+    setMessages,
+    setGenerating,
+    generating,
+  ] = useStore((state) => [
+    state.error,
+    state.setError,
+    state.apiFree,
+    state.apiKey,
+    state.setMessages,
+    state.setGenerating,
+    state.generating,
+  ]);
 
   const handleSubmit = async () => {
     if (generating) return;
@@ -71,8 +78,8 @@ const useSubmit = () => {
       console.log(err);
       setError(err);
       setTimeout(() => {
-        setError(''), 10000;
-      });
+        setError('');
+      }, 10000);
     }
     setGenerating(false);
   };
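One subtlety in the hunk above: the old code placed 10000 inside the arrow body as a comma expression, so setTimeout received no delay argument and the error was cleared almost immediately; the fix passes 10000 as the delay. A self-contained sketch of the two forms, not taken from the repo:

const setError = (message: string) => console.log('error:', message);

// Old form: the comma expression runs inside the callback, setTimeout gets no
// delay, and the callback fires on the next tick.
setTimeout(() => {
  setError(''), 10000;
});

// New form: 10000 ms is passed as setTimeout's delay argument.
setTimeout(() => {
  setError('');
}, 10000);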
@@ -6,16 +6,19 @@ export interface ChatSlice {
   chats?: ChatInterface[];
   currentChatIndex: number;
   generating: boolean;
+  error: string;
   setMessages: (messages: MessageInterface[]) => void;
   setChats: (chats: ChatInterface[]) => void;
   setCurrentChatIndex: (currentChatIndex: number) => void;
   setGenerating: (generating: boolean) => void;
+  setError: (error: string) => void;
 }
 
 export const createChatSlice: StoreSlice<ChatSlice> = (set, get) => ({
   messages: [],
   currentChatIndex: -1,
   generating: false,
+  error: '',
   setMessages: (messages: MessageInterface[]) => {
     set((prev: ChatSlice) => ({
       ...prev,
@@ -40,4 +43,10 @@ export const createChatSlice: StoreSlice<ChatSlice> = (set, get) => ({
       generating: generating,
     }));
   },
+  setError: (error: string) => {
+    set((prev: ChatSlice) => ({
+      ...prev,
+      error: error,
+    }));
+  },
 });
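With error and setError living in the chat slice, the message outlives the useSubmit call and any component can read or reset it through useStore. A minimal consumer sketch, assuming the useStore hook from '@store/store' seen in the useSubmit diff; the ErrorBanner component and its click-to-dismiss behaviour are hypothetical, not part of this commit.

import React from 'react';
import useStore from '@store/store';

const ErrorBanner = () => {
  // Same array-selector pattern used by useSubmit above.
  const [error, setError] = useStore((state) => [state.error, state.setError]);

  if (error === '') return null;

  return <div onClick={() => setError('')}>{error}</div>;
};

export default ErrorBanner;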