Handle the OpenAI API error

This commit is contained in:
Kevin Yue
2023-03-13 01:03:14 +08:00
parent 8450b8beef
commit 86d0b24a8d
5 changed files with 779 additions and 7 deletions

View File

@@ -58,6 +58,10 @@ $ docker run -d -p 3000:3000 --env-file .env -v $(pwd)/config/app.config.json:/a
You can configure the app by copying the `config/example.json` to `config/app.config.json`
and editing its values. Note: comments are not allowed in JSON files.
## Error messages
Error messages for the OpenAI API can be customized by editing the `config/app.config.json` file. See the examples in the `config/example.json` file.
## Credits
Thanks: [transitive-bullshit/chatgpt-api](https://github.com/transitive-bullshit/chatgpt-api)

View File

@@ -4,6 +4,20 @@
"maxTokens": 4000,
// Currently, only `gpt-3.5-turbo` and `gpt-3.5-turbo-0301` are supported.
// default: `gpt-3.5-turbo`
"model": "gpt-3.5-turbo"
"model": "gpt-3.5-turbo",
"errorMapping1": [
{
"keyword": "insufficient_quota",
"message": "The API key has insufficient quota."
},
{
"keyword": "Rate limit reached for",
"message": "The API key has reached its rate limit."
},
{
"keyword": "context_length_exceeded",
"message": "The context length exceeds the maximum allowed length."
}
]
}
}

File diff suppressed because one or more lines are too long

View File

@@ -5,7 +5,7 @@ import fetch from './fetch.js';
import { ConfigService } from '@nestjs/config';
import ProxyAgent from 'proxy-agent-v2';
import { Observable } from 'rxjs';
import { OpenAiConfig } from './config/configuration.types.js';
import { ErrorMapping, OpenAiConfig } from './config/configuration.types.js';
@Injectable()
export class ChatGPTService implements OnModuleInit {
@@ -13,8 +13,11 @@ export class ChatGPTService implements OnModuleInit {
private api: ChatGPTAPI;
private proxyAgent: unknown;
private errorMapping: ErrorMapping[];
constructor(private readonly configService: ConfigService) {}
constructor(private readonly configService: ConfigService) {
this.errorMapping = configService.get('openai.errorMapping') || [];
}
onModuleInit() {
const { HTTP_PROXY } = process.env;
@@ -23,7 +26,8 @@ export class ChatGPTService implements OnModuleInit {
}
const openaiConfig: OpenAiConfig = this.configService.get('openai') || {};
const { systemMessage, maxTokens, model = 'gpt-3.5-turbo' } = openaiConfig;
const { systemMessage, maxTokens, model = 'gpt-3.5-turbo', errorMapping } = openaiConfig;
this.errorMapping = errorMapping || [];
this.api = new ChatGPTAPI({
apiKey: process.env.OPENAI_API_KEY,
@@ -62,7 +66,7 @@ export class ChatGPTService implements OnModuleInit {
type: 'add',
data: {
error: {
message: err.message || 'Unknown error',
message: this.buildErrorMessage(err),
},
},
});
@@ -101,4 +105,10 @@ export class ChatGPTService implements OnModuleInit {
error: null,
} as ConversationResponseEvent;
}
/**
 * Translates a raw OpenAI API error into a user-facing message.
 * Looks for the first configured mapping whose keyword appears in the raw
 * error message and returns its friendly text; otherwise falls back to the
 * raw message, then to a generic 'Unknown error'.
 */
private buildErrorMessage(err: { message?: string }): string {
  const rawMessage = err.message;
  const matched = this.errorMapping.find(({ keyword }) => rawMessage?.includes(keyword));
  return matched?.message || rawMessage || 'Unknown error';
}
}

View File

@@ -1,8 +1,15 @@
/**
 * One entry of the configurable OpenAI error-translation table
 * (loaded from `openai.errorMapping` in the app config): when `keyword`
 * appears in a raw API error message, the friendly `message` is shown instead.
 */
export type ErrorMapping = {
// Substring searched for in the raw OpenAI error message.
keyword: string;
// User-facing text returned when the keyword matches.
message: string;
};
export type OpenAiConfig = {
systemMessage?: string;
maxTokens?: number;
model?: string;
}
errorMapping?: ErrorMapping[];
};
/** Root application configuration shape (see `config/app.config.json`). */
export type AppConfig = {
// Settings consumed by ChatGPTService via `configService.get('openai')`.
openai?: OpenAiConfig;
};