From 663130ff51ddd85a7461532e9a6fbcdee40cf656 Mon Sep 17 00:00:00 2001
From: sunanzhi
Date: Thu, 2 Jan 2025 18:45:14 +0800
Subject: [PATCH] Record token usage for the ChatGLM3 AI provider
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 drivers/ai-provider/chatglm/mode.go | 19 ++++++++++++++++---
 1 file changed, 16 insertions(+), 3 deletions(-)

diff --git a/drivers/ai-provider/chatglm/mode.go b/drivers/ai-provider/chatglm/mode.go
index 5939def1..3d1debae 100644
--- a/drivers/ai-provider/chatglm/mode.go
+++ b/drivers/ai-provider/chatglm/mode.go
@@ -77,15 +77,28 @@ func (c *Chat) ResponseConvert(ctx eocontext.EoContext) error {
 	if err != nil {
 		return err
 	}
-	if httpContext.Response().StatusCode() != 200 {
-		return nil
-	}
 	body := httpContext.Response().GetBody()
 	data := eosc.NewBase[Response]()
 	err = json.Unmarshal(body, data)
 	if err != nil {
 		return err
 	}
+	// Handle the response according to its status code.
+	switch httpContext.Response().StatusCode() {
+	case 200:
+		// Calculate the token consumption for a successful request.
+		usage := data.Config.Usage
+		ai_provider.SetAIStatusNormal(ctx)
+		ai_provider.SetAIModelInputToken(ctx, usage.PromptTokens)
+		ai_provider.SetAIModelOutputToken(ctx, usage.CompletionTokens)
+		ai_provider.SetAIModelTotalToken(ctx, usage.TotalTokens)
+	case 400:
+		// Handle the bad request error.
+		ai_provider.SetAIStatusInvalidRequest(ctx)
+	case 401:
+		// Handle the authentication failure.
+		ai_provider.SetAIStatusInvalid(ctx)
+	}
 	responseBody := &ai_provider.ClientResponse{}
 	if len(data.Config.Choices) > 0 {
 		msg := data.Config.Choices[0]
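
Note: the switch above replaces the early "return nil" on non-200 responses, so the body is now parsed and a status is recorded for 400/401 failures as well as for successes. For reference, below is a minimal sketch of the usage block that the 200 branch reads as data.Config.Usage. The struct shape and JSON tag names are illustrative assumptions based on the OpenAI-compatible usage object ChatGLM returns; the repository's actual Response type may differ.

package main

import (
	"encoding/json"
	"fmt"
)

// Usage is a stand-in for the token-accounting block the patch reads as
// data.Config.Usage. Field names match the patch; the JSON tags are an
// assumption following the snake_case convention of the ChatGLM API.
type Usage struct {
	PromptTokens     int `json:"prompt_tokens"`     // tokens consumed by the request
	CompletionTokens int `json:"completion_tokens"` // tokens generated by the model
	TotalTokens      int `json:"total_tokens"`      // prompt + completion
}

// response is a trimmed-down illustration of the provider response body.
type response struct {
	Usage Usage `json:"usage"`
}

func main() {
	// Example 200-response body; the numbers are made up.
	body := []byte(`{"usage":{"prompt_tokens":12,"completion_tokens":34,"total_tokens":46}}`)

	var r response
	if err := json.Unmarshal(body, &r); err != nil {
		panic(err)
	}
	fmt.Println(r.Usage.PromptTokens, r.Usage.CompletionTokens, r.Usage.TotalTokens)
}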