Add recording of token usage for AI provider ChatGLM3

sunanzhi
2025-01-02 18:45:14 +08:00
parent e07e6927c8
commit 663130ff51
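
For context, the handler in this diff reads token counts from data.Config.Usage. The Response/Usage types themselves are not part of this diff; the sketch below is a minimal, hypothetical illustration of how such a usage block is typically declared and parsed. The Go field names mirror what the diff accesses (PromptTokens, CompletionTokens, TotalTokens), while the JSON tag names assume ChatGLM3's OpenAI-compatible response layout and are not taken from this repository.

package main

import (
	"encoding/json"
	"fmt"
)

// Hypothetical sketch only: the real Response type lives elsewhere in the repository.
type Usage struct {
	PromptTokens     int `json:"prompt_tokens"`     // tokens consumed by the input (assumed tag name)
	CompletionTokens int `json:"completion_tokens"` // tokens produced in the output (assumed tag name)
	TotalTokens      int `json:"total_tokens"`      // prompt + completion (assumed tag name)
}

type Response struct {
	Usage Usage `json:"usage"` // the token accounting block read by ResponseConvert
}

func main() {
	// A trimmed ChatGLM3-style response body, just enough to exercise the usage parsing.
	body := []byte(`{"usage":{"prompt_tokens":12,"completion_tokens":34,"total_tokens":46}}`)

	var data Response
	if err := json.Unmarshal(body, &data); err != nil {
		panic(err)
	}
	fmt.Println(data.Usage.PromptTokens, data.Usage.CompletionTokens, data.Usage.TotalTokens) // 12 34 46
}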


@@ -77,15 +77,28 @@ func (c *Chat) ResponseConvert(ctx eocontext.EoContext) error {
 	if err != nil {
 		return err
 	}
-	if httpContext.Response().StatusCode() != 200 {
-		return nil
-	}
 	body := httpContext.Response().GetBody()
 	data := eosc.NewBase[Response]()
 	err = json.Unmarshal(body, data)
 	if err != nil {
 		return err
 	}
+	// Handle the response according to its status code.
+	switch httpContext.Response().StatusCode() {
+	case 200:
+		// Calculate the token consumption for a successful request.
+		usage := data.Config.Usage
+		ai_provider.SetAIStatusNormal(ctx)
+		ai_provider.SetAIModelInputToken(ctx, usage.PromptTokens)
+		ai_provider.SetAIModelOutputToken(ctx, usage.CompletionTokens)
+		ai_provider.SetAIModelTotalToken(ctx, usage.TotalTokens)
+	case 400:
+		// Handle the bad request error.
+		ai_provider.SetAIStatusInvalidRequest(ctx)
+	case 401:
+		// Handle authentication failure.
+		ai_provider.SetAIStatusInvalid(ctx)
+	}
 	responseBody := &ai_provider.ClientResponse{}
 	if len(data.Config.Choices) > 0 {
 		msg := data.Config.Choices[0]