Add environment variable TOKEN_DELIMITER to support a custom delimiter

Author: wisdgod
Date: 2025-01-18 03:50:38 +08:00
parent 19609d4809
commit 742c2e1c5c
4 changed files with 33 additions and 28 deletions

View File

@@ -47,3 +47,6 @@ REVERSE_PROXY_HOST=
# Request body size limit, in MB
# Defaults to 2MB (2,097,152 bytes)
REQUEST_BODY_LIMIT_MB=2
# Delimiter between the token and the checksum in OpenAI requests
TOKEN_DELIMITER=,

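For illustration only (hypothetical values; the combined format is inferred from the comment above): with the default delimiter a credential is passed as <your_token>,<your_checksum>, and setting for example TOKEN_DELIMITER=; would change that to <your_token>;<your_checksum>.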
View File

@@ -3,7 +3,7 @@ use crate::{
CURSOR_API2_HOST, CURSOR_HOST, DEFAULT_TOKEN_FILE_NAME, DEFAULT_TOKEN_LIST_FILE_NAME,
EMPTY_STRING,
},
common::utils::parse_string_from_env,
common::utils::{parse_char_from_env, parse_string_from_env},
};
use std::sync::LazyLock;
@@ -49,12 +49,17 @@ pub fn get_start_time() -> chrono::DateTime<chrono::Local> {
def_pub_static!(DEFAULT_INSTRUCTIONS, env: "DEFAULT_INSTRUCTIONS", default: "Respond in Chinese by default");
def_pub_static!(REVERSE_PROXY_HOST, env: "REVERSE_PROXY_HOST", default: "");
def_pub_static!(REVERSE_PROXY_HOST, env: "REVERSE_PROXY_HOST", default: EMPTY_STRING);
def_pub_static!(SHARED_AUTH_TOKEN, env: "SHARED_AUTH_TOKEN", default: EMPTY_STRING);
pub static USE_SHARE: LazyLock<bool> = LazyLock::new(|| !SHARED_AUTH_TOKEN.is_empty());
pub static TOKEN_DELIMITER: LazyLock<char> =
LazyLock::new(|| parse_char_from_env("TOKEN_DELIMITER", ','));
pub static TOKEN_DELIMITER_LEN: LazyLock<usize> = LazyLock::new(|| TOKEN_DELIMITER.len_utf8());
pub static USE_PROXY: LazyLock<bool> = LazyLock::new(|| !REVERSE_PROXY_HOST.is_empty());
pub static CURSOR_API2_CHAT_URL: LazyLock<String> = LazyLock::new(|| {

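A side note on the two statics added above, not part of the diff itself: TOKEN_DELIMITER_LEN caches the delimiter's UTF-8 width because Rust string slicing is byte-indexed, so skipping a non-ASCII delimiter needs its byte length rather than a hard-coded 1. A minimal, self-contained sketch:

fn main() {
    assert_eq!(','.len_utf8(), 1);
    assert_eq!('；'.len_utf8(), 3); // full-width semicolon: three bytes in UTF-8
    let s = "token；checksum";
    let pos = s.rfind('；').unwrap();           // byte offset of the last delimiter
    let checksum = &s[pos + '；'.len_utf8()..]; // skip the whole delimiter, not just one byte
    assert_eq!(checksum, "checksum");
}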
View File

@@ -243,27 +243,6 @@ pub async fn handle_chat(
};
// Build the request client
// let client_key = match generate_client_key(&checksum) {
// Some(key) => key,
// None => {
// let mut state = state.lock().await;
// if let Some(log) = state
// .request_logs
// .iter_mut()
// .rev()
// .find(|log| log.id == current_id)
// {
// log.status = STATUS_FAILED;
// log.error = Some(ERR_CHECKSUM_NO_GOOD.to_string());
// }
// state.active_requests -= 1;
// state.error_requests += 1;
// return Err((
// StatusCode::BAD_REQUEST,
// Json(ChatError::RequestFailed(ERR_CHECKSUM_NO_GOOD.to_string()).to_json()),
// ));
// }
// };
let client = build_client(&auth_token, &checksum);
let response = client.body(hex_data).send().await;

View File

@@ -4,7 +4,10 @@ mod tokens;
pub use tokens::*;
use super::models::userinfo::{StripeProfile, TokenProfile, UsageProfile, UserProfile};
use crate::app::constant::{FALSE, TRUE};
use crate::app::{
constant::{FALSE, TRUE},
lazy::{TOKEN_DELIMITER, TOKEN_DELIMITER_LEN},
};
pub fn parse_bool_from_env(key: &str, default: bool) -> bool {
std::env::var(key)
@@ -21,6 +24,20 @@ pub fn parse_string_from_env(key: &str, default: &str) -> String {
std::env::var(key).unwrap_or_else(|_| default.to_string())
}
pub fn parse_char_from_env(key: &str, default: char) -> char {
std::env::var(key)
.ok()
.and_then(|v| {
let chars: Vec<char> = v.chars().collect();
if chars.len() == 1 {
Some(chars[0])
} else {
None
}
})
.unwrap_or(default)
}
pub fn parse_usize_from_env(key: &str, default: usize) -> usize {
std::env::var(key)
.ok()
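A quick, hypothetical check of the single-character rule in parse_char_from_env above, rewritten against a plain Option<String> so the sketch does not have to touch the process environment (pick_char is an illustrative stand-in, not a function from this crate):

fn pick_char(raw: Option<String>, default: char) -> char {
    raw.and_then(|v| {
        let mut it = v.chars();
        match (it.next(), it.next()) {
            (Some(c), None) => Some(c), // exactly one character: accept it
            _ => None,                  // empty or multi-character value: reject
        }
    })
    .unwrap_or(default)
}

fn main() {
    assert_eq!(pick_char(Some(";".into()), ','), ';');  // a single character wins
    assert_eq!(pick_char(Some(";;".into()), ','), ','); // more than one character falls back
    assert_eq!(pick_char(None, ','), ',');              // unset falls back to the default
}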
@@ -86,9 +103,9 @@ pub async fn get_user_profile(auth_token: &str) -> Option<UserProfile> {
pub fn validate_token_and_checksum(auth_token: &str) -> Option<(String, String)> {
// Find the last comma
let comma_pos = auth_token.rfind(',')?;
let comma_pos = auth_token.rfind(*TOKEN_DELIMITER)?;
let (token_part, checksum) = auth_token.split_at(comma_pos);
let checksum = &checksum[1..]; // skip the comma
let checksum = &checksum[*TOKEN_DELIMITER_LEN..]; // skip the delimiter
// Parse the token - for backward compatibility, ignore everything before the last ':' or '%3A'
let colon_pos = token_part.rfind(':');
@@ -115,11 +132,12 @@ pub fn validate_token_and_checksum(auth_token: &str) -> Option<(String, String)>
pub fn extract_token(auth_token: &str) -> Option<String> {
// Parse the token
let token_part = match auth_token.rfind(',') {
let token_part = match auth_token.rfind(*TOKEN_DELIMITER) {
Some(pos) => &auth_token[..pos],
None => auth_token
None => auth_token,
};
// Backward compatibility
let colon_pos = token_part.rfind(':');
let encoded_colon_pos = token_part.rfind("%3A");
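To make the delimiter handling in validate_token_and_checksum and extract_token concrete, here is a standalone sketch of the split under the assumed semantics (the real functions additionally strip an optional prefix ending in ':' or '%3A' for backward compatibility):

fn split_token_checksum(raw: &str, delim: char) -> Option<(&str, &str)> {
    let pos = raw.rfind(delim)?;             // last occurrence of the delimiter
    let (token, rest) = raw.split_at(pos);
    Some((token, &rest[delim.len_utf8()..])) // drop the delimiter itself
}

fn main() {
    // With TOKEN_DELIMITER=';' a combined credential could look like "token;checksum".
    assert_eq!(
        split_token_checksum("my-token;my-checksum", ';'),
        Some(("my-token", "my-checksum"))
    );
    // No delimiter: validate_token_and_checksum would bail out, while extract_token
    // treats the whole input as the token.
    assert_eq!(split_token_checksum("my-token-only", ';'), None);
}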