[CI] add output for last_token in test_streaming_with_stop_str (#5170)

Author: YuBaoku
Date:   2025-11-24 10:49:17 +08:00
Committed by: GitHub
Parent: b9e76f1a7a
Commit: 98f1ab46a9
2 changed files with 4 additions and 4 deletions


@@ -385,14 +385,14 @@ def test_streaming_with_stop_str(openai_client):
messages=[{"role": "user", "content": "Hello, how are you?"}],
temperature=1,
max_tokens=5,
extra_body={"include_stop_str_in_output": True},
extra_body={"min_tokens": 1, "include_stop_str_in_output": True},
stream=True,
)
# Assertions to check the response structure
last_token = ""
for chunk in response:
last_token = chunk.choices[0].delta.content
assert last_token.endswith("</s>")
assert last_token.endswith("</s>"), f"last_token did not end with '</s>': {last_token!r}"
response = openai_client.chat.completions.create(
model="default",


@@ -539,14 +539,14 @@ def test_streaming_with_stop_str(openai_client):
messages=[{"role": "user", "content": "Hello, how are you?"}],
temperature=1,
max_tokens=5,
extra_body={"include_stop_str_in_output": True},
extra_body={"min_tokens": 1, "include_stop_str_in_output": True},
stream=True,
)
# Assertions to check the response structure
last_token = ""
for chunk in response:
last_token = chunk.choices[0].delta.content
assert last_token.endswith("</s>")
assert last_token.endswith("</s>"), f"last_token did not end with '</s>': {last_token!r}"
response = openai_client.chat.completions.create(
model="default",