server : fix cache_tokens bug with no cache_prompt (#13533)
This commit is contained in:
parent 09d13d94fb
commit 360a9c98e1

3 changed files with 25 additions and 11 deletions
@@ -196,6 +196,18 @@ def test_cache_vs_nocache_prompt():
     assert res_cache.body["content"] == res_no_cache.body["content"]
 
 
+def test_nocache_long_input_prompt():
+    global server
+    server.start()
+    res = server.make_request("POST", "/completion", data={
+        "prompt": "I believe the meaning of life is"*32,
+        "seed": 42,
+        "temperature": 1.0,
+        "cache_prompt": False,
+    })
+    assert res.status_code == 200
+
+
 def test_completion_with_tokens_input():
     global server
     server.temperature = 0.0
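
For context, a minimal standalone sketch of the request the new test issues, assuming a running llama-server instance on http://localhost:8080 (the host, port, and use of the `requests` library are assumptions; the test above goes through the repository's own test harness instead):

# Hypothetical reproduction script; the server address and the `requests`
# dependency are assumptions, not part of this commit.
import requests

res = requests.post(
    "http://localhost:8080/completion",
    json={
        # A long prompt (the phrase repeated 32 times) exercises the
        # code path that the cache_tokens bug affected.
        "prompt": "I believe the meaning of life is" * 32,
        "seed": 42,
        "temperature": 1.0,
        # The bug was triggered when prompt caching was explicitly disabled.
        "cache_prompt": False,
    },
)
assert res.status_code == 200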