mirror of
https://github.com/oobabooga/text-generation-webui.git
synced 2025-06-09 07:07:16 -04:00
Fix llama.cpp loader not being random (thanks @reydeljuego12345)
This commit is contained in:
parent
49dfa0adaf
commit
c9a9f63d1b
1 changed files with 1 additions and 1 deletions
|
@@ -136,7 +136,7 @@ class LlamaCppModel:
             prompt=prompt,
             max_tokens=state['max_new_tokens'],
             temperature=state['temperature'],
-            top_p=state['top_p'],
+            top_p=state['top_p'] if state['top_p'] < 1 else 0.999,
             min_p=state['min_p'],
             typical_p=state['typical_p'],
             frequency_penalty=state['frequency_penalty'],
|
|
Loading…
Add table
Reference in a new issue