fix do_sample bug

lvmin 2023-09-13 18:55:51 -07:00
parent ac1050dd24
commit 0f58038d85
2 changed files with 6 additions and 2 deletions


@@ -1 +1 @@
-version = '2.0.10'
+version = '2.0.11'


@@ -52,7 +52,11 @@ class FooocusExpansion:
         # https://huggingface.co/blog/introducing-csearch
         # https://huggingface.co/docs/transformers/generation_strategies
-        features = self.model.generate(**tokenized_kwargs, penalty_alpha=0.8, top_k=8, max_new_tokens=256)
+        features = self.model.generate(**tokenized_kwargs,
+                                       penalty_alpha=0.6,
+                                       top_k=4,
+                                       max_new_tokens=256,
+                                       do_sample=True)
         response = self.tokenizer.batch_decode(features, skip_special_tokens=True)
         result = response[0][len(origin):]
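
For context: in the Transformers versions I am familiar with, passing do_sample=True selects multinomial sampling instead of deterministic contrastive search, so repeated runs (or different seeds) can yield different prompt expansions. Below is a minimal sketch of the new generate() call in isolation; the 'gpt2' checkpoint and the prompt string are placeholders for illustration, not the model or inputs actually used by this repository.

# Minimal sketch of the updated generation call (assumptions: generic
# 'gpt2' checkpoint and a placeholder prompt, chosen only for illustration).
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained('gpt2')
model = AutoModelForCausalLM.from_pretrained('gpt2')

# Tokenize a prompt; **tokenized_kwargs forwards input_ids and attention_mask.
tokenized_kwargs = tokenizer('a photo of a cat', return_tensors='pt')

# do_sample=True makes the output stochastic; penalty_alpha, top_k and
# max_new_tokens mirror the values set in this commit.
features = model.generate(**tokenized_kwargs,
                          penalty_alpha=0.6,
                          top_k=4,
                          max_new_tokens=256,
                          do_sample=True)

response = tokenizer.batch_decode(features, skip_special_tokens=True)
print(response[0])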