reduce semantic corruption

lvmin 2023-09-10 10:50:01 -07:00
parent 43c443d80e
commit 8a6cfbbf15
3 changed files with 12 additions and 3 deletions


@@ -1 +1 @@
-version = '1.0.51'
+version = '1.0.52'


@@ -3,6 +3,10 @@ from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline, set_seed
 from modules.path import fooocus_expansion_path
 
 
+def safe_str(x):
+    return str(x).rstrip(",. \r\n")
+
+
 class FooocusExpansion:
     def __init__(self):
         self.tokenizer = AutoTokenizer.from_pretrained(fooocus_expansion_path)
@@ -15,9 +19,10 @@ class FooocusExpansion:
         print('Fooocus Expansion engine loaded.')
 
     def __call__(self, prompt, seed):
-        prompt = str(prompt).rstrip('\n')
+        prompt = safe_str(prompt) + '. '  # reduce semantic corruption.
         seed = int(seed)
         set_seed(seed)
         response = self.pipe(prompt, max_length=len(prompt) + 256)
-        result = response[0]['generated_text'].rstrip('\n')
+        result = response[0]['generated_text']
+        result = safe_str(result)
         return result
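
For reference, a minimal sketch of what the new safe_str helper and the '. ' separator do to a raw prompt before it is sent to the expansion pipeline; the example string below is hypothetical, not taken from the repository.

def safe_str(x):
    # Strip trailing commas, periods, spaces, and newlines, as in the diff above.
    return str(x).rstrip(",. \r\n")

# Hypothetical raw prompt ending in stray punctuation and a newline.
raw = "a cat, cinematic lighting,. \n"

# The __call__ method now feeds the model a clean sentence boundary
# instead of trailing punctuation, and trims the generated text the same way.
prompt = safe_str(raw) + '. '
print(repr(prompt))  # 'a cat, cinematic lighting. '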


@@ -1,3 +1,7 @@
+### 1.0.52
+
+* Reduce the semantic corruption of Prompt Expansion.
+
 ### 1.0.51
 
 * Speed up Prompt Expansion a bit.