feat: change type of hash_cache file to txt, append to file if single line is provided

This commit is contained in:
Manuel Schmid 2024-07-01 17:21:30 +02:00
parent 33b1c5cb87
commit 1fa7eeac84
No known key found for this signature in database
GPG Key ID: 32C4F7569B40B84B
2 changed files with 28 additions and 14 deletions

2
.gitignore vendored
View File

@@ -10,7 +10,7 @@ __pycache__
*.partial
*.onnx
sorted_styles.json
hash_cache.json
hash_cache.txt
/input
/cache
/language/default.json

View File

@@ -1,15 +1,18 @@
# Module setup — post-commit state reconstructed from the diff
# (the scrape interleaved the old and new import/constant lines).
import json
import os

from modules.util import sha256, HASH_SHA256_LENGTH

# Cache file is JSON-lines (hence '.txt', not '.json'): one
# {filepath: hash} object per line, so single entries can be appended.
hash_cache_filename = 'hash_cache.txt'

# In-memory map: filepath -> SHA-256 hex digest string.
hash_cache = {}
def sha256_from_cache(filepath):
    """Return the SHA-256 digest of *filepath*, caching results.

    On a cache miss the hash is computed once, stored in the in-memory
    ``hash_cache``, and appended to the on-disk cache file via
    ``save_cache_to_file(filepath, hash_value)``.

    The scraped diff showed both the pre- and post-commit bodies
    interleaved (hash computed twice, plus an argument-less save call);
    this is the post-commit behavior: compute once, save the single
    new entry.
    """
    global hash_cache
    if filepath not in hash_cache:
        hash_value = sha256(filepath)
        hash_cache[filepath] = hash_value
        # Append only the new entry instead of rewriting the whole file.
        save_cache_to_file(filepath, hash_value)
    return hash_cache[filepath]
def load_cache_from_file():
    """Warm the in-memory hash cache from the JSON-lines cache file.

    Each line of ``hash_cache_filename`` is a one-item JSON object
    ``{filepath: hash}``. Entries are skipped when the file no longer
    exists or the hash is not a string of length ``HASH_SHA256_LENGTH``.
    Any failure is reported and swallowed — warming is best-effort.
    """
    try:
        if os.path.exists(hash_cache_filename):
            with open(hash_cache_filename, 'rt', encoding='utf-8') as fp:
                for line in fp:
                    # Tolerate blank/trailing lines left by appends.
                    if not line.strip():
                        continue
                    entry = json.loads(line)
                    for filepath, hash_value in entry.items():
                        # Bug fix: the original used `and`, which (a) only
                        # rejected values that were BOTH non-str and of the
                        # wrong length and (b) called len() on non-strings.
                        # Intended check is: file missing OR hash malformed.
                        if not os.path.exists(filepath) or not isinstance(hash_value, str) or len(hash_value) != HASH_SHA256_LENGTH:
                            print(f'[Cache] Skipping invalid cache entry: {filepath}')
                            continue
                        hash_cache[filepath] = hash_value
            print(f'[Cache] Warmed cache from file')
    except Exception as e:
        print(f'[Cache] Warming failed: {e}')
def save_cache_to_file(filename=None, hash_value=None):
    """Persist the hash cache as JSON-lines (one object per line).

    When both *filename* and *hash_value* are given, append just that
    single entry ('at'); otherwise rewrite the whole file ('wt') with
    every cached entry, sorted by filepath for stable output. Failures
    are reported and swallowed — saving is best-effort.

    The scraped diff interleaved the pre-commit body (single
    ``json.dump(hash_cache, fp, indent=4)`` in 'wt' mode) with the
    post-commit one; this is the post-commit behavior. The loop
    variables are renamed so they no longer shadow the *hash_value*
    parameter.
    """
    global hash_cache
    if filename is not None and hash_value is not None:
        # Fast path: append one entry without touching the rest.
        items = [(filename, hash_value)]
        mode = 'at'
    else:
        items = sorted(hash_cache.items())
        mode = 'wt'
    try:
        with open(hash_cache_filename, mode, encoding='utf-8') as fp:
            for entry_path, entry_hash in items:
                json.dump({entry_path: entry_hash}, fp)
                fp.write('\n')
        print(f'[Cache] Updated cache file')
    except Exception as e:
        print(f'[Cache] Saving failed: {e}')