Skip to content

Commit

Permalink
Merge commit 'e2f9bcb11d06216d6800676c48d8d74d6fd77a4b'
Browse files Browse the repository at this point in the history
# Conflicts:
#	fooocus_version.py
#	modules/meta_parser.py
  • Loading branch information
mashb1t committed Mar 23, 2024
2 parents b4a257b + e2f9bcb commit 8fff047
Show file tree
Hide file tree
Showing 6 changed files with 89 additions and 24 deletions.
2 changes: 1 addition & 1 deletion fooocus_version.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
version = '2.3.0 (mashb1t)'
version = '2.3.1 (mashb1t)'
8 changes: 4 additions & 4 deletions modules/async_worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -636,12 +636,12 @@ def handler(async_task):

H, W, C = inpaint_image.shape
if 'left' in outpaint_selections:
inpaint_image = np.pad(inpaint_image, [[0, 0], [int(H * 0.3), 0], [0, 0]], mode='edge')
inpaint_mask = np.pad(inpaint_mask, [[0, 0], [int(H * 0.3), 0]], mode='constant',
inpaint_image = np.pad(inpaint_image, [[0, 0], [int(W * 0.3), 0], [0, 0]], mode='edge')
inpaint_mask = np.pad(inpaint_mask, [[0, 0], [int(W * 0.3), 0]], mode='constant',
constant_values=255)
if 'right' in outpaint_selections:
inpaint_image = np.pad(inpaint_image, [[0, 0], [0, int(H * 0.3)], [0, 0]], mode='edge')
inpaint_mask = np.pad(inpaint_mask, [[0, 0], [0, int(H * 0.3)]], mode='constant',
inpaint_image = np.pad(inpaint_image, [[0, 0], [0, int(W * 0.3)], [0, 0]], mode='edge')
inpaint_mask = np.pad(inpaint_mask, [[0, 0], [0, int(W * 0.3)]], mode='constant',
constant_values=255)

inpaint_image = np.ascontiguousarray(inpaint_image.copy())
Expand Down
2 changes: 2 additions & 0 deletions modules/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -515,6 +515,7 @@ def init_temp_path(path: str | None, default_path: str) -> str:
"default_scheduler": "scheduler",
"default_overwrite_step": "steps",
"default_performance": "performance",
"default_image_number": "image_number",
"default_prompt": "prompt",
"default_prompt_negative": "negative_prompt",
"default_styles": "styles",
Expand Down Expand Up @@ -568,6 +569,7 @@ def add_ratio(x):

sdxl_lcm_lora = 'sdxl_lcm_lora.safetensors'
sdxl_lightning_lora = 'sdxl_lightning_4step_lora.safetensors'
loras_metadata_remove = [sdxl_lcm_lora, sdxl_lightning_lora]


def get_model_filenames(folder_paths, extensions=None, name_filter=None):
Expand Down
56 changes: 42 additions & 14 deletions modules/meta_parser.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
import json
import os
import re
from abc import ABC, abstractmethod
from pathlib import Path
Expand All @@ -12,7 +11,7 @@
import modules.sdxl_styles
from modules.flags import MetadataScheme, Performance, Steps
from modules.flags import SAMPLERS, CIVITAI_NO_KARRAS
from modules.util import quote, unquote, extract_styles_from_prompt, is_json, get_file_from_folder_list, calculate_sha256
from modules.util import quote, unquote, extract_styles_from_prompt, is_json, get_file_from_folder_list, sha256

re_param_code = r'\s*(\w[\w \-/]+):\s*("(?:\\.|[^\\"])+"|[^,]*)(?:,|$)'
re_param = re.compile(re_param_code)
Expand All @@ -27,8 +26,9 @@ def load_parameter_button_click(raw_metadata: dict | str, is_generating: bool):
loaded_parameter_dict = json.loads(raw_metadata)
assert isinstance(loaded_parameter_dict, dict)

results = [len(loaded_parameter_dict) > 0, 1]
results = [len(loaded_parameter_dict) > 0]

get_image_number('image_number', 'Image Number', loaded_parameter_dict, results)
get_str('prompt', 'Prompt', loaded_parameter_dict, results)
get_str('negative_prompt', 'Negative Prompt', loaded_parameter_dict, results)
get_list('styles', 'Styles', loaded_parameter_dict, results)
Expand Down Expand Up @@ -92,13 +92,25 @@ def get_float(key: str, fallback: str | None, source_dict: dict, results: list,
results.append(gr.update())


def get_image_number(key: str, fallback: str | None, source_dict: dict, results: list, default=None):
try:
h = source_dict.get(key, source_dict.get(fallback, default))
assert h is not None
h = int(h)
h = min(h, modules.config.default_max_image_number)
results.append(h)
except:
results.append(1)


def get_steps(key: str, fallback: str | None, source_dict: dict, results: list, default=None):
try:
h = source_dict.get(key, source_dict.get(fallback, default))
assert h is not None
h = int(h)
# if not in steps or in steps and performance is not the same
if h not in iter(Steps) or Steps(h).name.casefold() != source_dict.get('performance', '').replace(' ', '_').casefold():
if h not in iter(Steps) or Steps(h).name.casefold() != source_dict.get('performance', '').replace(' ',
'_').casefold():
results.append(h)
return
results.append(-1)
Expand Down Expand Up @@ -192,7 +204,8 @@ def get_lora(key: str, fallback: str | None, source_dict: dict, results: list):
def get_sha256(filepath):
    """Return the sha256 digest for filepath, memoized in the module-level hash_cache.

    Hashing model files is expensive, so the digest is computed once per path
    and reused for subsequent metadata lookups.
    """
    global hash_cache
    if filepath not in hash_cache:
        # stale pre-merge assignment (calculate_sha256) and commented-out
        # dead code removed; sha256() handles truncation itself
        hash_cache[filepath] = sha256(filepath)

    return hash_cache[filepath]

Expand Down Expand Up @@ -276,6 +289,12 @@ def set_data(self, raw_prompt, full_prompt, raw_negative_prompt, full_negative_p
lora_hash = get_sha256(lora_path)
self.loras.append((Path(lora_name).stem, lora_weight, lora_hash))

@staticmethod
def remove_special_loras(lora_filenames):
    """Strip the special performance LoRAs (LCM / Lightning) from lora_filenames in place."""
    for special_lora in modules.config.loras_metadata_remove:
        try:
            # list.remove drops the first occurrence, matching the original
            # membership-check-then-remove behavior
            lora_filenames.remove(special_lora)
        except ValueError:
            # not present in the list; nothing to strip
            pass


class A1111MetadataParser(MetadataParser):
def get_scheme(self) -> MetadataScheme:
Expand Down Expand Up @@ -385,12 +404,19 @@ def parse_json(self, metadata: str) -> dict:
data[key] = filename
break

if 'lora_hashes' in data and data['lora_hashes'] != '':
lora_data = ''
if 'lora_weights' in data and data['lora_weights'] != '':
lora_data = data['lora_weights']
elif 'lora_hashes' in data and data['lora_hashes'] != '' and data['lora_hashes'].split(', ')[0].count(':') == 2:
lora_data = data['lora_hashes']

if lora_data != '':
lora_filenames = modules.config.lora_filenames.copy()
if modules.config.sdxl_lcm_lora in lora_filenames:
lora_filenames.remove(modules.config.sdxl_lcm_lora)
for li, lora in enumerate(data['lora_hashes'].split(', ')):
lora_name, lora_hash, lora_weight = lora.split(': ')
self.remove_special_loras(lora_filenames)
for li, lora in enumerate(lora_data.split(', ')):
lora_split = lora.split(': ')
lora_name = lora_split[0]
lora_weight = lora_split[2] if len(lora_split) == 3 else lora_split[1]
for filename in lora_filenames:
path = Path(filename)
if lora_name == path.stem:
Expand Down Expand Up @@ -441,11 +467,15 @@ def parse_string(self, metadata: dict) -> str:

if len(self.loras) > 0:
lora_hashes = []
lora_weights = []
for index, (lora_name, lora_weight, lora_hash) in enumerate(self.loras):
# workaround for Fooocus not knowing LoRA name in LoRA metadata
lora_hashes.append(f'{lora_name}: {lora_hash}: {lora_weight}')
lora_hashes.append(f'{lora_name}: {lora_hash}')
lora_weights.append(f'{lora_name}: {lora_weight}')
lora_hashes_string = ', '.join(lora_hashes)
lora_weights_string = ', '.join(lora_weights)
generation_params[self.fooocus_to_a1111['lora_hashes']] = lora_hashes_string
generation_params[self.fooocus_to_a1111['lora_weights']] = lora_weights_string

generation_params[self.fooocus_to_a1111['version']] = data['version']

Expand All @@ -468,9 +498,7 @@ def get_scheme(self) -> MetadataScheme:
def parse_json(self, metadata: dict) -> dict:
model_filenames = modules.config.model_filenames.copy()
lora_filenames = modules.config.lora_filenames.copy()
if modules.config.sdxl_lcm_lora in lora_filenames:
lora_filenames.remove(modules.config.sdxl_lcm_lora)

self.remove_special_loras(lora_filenames)
for key, value in metadata.items():
if value in ['', 'None']:
continue
Expand Down
38 changes: 33 additions & 5 deletions modules/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,9 @@
import os
import cv2
import json
import hashlib

from PIL import Image
from hashlib import sha256

import modules.sdxl_styles

Expand Down Expand Up @@ -182,16 +182,44 @@ def get_files_from_folder(folder_path, extensions=None, name_filter=None):
return filenames


def sha256(filename, use_addnet_hash=False, length=HASH_SHA256_LENGTH):
    """Return the sha256 hex digest of a file, optionally truncated to `length`.

    When use_addnet_hash is True, hash only the tensor payload of a safetensors
    file (kohya-ss/addnet scheme) so the digest matches community LoRA hashes;
    otherwise hash the whole file. `length=None` returns the full digest.
    """
    # progress line: filename first, digest appended once computed
    # (restored the {filename} placeholder lost in the page extraction)
    print(f"Calculating sha256 for {filename}: ", end='')
    if use_addnet_hash:
        with open(filename, "rb") as file:
            sha256_value = addnet_hash_safetensors(file)
    else:
        sha256_value = calculate_sha256(filename)
    print(f"{sha256_value}")

    return sha256_value[:length] if length is not None else sha256_value


def addnet_hash_safetensors(b):
    """kohya-ss hash for safetensors from https://github.com/kohya-ss/sd-scripts/blob/main/library/train_util.py"""
    digest = hashlib.sha256()
    chunk_size = 1024 * 1024

    # the first 8 bytes hold the little-endian length of the JSON header;
    # hashing starts right after the header so metadata edits don't change the hash
    b.seek(0)
    header_size = int.from_bytes(b.read(8), "little")
    b.seek(header_size + 8)

    while True:
        chunk = b.read(chunk_size)
        if not chunk:
            break
        digest.update(chunk)

    return digest.hexdigest()


def calculate_sha256(filename) -> str:
    """Return the full (untruncated) sha256 hex digest of the file at filename.

    Reads in 1 MiB chunks so memory stays bounded even for multi-GB model files.
    Truncation to display length is handled by the sha256() wrapper.
    """
    hash_sha256 = hashlib.sha256()
    blksize = 1024 * 1024

    with open(filename, "rb") as f:
        for chunk in iter(lambda: f.read(blksize), b""):
            hash_sha256.update(chunk)

    # stale pre-merge lines referencing the removed `length` parameter
    # (a NameError if ever reached) were dropped here
    return hash_sha256.hexdigest()


def quote(text):
Expand Down
7 changes: 7 additions & 0 deletions update_log.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,10 @@
# [2.3.1](https://github.com/lllyasviel/Fooocus/releases/tag/2.3.1)

* Remove positive prompt from anime prefix to not reset prompt after switching presets
* Fix image number being reset to 1 when switching presets; the configured value is now preserved
* Fix outpainting dimension calculation when extending left/right
* Fix LoRA compatibility for LoRAs in a1111 metadata scheme

# [2.3.0](https://github.com/lllyasviel/Fooocus/releases/tag/2.3.0)

* Add performance "lightning" (based on [SDXL-Lightning 4 step LoRA](https://huggingface.co/ByteDance/SDXL-Lightning/blob/main/sdxl_lightning_4step_lora.safetensors))
Expand Down

0 comments on commit 8fff047

Please sign in to comment.