Skip to content

Commit

Permalink
Add max LoRA count configuration feature
Browse files Browse the repository at this point in the history
  • Loading branch information
MindOfMatter committed Jan 25, 2024
1 parent f7af74a commit cfe7463
Show file tree
Hide file tree
Showing 4 changed files with 24 additions and 12 deletions.
2 changes: 1 addition & 1 deletion modules/async_worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -131,7 +131,7 @@ def handler(async_task):
base_model_name = args.pop()
refiner_model_name = args.pop()
refiner_switch = args.pop()
loras = [[str(args.pop()), float(args.pop())] for _ in range(5)]
loras = [[str(args.pop()), float(args.pop())] for _ in range(modules.config.default_loras_max_number)]
input_image_checkbox = args.pop()
current_tab = args.pop()
uov_method = args.pop()
Expand Down
8 changes: 7 additions & 1 deletion modules/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -192,6 +192,11 @@ def get_config_item_or_set_default(key, default_value, validator, disable_empty_
],
validator=lambda x: isinstance(x, list) and all(len(y) == 2 and isinstance(y[0], str) and isinstance(y[1], numbers.Number) for y in x)
)
default_loras_max_number = get_config_item_or_set_default(
key='default_loras_max_number',
default_value=len(default_loras),
validator=lambda x: isinstance(x, numbers.Number) and x >= 1
)
default_cfg_scale = get_config_item_or_set_default(
key='default_cfg_scale',
default_value=4.0,
Expand Down Expand Up @@ -318,13 +323,14 @@ def get_config_item_or_set_default(key, default_value, validator, disable_empty_

example_inpaint_prompts = [[x] for x in example_inpaint_prompts]

config_dict["default_loras"] = default_loras = default_loras[:5] + [['None', 1.0] for _ in range(5 - len(default_loras))]
config_dict["default_loras"] = default_loras = default_loras[:default_loras_max_number] + [['None', 1.0] for _ in range(default_loras_max_number - len(default_loras))]

possible_preset_keys = [
"default_model",
"default_refiner",
"default_refiner_switch",
"default_loras",
"default_loras_max_number",
"default_cfg_scale",
"default_sample_sharpness",
"default_sampler",
Expand Down
24 changes: 15 additions & 9 deletions modules/meta_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -135,14 +135,20 @@ def load_parameter_button_click(raw_prompt_txt, is_generating):

results.append(gr.update(visible=False))

for i in range(1, 6):
try:
n, w = loaded_parameter_dict.get(f'LoRA {i}').split(' : ')
w = float(w)
results.append(n)
results.append(w)
except:
results.append(gr.update())
results.append(gr.update())
for i in range(1, modules.config.default_loras_max_number + 1):
lora_key = f'LoRA {i}'
if lora_key in loaded_parameter_dict:
try:
n, w = loaded_parameter_dict[lora_key].split(' : ')
w = float(w)
results.append(n) # Update LoRA model
results.append(w) # Update LoRA weight
except Exception as e:
# If there's an error parsing, log it or handle it as needed
print(f"Error parsing {lora_key}: {e}")
results.extend([gr.update(), gr.update()]) # Keep existing settings unchanged
else:
# If the LoRA setting is not in the JSON, keep the existing settings unchanged
results.extend([gr.update(), gr.update()])

return results
2 changes: 1 addition & 1 deletion webui.py
Original file line number Diff line number Diff line change
Expand Up @@ -466,7 +466,7 @@ def model_refresh_clicked():
modules.config.update_all_model_names()
results = []
results += [gr.update(choices=modules.config.model_filenames), gr.update(choices=['None'] + modules.config.model_filenames)]
for i in range(5):
for i in range(modules.config.default_loras_max_number):
results += [gr.update(choices=['None'] + modules.config.lora_filenames), gr.update()]
return results

Expand Down

0 comments on commit cfe7463

Please sign in to comment.