Merge pull request #2354 from bmaltais/dev
v24.0.5
bmaltais authored Apr 20, 2024
2 parents 6c69b89 + 5a80164 commit 5cec4c6
Showing 18 changed files with 143 additions and 70 deletions.
2 changes: 1 addition & 1 deletion .release
@@ -1 +1 @@
v24.0.4
v24.0.5
25 changes: 22 additions & 3 deletions README.md
@@ -42,7 +42,10 @@ The GUI allows you to set the training parameters and generate and run the requi
- [SDXL training](#sdxl-training)
- [Masked loss](#masked-loss)
- [Change History](#change-history)
- [2024/04/25 (v24.0.4)](#20240425-v2404)
- [2024/04/19 (v24.0.5)](#20240419-v2405)
- [2024/04/18 (v24.0.4)](#20240418-v2404)
- [What's Changed](#whats-changed)
- [New Contributors](#new-contributors)
- [2024/04/24 (v24.0.3)](#20240424-v2403)
- [2024/04/24 (v24.0.2)](#20240424-v2402)
- [2024/04/17 (v24.0.1)](#20240417-v2401)
@@ -409,9 +412,25 @@ ControlNet dataset is used to specify the mask. The mask images should be the RG
## Change History
### 2024/04/25 (v24.0.4)
### 2024/04/19 (v24.0.5)
- ...
- fdds
### 2024/04/18 (v24.0.4)
#### What's Changed
- Fix options.md heading by @bmaltais in <https://github.com/bmaltais/kohya_ss/pull/2337>
- Use correct file extensions when browsing for model file by @b-fission in <https://github.com/bmaltais/kohya_ss/pull/2323>
- Add argument for Gradio's `root_path` to enable reverse proxy support by @hlky in <https://github.com/bmaltais/kohya_ss/pull/2333>
- 2325 quotes wrapping python path cause subprocess cant find target in v2403 by @bmaltais in <https://github.com/bmaltais/kohya_ss/pull/2338>
- 2330 another seemingly new data validation leads to unusable configs 2403 by @bmaltais in <https://github.com/bmaltais/kohya_ss/pull/2339>
- Fix bad Lora parameters by @bmaltais in <https://github.com/bmaltais/kohya_ss/pull/2341>
#### New Contributors
- @b-fission made their first contribution in <https://github.com/bmaltais/kohya_ss/pull/2323>
- @hlky made their first contribution in <https://github.com/bmaltais/kohya_ss/pull/2333>
### 2024/04/24 (v24.0.3)
2 changes: 1 addition & 1 deletion kohya_gui/blip2_caption_gui.py
@@ -120,7 +120,7 @@ def generate_caption(
output_file_path = os.path.splitext(file_path)[0] + caption_file_ext

# Write the generated text to the output file
with open(output_file_path, "w") as output_file:
with open(output_file_path, "w", encoding="utf-8") as output_file:
output_file.write(generated_text)

# Log the image file path with a message about the fact that the caption was generated
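The change above pins the caption file's encoding instead of relying on the platform default. A minimal sketch of why that matters (the helper and file name below are illustrative, not code from the repository): `open()` without an encoding argument falls back to the locale's preferred encoding, which is often cp1252 on Windows and can fail on captions containing accented or CJK characters.

```python
import locale

def write_caption(path: str, text: str) -> None:
    # Explicit UTF-8 keeps caption files byte-identical across Windows, macOS and Linux.
    with open(path, "w", encoding="utf-8") as f:
        f.write(text)

# On many Windows setups this prints "cp1252"; writing the caption below
# without encoding="utf-8" would then raise UnicodeEncodeError.
print(locale.getpreferredencoding(False))
write_caption("caption_example.txt", "a café terrace at night, 夜のカフェテラス")
```

The same reasoning applies to the other `encoding="utf-8"` additions throughout this commit.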
9 changes: 5 additions & 4 deletions kohya_gui/class_advanced_training.py
@@ -510,10 +510,11 @@ def list_state_dirs(path):
value=self.config.get("advanced.max_data_loader_n_workers", 0),
)
with gr.Row():
self.use_wandb = gr.Checkbox(
label="WANDB Logging",
value=self.config.get("advanced.use_wandb", False),
info="If unchecked, tensorboard will be used as the default for logging.",
self.log_with = gr.Dropdown(
label="Logging",
choices=["","wandb", "tensorboard","all"],
value="",
info="Loggers to use, tensorboard will be used as the default.",
)
self.wandb_api_key = gr.Textbox(
label="WANDB API Key",
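Here the boolean WANDB checkbox becomes a `log_with` dropdown that can select wandb, tensorboard, or all loggers. A rough sketch of the intended semantics (`build_logging_args` is a hypothetical helper, not repository code), assuming the empty default string means "no explicit logger, fall back to tensorboard":

```python
def build_logging_args(log_with: str) -> dict:
    """Translate the dropdown value into a training-config entry."""
    args = {}
    if log_with:  # "" (the dropdown default) adds nothing, so the trainer default applies
        args["log_with"] = log_with  # "wandb", "tensorboard" or "all"
    return args

print(build_logging_args(""))       # {}
print(build_logging_args("wandb"))  # {'log_with': 'wandb'}
```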
4 changes: 2 additions & 2 deletions kohya_gui/class_basic_training.py
@@ -98,15 +98,15 @@ def init_training_controls(self) -> None:
step=1,
# precision=0,
minimum=0,
value=self.config.get("basic.max_train_epochs", 1600),
value=self.config.get("basic.max_train_epochs", 0),
)
# Initialize the maximum train steps input
self.max_train_steps = gr.Number(
label="Max train steps",
info="Overrides # training steps. 0 = no override",
step=1,
# precision=0,
value=self.config.get("basic.max_train_steps", 0),
value=self.config.get("basic.max_train_steps", 1600),
)
# Initialize the save every N epochs input
self.save_every_n_epochs = gr.Number(
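This hunk swaps the two defaults: `max_train_epochs` now defaults to 0 while `max_train_steps` picks up the 1600 that used to sit on the epochs field, presumably following the "0 = no override" convention shown in the steps field's info text. A hedged illustration of how those defaults resolve when a saved config omits the keys (field names mirror the GUI labels only; this is not the repository's code):

```python
def effective_training_limits(config: dict) -> dict:
    # Fall back to the new GUI defaults when a saved config has no value.
    return {
        "max_train_epochs": config.get("basic.max_train_epochs", 0),   # 0 = no override
        "max_train_steps": config.get("basic.max_train_steps", 1600),  # step-based default
    }

print(effective_training_limits({}))                               # {'max_train_epochs': 0, 'max_train_steps': 1600}
print(effective_training_limits({"basic.max_train_steps": 3000}))  # a saved value wins over the default
```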
2 changes: 1 addition & 1 deletion kohya_gui/class_gui_config.py
@@ -45,7 +45,7 @@ def save_config(self, config: dict, config_file_path: str = "./config.toml"):
- config (dict): The configuration data to save.
"""
# Write the configuration data to the TOML file
with open(f"{config_file_path}", "w") as f:
with open(f"{config_file_path}", "w", encoding="utf-8") as f:
toml.dump(config, f)

def get(self, key: str, default=None):
2 changes: 1 addition & 1 deletion kohya_gui/class_sample_images.py
@@ -30,7 +30,7 @@ def create_prompt_file(sample_prompts, output_dir):
"""
sample_prompts_path = os.path.join(output_dir, "prompt.txt")

with open(sample_prompts_path, "w") as f:
with open(sample_prompts_path, "w", encoding="utf-8") as f:
f.write(sample_prompts)

return sample_prompts_path
13 changes: 12 additions & 1 deletion kohya_gui/class_tensorboard.py
@@ -94,9 +94,20 @@ def stop_tensorboard(self):
return self.get_button_states(started=False)

def gradio_interface(self):
try:
os.environ['TF_ENABLE_ONEDNN_OPTS'] = '0'

import tensorflow # Attempt to import tensorflow to check if it is installed

visibility = True

except ImportError:
self.log.error("tensorflow is not installed, hiding the tensorboard button...")
visibility = False

with gr.Row():
button_start_tensorboard = gr.Button(
value="Start tensorboard", elem_id="myTensorButton"
value="Start tensorboard", elem_id="myTensorButton", visible=visibility
)
button_stop_tensorboard = gr.Button(
value="Stop tensorboard",
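The tensorboard start/stop buttons are now shown only when TensorFlow can actually be imported, so users without it no longer see a button that fails at runtime. A lighter-weight variant of the same presence check, sketched with importlib (an alternative suggestion, not what the GUI uses), avoids importing the heavy package just to test for it:

```python
import importlib.util

def tensorboard_button_visible() -> bool:
    # find_spec locates the package on sys.path without executing its import.
    return importlib.util.find_spec("tensorflow") is not None

print(tensorboard_button_visible())  # False when TensorFlow is not installed
```

Note that the diff also sets `TF_ENABLE_ONEDNN_OPTS=0` before the import, which the sketch above deliberately leaves out.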
43 changes: 33 additions & 10 deletions kohya_gui/common_gui.py
@@ -331,7 +331,6 @@ def update_my_data(my_data):
"lr_warmup",
"max_data_loader_n_workers",
"max_train_epochs",
"max_train_steps",
"save_every_n_epochs",
"seed",
]:
@@ -352,6 +351,17 @@ def update_my_data(my_data):
except ValueError:
# Handle the case where the string is not a valid float
my_data[key] = int(1)

for key in [
"max_train_steps",
]:
value = my_data.get(key)
if value is not None:
try:
my_data[key] = int(value)
except ValueError:
# Handle the case where the string is not a valid float
my_data[key] = int(1600)

# Convert values to int if they are strings
for key in ["max_token_length"]:
@@ -406,7 +416,20 @@ def update_my_data(my_data):
my_data["xformers"] = "xformers"
else:
my_data["xformers"] = "none"


# Convert use_wandb to log_with="wandb" if it is set to True
for key in ["use_wandb"]:
value = my_data.get(key)
if value is not None:
try:
if value == "True":
my_data["log_with"] = "wandb"
except ValueError:
# Handle the case where the string is not a valid float
pass

my_data.pop(key, None)

return my_data
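
The new block above migrates old configs: a legacy `use_wandb` flag equal to `"True"` becomes `log_with = "wandb"`, and the legacy key is popped in every case. A condensed sketch of that behavior (slightly broader than the diff, which only compares against the string `"True"`; `migrate_logging_key` is an illustrative name, not repository code):

```python
def migrate_logging_key(data: dict) -> dict:
    value = data.pop("use_wandb", None)  # the legacy key is removed either way
    if value in (True, "True"):          # accept the boolean as well as the stored string
        data["log_with"] = "wandb"
    return data

print(migrate_logging_key({"use_wandb": "True"}))  # {'log_with': 'wandb'}
print(migrate_logging_key({"use_wandb": False}))   # {}
```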


@@ -743,13 +766,13 @@ def add_pre_postfix(
# Check if the caption file does not exist
if not os.path.exists(caption_file_path):
# Create a new caption file with the specified prefix and/or postfix
with open(caption_file_path, "w", encoding="utf8") as f:
with open(caption_file_path, "w", encoding="utf-8") as f:
# Determine the separator based on whether both prefix and postfix are provided
separator = " " if prefix and postfix else ""
f.write(f"{prefix}{separator}{postfix}")
else:
# Open the existing caption file for reading and writing
with open(caption_file_path, "r+", encoding="utf8") as f:
with open(caption_file_path, "r+", encoding="utf-8") as f:
# Read the content of the caption file, stripping any trailing whitespace
content = f.read().rstrip()
# Move the file pointer to the beginning of the file
@@ -850,11 +873,11 @@ def find_replace(
file_path = os.path.join(folder_path, caption_file)
# Read and replace text
try:
with open(file_path, "r", errors="ignore") as f:
with open(file_path, "r", errors="ignore", encoding="utf-8") as f:
content = f.read().replace(search_text, replace_text)

# Write the updated content back to the file
with open(file_path, "w") as f:
with open(file_path, "w", encoding="utf-8") as f:
f.write(content)
except Exception as e:
log.error(f"Error processing file {file_path}: {e}")
@@ -1218,7 +1241,7 @@ def SaveConfigFile(
log.info(f"Creating folder {folder_path} for the configuration file...")

# Save the data to the specified JSON file
with open(file_path, "w") as file:
with open(file_path, "w", encoding="utf-8") as file:
json.dump(variables, file, indent=2)


@@ -1242,7 +1265,7 @@ def save_to_file(content):

# Append content to the specified file
try:
with open(file_path, "a") as file:
with open(file_path, "a", encoding="utf-8") as file:
file.write(content + "\n")
except IOError as e:
print(f"Error: Could not write to file - {e}")
@@ -1443,7 +1466,7 @@ def is_file_writable(file_path: str) -> bool:

try:
# Attempt to open the file in append mode to check if it can be written to
with open(file_path, "a"):
with open(file_path, "a", encoding="utf-8"):
pass
# If the file can be opened, it is considered writable
return True
@@ -1463,7 +1486,7 @@ def print_command_and_toml(run_cmd, tmpfilename):

log.info(f"Showing toml config file: {tmpfilename}")
print("")
with open(tmpfilename, "r") as toml_file:
with open(tmpfilename, "r", encoding="utf-8") as toml_file:
log.info(toml_file.read())
log.info(f"end of toml config file: {tmpfilename}")

14 changes: 7 additions & 7 deletions kohya_gui/dreambooth_gui.py
@@ -159,7 +159,7 @@ def save_configuration(
save_every_n_steps,
save_last_n_steps,
save_last_n_steps_state,
use_wandb,
log_with,
wandb_api_key,
wandb_run_name,
log_tracker_name,
@@ -317,7 +317,7 @@ def open_configuration(
save_every_n_steps,
save_last_n_steps,
save_last_n_steps_state,
use_wandb,
log_with,
wandb_api_key,
wandb_run_name,
log_tracker_name,
@@ -350,7 +350,7 @@ def open_configuration(

if not file_path == "" and not file_path == None:
# load variables from JSON file
with open(file_path, "r") as f:
with open(file_path, "r", encoding="utf-8") as f:
my_data = json.load(f)
log.info("Loading config...")
# Update values to fix deprecated use_8bit_adam checkbox and set appropriate optimizer if it is set to True
@@ -470,7 +470,7 @@ def train_model(
save_every_n_steps,
save_last_n_steps,
save_last_n_steps_state,
use_wandb,
log_with,
wandb_api_key,
wandb_run_name,
log_tracker_name,
@@ -783,7 +783,7 @@ def train_model(
),
"train_batch_size": train_batch_size,
"train_data_dir": train_data_dir,
"use_wandb": use_wandb,
"log_with": log_with,
"v2": v2,
"v_parameterization": v_parameterization,
"v_pred_like_loss": v_pred_like_loss if v_pred_like_loss != 0 else None,
@@ -810,7 +810,7 @@ def train_model(

tmpfilename = "./outputs/tmpfiledbooth.toml"
# Save the updated TOML data back to the file
with open(tmpfilename, "w") as toml_file:
with open(tmpfilename, "w", encoding="utf-8") as toml_file:
toml.dump(config_toml_data, toml_file)

if not os.path.exists(toml_file.name):
@@ -1056,7 +1056,7 @@ def dreambooth_tab(
advanced_training.save_every_n_steps,
advanced_training.save_last_n_steps,
advanced_training.save_last_n_steps_state,
advanced_training.use_wandb,
advanced_training.log_with,
advanced_training.wandb_api_key,
advanced_training.wandb_run_name,
advanced_training.log_tracker_name,
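Throughout dreambooth_gui.py the `use_wandb` parameter is renamed to `log_with`, so the dropdown value flows from the advanced-training panel through `save_configuration`/`open_configuration`/`train_model` and ends up in the TOML file handed to the training script. A condensed, hedged sketch of that final step (the keys below are a tiny illustrative subset of the real dictionary):

```python
import toml  # third-party "toml" package, already used by the GUI

config_toml_data = {
    "train_data_dir": "/path/to/images",  # hypothetical value
    "train_batch_size": 1,
    "log_with": "wandb",                  # value selected in the new dropdown
    "v_pred_like_loss": None,             # optional entries may arrive as None
}

# Keep optional keys out of the file when they carry no value.
config_toml_data = {k: v for k, v in config_toml_data.items() if v is not None}

print(toml.dumps(config_toml_data))
```

finetune_gui.py receives the same mechanical rename in the hunks that follow.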
16 changes: 8 additions & 8 deletions kohya_gui/finetune_gui.py
@@ -169,7 +169,7 @@ def save_configuration(
save_every_n_steps,
save_last_n_steps,
save_last_n_steps_state,
use_wandb,
log_with,
wandb_api_key,
wandb_run_name,
log_tracker_name,
@@ -335,7 +335,7 @@ def open_configuration(
save_every_n_steps,
save_last_n_steps,
save_last_n_steps_state,
use_wandb,
log_with,
wandb_api_key,
wandb_run_name,
log_tracker_name,
@@ -383,7 +383,7 @@ def open_configuration(

if not file_path == "" and not file_path == None:
# load variables from JSON file
with open(file_path, "r") as f:
with open(file_path, "r", encoding="utf-8") as f:
my_data = json.load(f)
log.info("Loading config...")
# Update values to fix deprecated use_8bit_adam checkbox and set appropriate optimizer if it is set to True
@@ -507,7 +507,7 @@ def train_model(
save_every_n_steps,
save_last_n_steps,
save_last_n_steps_state,
use_wandb,
log_with,
wandb_api_key,
wandb_run_name,
log_tracker_name,
@@ -856,7 +856,7 @@ def train_model(
"train_batch_size": train_batch_size,
"train_data_dir": image_folder,
"train_text_encoder": train_text_encoder,
"use_wandb": use_wandb,
"log_with": log_with,
"v2": v2,
"v_parameterization": v_parameterization,
"v_pred_like_loss": v_pred_like_loss if v_pred_like_loss != 0 else None,
@@ -882,7 +882,7 @@ def train_model(

tmpfilename = "./outputs/tmpfilefinetune.toml"
# Save the updated TOML data back to the file
with open(tmpfilename, "w") as toml_file:
with open(tmpfilename, "w", encoding="utf-8") as toml_file:
toml.dump(config_toml_data, toml_file)

if not os.path.exists(toml_file.name):
@@ -1209,7 +1209,7 @@ def list_presets(path):
advanced_training.save_every_n_steps,
advanced_training.save_last_n_steps,
advanced_training.save_last_n_steps_state,
advanced_training.use_wandb,
advanced_training.log_with,
advanced_training.wandb_api_key,
advanced_training.wandb_run_name,
advanced_training.log_tracker_name,
@@ -1322,6 +1322,6 @@ def list_presets(path):
gr.Markdown("This section provide Various Finetuning guides and information...")
top_level_path = rf'"{scriptdir}/docs/Finetuning/top_level.md"'
if os.path.exists(top_level_path):
with open(os.path.join(top_level_path), "r", encoding="utf8") as file:
with open(os.path.join(top_level_path), "r", encoding="utf-8") as file:
guides_top_level = file.read() + "\n"
gr.Markdown(guides_top_level)
2 changes: 1 addition & 1 deletion kohya_gui/localization.py
@@ -20,7 +20,7 @@ def load_language_js(language_name: str) -> str:
data = {}
if fn is not None:
try:
with open(fn, "r", encoding="utf8") as file:
with open(fn, "r", encoding="utf-8") as file:
data = json.load(file)
except Exception:
logging.ERROR(f"Error loading localization from {fn}")