Safer usage of mkdir across the project

oobabooga 2025-06-17 07:09:33 -07:00
parent 8689d7ecea
commit 0d1597616f
16 changed files with 240 additions and 206 deletions
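
Every hunk below applies the same change: a bare mkdir() call is wrapped in an existence check, and the call keeps (or gains) parents=True / exist_ok=True. A minimal sketch of the pattern, with ensure_dir as a hypothetical helper name that does not appear in the commit:

from pathlib import Path

def ensure_dir(path: Path) -> None:
    # Only call mkdir() when the directory is missing; parents=True creates
    # intermediate directories and exist_ok=True avoids FileExistsError if the
    # directory appears between the check and the call.
    if not path.exists():
        path.mkdir(parents=True, exist_ok=True)

ensure_dir(Path('user_data/logs'))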

View file

@@ -314,7 +314,7 @@ class ModelDownloader:
     def download_model_files(self, model, branch, links, sha256, output_folder, progress_queue=None, start_from_scratch=False, threads=4, specific_file=None, is_llamacpp=False):
         self.progress_queue = progress_queue
-        output_folder.mkdir(parents=True, exist_ok=True)
+        if not output_folder.exists():
+            output_folder.mkdir(parents=True, exist_ok=True)
 
         if not is_llamacpp:

View file

@@ -521,6 +521,7 @@ def backup_adapter(input_folder):
             # Create the new subfolder
             subfolder_path = Path(f"{input_folder}/{creation_date_str}")
-            subfolder_path.mkdir(parents=True, exist_ok=True)
+            if not subfolder_path.exists():
+                subfolder_path.mkdir(parents=True, exist_ok=True)
 
             # Check if the file already exists in the subfolder
@@ -1157,7 +1158,9 @@ def do_train(lora_name: str, always_override: bool, save_steps: int, micro_batch
                 decoded_entries.append({"value": decoded_text})
 
             # Write the log file
-            Path('user_data/logs').mkdir(exist_ok=True)
+            if not Path('user_data/logs').exists():
+                Path('user_data/logs').mkdir(exist_ok=True)
             with open(Path('user_data/logs/train_dataset_sample.json'), 'w') as json_file:
                 json.dump(decoded_entries, json_file, indent=4)

View file

@@ -194,7 +194,9 @@ def precise_cut(text: str, overlap: bool, min_chars_cut: int, eos_to_hc: bool, c
     if debug_slicer:
         # Write the log file
-        Path('user_data/logs').mkdir(exist_ok=True)
+        if not Path('user_data/logs').exists():
+            Path('user_data/logs').mkdir(exist_ok=True)
         sentencelist_dict = {index: sentence for index, sentence in enumerate(sentencelist)}
         output_file = "user_data/logs/sentencelist.json"
         with open(output_file, 'w') as f:
@@ -281,7 +283,9 @@ def sliding_block_cut(text: str, min_chars_cut: int, eos_to_hc: bool, cutoff_len
     if debug_slicer:
         # Write the log file
-        Path('user_data/logs').mkdir(exist_ok=True)
+        if not Path('user_data/logs').exists():
+            Path('user_data/logs').mkdir(exist_ok=True)
         sentencelist_dict = {index: sentence for index, sentence in enumerate(sentencelist)}
         output_file = "user_data/logs/sentencelist.json"
         with open(output_file, 'w') as f:

View file

@@ -168,6 +168,7 @@ def setup():
     print("[XTTS] Loading XTTS...")
     model = load_model()
     print("[XTTS] Done!")
-    Path(f"{this_dir}/outputs").mkdir(parents=True, exist_ok=True)
+    if not Path(f"{this_dir}/outputs").exists():
+        Path(f"{this_dir}/outputs").mkdir(parents=True, exist_ok=True)

View file

@@ -168,6 +168,7 @@ def get_SD_pictures(description, character):
         variadic = f'{date.today().strftime("%Y_%m_%d")}/{character}_{int(time.time())}'
         output_file = Path(f'extensions/sd_api_pictures/outputs/{variadic}.png')
-        output_file.parent.mkdir(parents=True, exist_ok=True)
+        if not output_file.parent.exists():
+            output_file.parent.mkdir(parents=True, exist_ok=True)
 
         with open(output_file.as_posix(), 'wb') as f:

View file

@@ -973,8 +973,8 @@ def save_history(history, unique_id, character, mode):
         return
 
     p = get_history_file_path(unique_id, character, mode)
-    if not p.parent.is_dir():
-        p.parent.mkdir(parents=True)
+    if not p.parent.exists():
+        p.parent.mkdir(parents=True, exist_ok=True)
 
     with open(p, 'w', encoding='utf-8') as f:
         f.write(json.dumps(history, indent=4, ensure_ascii=False))
@@ -1014,7 +1014,9 @@ def get_paths(state):
             unique_id = datetime.now().strftime('%Y%m%d-%H-%M-%S')
             p = get_history_file_path(unique_id, character, state['mode'])
             logger.warning(f"Moving \"{new_p}\" to \"{p}\"")
-            p.parent.mkdir(exist_ok=True)
+            if not p.parent.exists():
+                p.parent.mkdir(exist_ok=True)
             new_p.rename(p)
 
     return Path(f'user_data/logs/chat/{character}').glob('*.json')
@@ -1163,7 +1165,9 @@ def save_last_chat_state(character, mode, unique_id):
     state["last_chats"][key] = unique_id
 
     state_file = Path('user_data/logs/chat_state.json')
-    state_file.parent.mkdir(exist_ok=True)
+    if not state_file.parent.exists():
+        state_file.parent.mkdir(exist_ok=True)
     with open(state_file, 'w', encoding='utf-8') as f:
         f.write(json.dumps(state, indent=2))
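
The save_history hunk above is the one place where an existing check changes rather than a new one being added: is_dir() becomes exists(), and the mkdir() call gains exist_ok=True. A small illustration of what that combination does, using a hypothetical history path that is not taken from the repository:

from pathlib import Path

p = Path("user_data/logs/chat/Example/20250617-12-00-00.json")  # hypothetical path for illustration

# exists() is false only when the parent is genuinely missing, and exist_ok=True
# keeps mkdir() from raising FileExistsError if another process creates the
# directory between the check and the call.
if not p.parent.exists():
    p.parent.mkdir(parents=True, exist_ok=True)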

View file

@@ -27,7 +27,9 @@ def save_past_evaluations(df):
     global past_evaluations
     past_evaluations = df
 
     filepath = Path('user_data/logs/evaluations.csv')
-    filepath.parent.mkdir(parents=True, exist_ok=True)
+    if not filepath.parent.exists():
+        filepath.parent.mkdir(parents=True, exist_ok=True)
     df.to_csv(filepath, index=False)

View file

@@ -9,7 +9,9 @@ def load_prompt(fname):
         # Create new file
         new_name = utils.current_time()
         prompt_path = Path("user_data/logs/notebook") / f"{new_name}.txt"
-        prompt_path.parent.mkdir(parents=True, exist_ok=True)
+        if not prompt_path.parent.exists():
+            prompt_path.parent.mkdir(parents=True, exist_ok=True)
 
         initial_content = "In this story,"
         prompt_path.write_text(initial_content, encoding='utf-8')

View file

@@ -241,6 +241,7 @@ def backup_adapter(input_folder):
             # Create the new subfolder
             subfolder_path = Path(f"{input_folder}/{creation_date_str}")
-            subfolder_path.mkdir(parents=True, exist_ok=True)
+            if not subfolder_path.exists():
+                subfolder_path.mkdir(parents=True, exist_ok=True)
 
             # Check if the file already exists in the subfolder
@@ -676,7 +677,9 @@ def do_train(lora_name: str, always_override: bool, q_proj_en: bool, v_proj_en:
                 decoded_entries.append({"value": decoded_text})
 
             # Write the log file
-            Path('user_data/logs').mkdir(exist_ok=True)
+            if not Path('user_data/logs').exists():
+                Path('user_data/logs').mkdir(exist_ok=True)
             with open(Path('user_data/logs/train_dataset_sample.json'), 'w') as json_file:
                 json.dump(decoded_entries, json_file, indent=4)

View file

@@ -409,7 +409,9 @@ def _perform_debounced_save():
         if _last_interface_state is not None:
             contents = save_settings(_last_interface_state, _last_preset, _last_extensions, _last_show_controls, _last_theme_state, manual_save=False)
             settings_path = Path('user_data') / 'settings.yaml'
-            settings_path.parent.mkdir(exist_ok=True)
+            if not settings_path.parent.exists():
+                settings_path.parent.mkdir(exist_ok=True)
 
             with open(settings_path, 'w', encoding='utf-8') as f:
                 f.write(contents)
     except Exception as e:

View file

@@ -152,7 +152,9 @@ def autosave_prompt(text, prompt_name):
     """Automatically save the text to the selected prompt file"""
     if prompt_name and text.strip():
         prompt_path = Path("user_data/logs/notebook") / f"{prompt_name}.txt"
-        prompt_path.parent.mkdir(parents=True, exist_ok=True)
+        if not prompt_path.parent.exists():
+            prompt_path.parent.mkdir(parents=True, exist_ok=True)
 
         prompt_path.write_text(text, encoding='utf-8')
@@ -205,7 +207,9 @@ def handle_new_prompt():
     # Create the new prompt file
     prompt_path = Path("user_data/logs/notebook") / f"{new_name}.txt"
-    prompt_path.parent.mkdir(parents=True, exist_ok=True)
+    if not prompt_path.parent.exists():
+        prompt_path.parent.mkdir(parents=True, exist_ok=True)
     prompt_path.write_text("In this story,", encoding='utf-8')
 
     return gr.update(choices=utils.get_available_prompts(), value=new_name)
@@ -222,7 +226,9 @@ def handle_delete_prompt_confirm_default(prompt_name):
         new_value = available_prompts[min(current_index, len(available_prompts) - 1)]
     else:
         new_value = utils.current_time()
-        Path("user_data/logs/notebook").mkdir(parents=True, exist_ok=True)
+        if not Path("user_data/logs/notebook").exists():
+            Path("user_data/logs/notebook").mkdir(parents=True, exist_ok=True)
         (Path("user_data/logs/notebook") / f"{new_value}.txt").write_text("In this story,")
         available_prompts = [new_value]

View file

@@ -197,7 +197,9 @@ def handle_new_prompt():
     # Create the new prompt file
     prompt_path = Path("user_data/logs/notebook") / f"{new_name}.txt"
-    prompt_path.parent.mkdir(parents=True, exist_ok=True)
+    if not prompt_path.parent.exists():
+        prompt_path.parent.mkdir(parents=True, exist_ok=True)
     prompt_path.write_text("In this story,", encoding='utf-8')
 
     return gr.update(choices=utils.get_available_prompts(), value=new_name)
@@ -214,7 +216,9 @@ def handle_delete_prompt_confirm_notebook(prompt_name):
         new_value = available_prompts[min(current_index, len(available_prompts) - 1)]
     else:
         new_value = utils.current_time()
-        Path("user_data/logs/notebook").mkdir(parents=True, exist_ok=True)
+        if not Path("user_data/logs/notebook").exists():
+            Path("user_data/logs/notebook").mkdir(parents=True, exist_ok=True)
         (Path("user_data/logs/notebook") / f"{new_value}.txt").write_text("In this story,")
         available_prompts = [new_value]

View file

@@ -160,6 +160,7 @@ def get_available_presets():
 
 def get_available_prompts():
     notebook_dir = Path('user_data/logs/notebook')
-    notebook_dir.mkdir(parents=True, exist_ok=True)
+    if not notebook_dir.exists():
+        notebook_dir.mkdir(parents=True, exist_ok=True)
 
     prompt_files = list(notebook_dir.glob('*.txt'))

View file

@@ -10,7 +10,8 @@ from modules.logging_colors import logger
 # Set up Gradio temp directory path
 gradio_temp_path = Path('user_data') / 'cache' / 'gradio'
 shutil.rmtree(gradio_temp_path, ignore_errors=True)
-gradio_temp_path.mkdir(parents=True, exist_ok=True)
+if not gradio_temp_path.exists():
+    gradio_temp_path.mkdir(parents=True, exist_ok=True)
 
 # Set environment variables
 os.environ.update({
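
A throwaway sanity check of the guarded pattern, not part of the repository, showing that the check-then-create sequence is idempotent: the second pass finds the directory already present and skips mkdir without raising.

import tempfile
from pathlib import Path

with tempfile.TemporaryDirectory() as tmp:
    target = Path(tmp) / "cache" / "gradio"  # hypothetical target for the test
    for _ in range(2):  # second iteration sees an existing directory and does nothing
        if not target.exists():
            target.mkdir(parents=True, exist_ok=True)
    assert target.is_dir()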