Revert "Safer usage of mkdir across the project"

This reverts commit 0d1597616f.
oobabooga 2025-06-17 07:11:59 -07:00
parent 0d1597616f
commit aa44e542cb
16 changed files with 206 additions and 240 deletions
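
Taken together, the hunks below suggest that the reverted commit had wrapped mkdir() calls in "if not path.exists():" guards, and that this revert restores the shorter call sites that were in place before it. A minimal sketch of the two idioms the diff toggles between, using a hypothetical cache_dir path rather than any specific file from the repository:

from pathlib import Path

cache_dir = Path('user_data') / 'cache' / 'example'  # hypothetical path, for illustration only

# Guarded form (the pattern the reverted commit appears to have introduced):
if not cache_dir.exists():
    cache_dir.mkdir(parents=True, exist_ok=True)

# Unguarded form (the pattern this revert restores at most call sites):
# parents=True creates missing intermediate directories, and exist_ok=True
# suppresses FileExistsError when the directory is already there.
cache_dir.mkdir(parents=True, exist_ok=True)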


@@ -314,7 +314,7 @@ class ModelDownloader:
def download_model_files(self, model, branch, links, sha256, output_folder, progress_queue=None, start_from_scratch=False, threads=4, specific_file=None, is_llamacpp=False):
self.progress_queue = progress_queue
if not output_folder.exists():
output_folder.mkdir(parents=True, exist_ok=True)
if not is_llamacpp:


@@ -521,7 +521,6 @@ def backup_adapter(input_folder):
# Create the new subfolder
subfolder_path = Path(f"{input_folder}/{creation_date_str}")
if not subfolder_path.exists():
subfolder_path.mkdir(parents=True, exist_ok=True)
# Check if the file already exists in the subfolder
@@ -1158,9 +1157,7 @@ def do_train(lora_name: str, always_override: bool, save_steps: int, micro_batch
decoded_entries.append({"value": decoded_text})
# Write the log file
if not Path('user_data/logs').exists():
Path('user_data/logs').mkdir(exist_ok=True)
with open(Path('user_data/logs/train_dataset_sample.json'), 'w') as json_file:
json.dump(decoded_entries, json_file, indent=4)
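
One detail visible in this hunk: the Path('user_data/logs').mkdir(exist_ok=True) form creates only the final 'logs' component, so it assumes 'user_data' already exists. A small sketch of that difference, independent of which side of the diff each line sits on:

from pathlib import Path

logs_dir = Path('user_data/logs')

try:
    # exist_ok=True only tolerates 'logs' already existing; without parents=True
    # this still raises FileNotFoundError if 'user_data' itself is missing.
    logs_dir.mkdir(exist_ok=True)
except FileNotFoundError:
    # parents=True also creates the missing 'user_data' directory.
    logs_dir.mkdir(parents=True, exist_ok=True)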


@@ -194,9 +194,7 @@ def precise_cut(text: str, overlap: bool, min_chars_cut: int, eos_to_hc: bool, c
if debug_slicer:
# Write the log file
if not Path('user_data/logs').exists():
Path('user_data/logs').mkdir(exist_ok=True)
sentencelist_dict = {index: sentence for index, sentence in enumerate(sentencelist)}
output_file = "user_data/logs/sentencelist.json"
with open(output_file, 'w') as f:
@@ -283,9 +281,7 @@ def sliding_block_cut(text: str, min_chars_cut: int, eos_to_hc: bool, cutoff_len
if debug_slicer:
# Write the log file
if not Path('user_data/logs').exists():
Path('user_data/logs').mkdir(exist_ok=True)
sentencelist_dict = {index: sentence for index, sentence in enumerate(sentencelist)}
output_file = "user_data/logs/sentencelist.json"
with open(output_file, 'w') as f:


@@ -168,7 +168,6 @@ def setup():
print("[XTTS] Loading XTTS...")
model = load_model()
print("[XTTS] Done!")
if not Path(f"{this_dir}/outputs").exists():
Path(f"{this_dir}/outputs").mkdir(parents=True, exist_ok=True)


@@ -168,7 +168,6 @@ def get_SD_pictures(description, character):
variadic = f'{date.today().strftime("%Y_%m_%d")}/{character}_{int(time.time())}'
output_file = Path(f'extensions/sd_api_pictures/outputs/{variadic}.png')
if not output_file.parent.exists():
output_file.parent.mkdir(parents=True, exist_ok=True)
with open(output_file.as_posix(), 'wb') as f:


@@ -973,8 +973,8 @@ def save_history(history, unique_id, character, mode):
return
p = get_history_file_path(unique_id, character, mode)
if not p.parent.exists():
p.parent.mkdir(parents=True, exist_ok=True)
if not p.parent.is_dir():
p.parent.mkdir(parents=True)
with open(p, 'w', encoding='utf-8') as f:
f.write(json.dumps(history, indent=4, ensure_ascii=False))
@@ -1014,9 +1014,7 @@ def get_paths(state):
unique_id = datetime.now().strftime('%Y%m%d-%H-%M-%S')
p = get_history_file_path(unique_id, character, state['mode'])
logger.warning(f"Moving \"{new_p}\" to \"{p}\"")
if not p.parent.exists():
p.parent.mkdir(exist_ok=True)
new_p.rename(p)
return Path(f'user_data/logs/chat/{character}').glob('*.json')
@@ -1165,9 +1163,7 @@ def save_last_chat_state(character, mode, unique_id):
state["last_chats"][key] = unique_id
state_file = Path('user_data/logs/chat_state.json')
if not state_file.parent.exists():
state_file.parent.mkdir(exist_ok=True)
with open(state_file, 'w', encoding='utf-8') as f:
f.write(json.dumps(state, indent=2))
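
In the save_history hunk above, the diff moves between an is_dir() guard with mkdir(parents=True) and an exists() guard with mkdir(parents=True, exist_ok=True). With exist_ok=True the separate check is redundant: mkdir itself tolerates an existing directory, and there is no window between checking for the directory and creating it. A sketch of the check-free version, using a hypothetical write_history helper that mirrors the lines shown here:

import json
from pathlib import Path

def write_history(p: Path, history: dict) -> None:
    # Hypothetical helper for illustration; not part of the repository.
    # exist_ok=True makes a prior exists()/is_dir() check unnecessary and
    # closes the gap between checking for the directory and creating it.
    p.parent.mkdir(parents=True, exist_ok=True)
    with open(p, 'w', encoding='utf-8') as f:
        f.write(json.dumps(history, indent=4, ensure_ascii=False))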


@@ -27,9 +27,7 @@ def save_past_evaluations(df):
global past_evaluations
past_evaluations = df
filepath = Path('user_data/logs/evaluations.csv')
if not filepath.parent.exists():
filepath.parent.mkdir(parents=True, exist_ok=True)
df.to_csv(filepath, index=False)


@@ -9,9 +9,7 @@ def load_prompt(fname):
# Create new file
new_name = utils.current_time()
prompt_path = Path("user_data/logs/notebook") / f"{new_name}.txt"
if not prompt_path.parent.exists():
prompt_path.parent.mkdir(parents=True, exist_ok=True)
initial_content = "In this story,"
prompt_path.write_text(initial_content, encoding='utf-8')


@@ -241,7 +241,6 @@ def backup_adapter(input_folder):
# Create the new subfolder
subfolder_path = Path(f"{input_folder}/{creation_date_str}")
if not subfolder_path.exists():
subfolder_path.mkdir(parents=True, exist_ok=True)
# Check if the file already exists in the subfolder
@@ -677,9 +676,7 @@ def do_train(lora_name: str, always_override: bool, q_proj_en: bool, v_proj_en:
decoded_entries.append({"value": decoded_text})
# Write the log file
if not Path('user_data/logs').exists():
Path('user_data/logs').mkdir(exist_ok=True)
with open(Path('user_data/logs/train_dataset_sample.json'), 'w') as json_file:
json.dump(decoded_entries, json_file, indent=4)


@@ -409,9 +409,7 @@ def _perform_debounced_save():
if _last_interface_state is not None:
contents = save_settings(_last_interface_state, _last_preset, _last_extensions, _last_show_controls, _last_theme_state, manual_save=False)
settings_path = Path('user_data') / 'settings.yaml'
if not settings_path.parent.exists():
settings_path.parent.mkdir(exist_ok=True)
with open(settings_path, 'w', encoding='utf-8') as f:
f.write(contents)
except Exception as e:


@@ -152,9 +152,7 @@ def autosave_prompt(text, prompt_name):
"""Automatically save the text to the selected prompt file"""
if prompt_name and text.strip():
prompt_path = Path("user_data/logs/notebook") / f"{prompt_name}.txt"
if not prompt_path.parent.exists():
prompt_path.parent.mkdir(parents=True, exist_ok=True)
prompt_path.write_text(text, encoding='utf-8')
@@ -207,9 +205,7 @@ def handle_new_prompt():
# Create the new prompt file
prompt_path = Path("user_data/logs/notebook") / f"{new_name}.txt"
if not prompt_path.parent.exists():
prompt_path.parent.mkdir(parents=True, exist_ok=True)
prompt_path.write_text("In this story,", encoding='utf-8')
return gr.update(choices=utils.get_available_prompts(), value=new_name)
@@ -226,9 +222,7 @@ def handle_delete_prompt_confirm_default(prompt_name):
new_value = available_prompts[min(current_index, len(available_prompts) - 1)]
else:
new_value = utils.current_time()
if not Path("user_data/logs/notebook").exists():
Path("user_data/logs/notebook").mkdir(parents=True, exist_ok=True)
(Path("user_data/logs/notebook") / f"{new_value}.txt").write_text("In this story,")
available_prompts = [new_value]


@@ -197,9 +197,7 @@ def handle_new_prompt():
# Create the new prompt file
prompt_path = Path("user_data/logs/notebook") / f"{new_name}.txt"
if not prompt_path.parent.exists():
prompt_path.parent.mkdir(parents=True, exist_ok=True)
prompt_path.write_text("In this story,", encoding='utf-8')
return gr.update(choices=utils.get_available_prompts(), value=new_name)
@@ -216,9 +214,7 @@ def handle_delete_prompt_confirm_notebook(prompt_name):
new_value = available_prompts[min(current_index, len(available_prompts) - 1)]
else:
new_value = utils.current_time()
if not Path("user_data/logs/notebook").exists():
Path("user_data/logs/notebook").mkdir(parents=True, exist_ok=True)
(Path("user_data/logs/notebook") / f"{new_value}.txt").write_text("In this story,")
available_prompts = [new_value]


@@ -160,7 +160,6 @@ def get_available_presets():
def get_available_prompts():
notebook_dir = Path('user_data/logs/notebook')
if not notebook_dir.exists():
notebook_dir.mkdir(parents=True, exist_ok=True)
prompt_files = list(notebook_dir.glob('*.txt'))


@@ -10,7 +10,6 @@ from modules.logging_colors import logger
# Set up Gradio temp directory path
gradio_temp_path = Path('user_data') / 'cache' / 'gradio'
shutil.rmtree(gradio_temp_path, ignore_errors=True)
if not gradio_temp_path.exists():
gradio_temp_path.mkdir(parents=True, exist_ok=True)
# Set environment variables
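
The last hunk pairs shutil.rmtree with the directory creation, which makes any exists() check after the rmtree call redundant: the directory has just been deleted (or never existed, since ignore_errors=True swallows that case), and mkdir(parents=True, exist_ok=True) recreates the full path either way. A minimal sketch of that reset idiom:

import shutil
from pathlib import Path

gradio_temp_path = Path('user_data') / 'cache' / 'gradio'

# Drop any previous contents; ignore_errors=True also covers the case where
# the directory does not exist yet.
shutil.rmtree(gradio_temp_path, ignore_errors=True)

# The path is gone at this point, so recreate it outright; parents=True builds
# the intermediate 'user_data/cache' directories if they are missing too.
gradio_temp_path.mkdir(parents=True, exist_ok=True)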