diff --git a/modules/models_settings.py b/modules/models_settings.py
index 283a9744..15985608 100644
--- a/modules/models_settings.py
+++ b/modules/models_settings.py
@@ -9,6 +9,7 @@ import gradio as gr
 import yaml
 
 from modules import chat, loaders, metadata_gguf, shared, ui
+from modules.logging_colors import logger
 
 
 def get_fallback_settings():
@@ -56,7 +57,13 @@ def get_model_metadata(model):
         if path.is_file():
             model_file = path
         else:
-            model_file = list(path.glob('*.gguf'))[0]
+            gguf_files = list(path.glob('*.gguf'))
+            if not gguf_files:
+                error_msg = f"No .gguf files found in directory: {path}"
+                logger.error(error_msg)
+                raise FileNotFoundError(error_msg)
+
+            model_file = gguf_files[0]
 
         metadata = load_gguf_metadata_with_cache(model_file)
 
@@ -171,6 +178,8 @@ def infer_loader(model_name, model_settings, hf_quant_method=None):
     path_to_model = Path(f'{shared.args.model_dir}/{model_name}')
     if not path_to_model.exists():
         loader = None
+    elif shared.args.portable:
+        loader = 'llama.cpp'
     elif len(list(path_to_model.glob('*.gguf'))) > 0:
         loader = 'llama.cpp'
     elif re.match(r'.*\.gguf', model_name.lower()):
diff --git a/modules/ui_model_menu.py b/modules/ui_model_menu.py
index 9e982f0e..a15bba2b 100644
--- a/modules/ui_model_menu.py
+++ b/modules/ui_model_menu.py
@@ -174,7 +174,12 @@ def create_event_handlers():
 
 
 def load_model_wrapper(selected_model, loader, autoload=False):
-    settings = get_model_metadata(selected_model)
+    try:
+        settings = get_model_metadata(selected_model)
+    except FileNotFoundError:
+        exc = traceback.format_exc()
+        yield exc.replace('\n', '\n\n')
+        return
 
     if not autoload:
         yield "### {}\n\n- Settings updated: Click \"Load\" to load the model\n- Max sequence length: {}".format(selected_model, settings['truncation_length_info'])
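For reference, the core of the models_settings.py change is the guarded glob shown below. This is a standalone sketch of the same pattern, not repository code: the helper name first_gguf_file and the example directory are illustrative only.

from pathlib import Path


def first_gguf_file(path: Path) -> Path:
    """Return the first *.gguf file in `path`, raising a descriptive error if none exist."""
    gguf_files = list(path.glob('*.gguf'))
    if not gguf_files:
        # Mirrors the patched behavior: a clear FileNotFoundError instead of
        # the IndexError that `list(path.glob('*.gguf'))[0]` raised before.
        raise FileNotFoundError(f"No .gguf files found in directory: {path}")

    return gguf_files[0]


if __name__ == '__main__':
    try:
        print(first_gguf_file(Path('models/example-model')))  # hypothetical directory
    except FileNotFoundError as err:
        # In the patched load_model_wrapper(), this exception is caught and the
        # formatted traceback is yielded to the UI instead of crashing the generator.
        print(err)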