fix: replace bare except clauses with except Exception (#7400)

This commit is contained in:
Sense_wang 2026-03-05 05:06:17 +08:00 committed by GitHub
parent 1d1f4dfc88
commit 7bf15ad933
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
13 changed files with 21 additions and 21 deletions

View file

@@ -37,7 +37,7 @@ class Iteratorize:
ret = self.mfunc(callback=_callback, *args, **self.kwargs)
except StopNowException:
pass
except:
except Exception:
traceback.print_exc()
pass

View file

@@ -1300,7 +1300,7 @@ def load_last_chat_state():
try:
with open(state_file, 'r', encoding='utf-8') as f:
return json.loads(f.read())
except:
except Exception:
pass
return {"last_chats": {}}
@@ -1372,7 +1372,7 @@ def load_history_json(file, history):
update_message_metadata(history['metadata'], "assistant", i, timestamp="")
return history
except:
except Exception:
return history
@@ -1526,7 +1526,7 @@ def upload_character(file, img_path, tavern=False):
decoded_file = file if isinstance(file, str) else file.decode('utf-8')
try:
data = json.loads(decoded_file)
except:
except Exception:
data = yaml.safe_load(decoded_file)
if 'char_name' in data:

View file

@@ -86,7 +86,7 @@ def calculate_perplexity(models, input_dataset, stride, _max_length):
update_model_parameters(model_settings) # hijacking the command-line arguments
unload_model()
shared.model, shared.tokenizer = load_model(model)
except:
except Exception:
cumulative_log += f"Failed to load `{model}`. Moving on.\n\n"
yield cumulative_log
continue

View file

@@ -466,7 +466,7 @@ class LlamaServer:
response = self.session.get(health_url)
if response.status_code == 200:
break
except:
except Exception:
pass
time.sleep(1)
@@ -559,5 +559,5 @@ def filter_stderr_with_progress(process_stderr):
finally:
try:
process_stderr.close()
except:
except Exception:
pass

View file

@@ -121,7 +121,7 @@ def _get_next_logits(prompt, state, use_samplers, previous, top_logits=25, retur
if isinstance(key, bytes):
try:
key = key.decode()
except:
except Exception:
key = key.decode('latin')
output[key] = row[0]

View file

@@ -53,7 +53,7 @@ def get_single(value_type, file):
value = file.read(value_length)
try:
value = value.decode('utf-8')
except:
except Exception:
pass
else:
type_str = _simple_value_packing.get(value_type)

View file

@@ -33,5 +33,5 @@ def count_tokens(text):
try:
tokens = get_encoded_length(text)
return str(tokens)
except:
except Exception:
return '0'

View file

@@ -503,7 +503,7 @@ def do_train(lora_name: str, always_override: bool, q_proj_en: bool, v_proj_en:
print("Model reloaded OK, continue with training.")
else:
return f"Failed to load {selected_model}."
except:
except Exception:
exc = traceback.format_exc()
logger.error('Failed to reload the model.')
print(exc)
@@ -542,7 +542,7 @@ def do_train(lora_name: str, always_override: bool, q_proj_en: bool, v_proj_en:
logger.info("Loading existing LoRA data")
state_dict_peft = torch.load(f"{lora_file_path}/adapter_model.bin", weights_only=True)
set_peft_model_state_dict(lora_model, state_dict_peft)
except:
except Exception:
yield traceback.format_exc().replace('\n', '\n\n')
return

View file

@@ -224,7 +224,7 @@ def load_model_wrapper(selected_model, loader, autoload=False):
yield f"Successfully loaded `{selected_model}`."
else:
yield f"Failed to load `{selected_model}`."
except:
except Exception:
exc = traceback.format_exc()
logger.error('Failed to load the model.')
print(exc)