Use logger.exception() instead of traceback.print_exc() for error messages

This commit is contained in:
oobabooga 2026-03-18 20:42:20 -07:00
parent eeb0e5700f
commit 779e7611ff
10 changed files with 23 additions and 38 deletions

View file

@@ -1,8 +1,8 @@
import traceback
from queue import Queue
from threading import Thread
import modules.shared as shared
from modules.logging_colors import logger
class StopNowException(Exception):
@@ -38,8 +38,7 @@ class Iteratorize:
except StopNowException:
pass
except Exception:
traceback.print_exc()
pass
logger.exception("Failed in generation callback")
self.q.put(self.sentinel)
if self.c_callback:

View file

@@ -1,7 +1,6 @@
import math
import queue
import threading
import traceback
from pathlib import Path
from typing import Any, List, Tuple
@@ -34,8 +33,7 @@ from modules.text_generation import get_max_prompt_length
try:
import flash_attn
except Exception:
logger.warning('Failed to load flash-attention due to the following error:\n')
traceback.print_exc()
logger.warning('Failed to load flash-attention due to the following error:', exc_info=True)
class LogitBiasFilter(Filter):
@@ -81,7 +79,7 @@ class ConcurrentGenerator:
try:
results = self.generator.iterate()
except Exception:
logger.error("Exception in ConcurrentGenerator iterate loop:\n" + traceback.format_exc())
logger.exception("Exception in ConcurrentGenerator iterate loop")
for q in self.job_queues.values():
q.put(None)
self.job_queues.clear()

View file

@@ -1,5 +1,4 @@
import os
import traceback
from pathlib import Path
from typing import Any, Dict, Optional, Union
@@ -21,8 +20,7 @@ from modules.logging_colors import logger
try:
import flash_attn
except Exception:
logger.warning('Failed to load flash-attention due to the following error:\n')
traceback.print_exc()
logger.warning('Failed to load flash-attention due to the following error:', exc_info=True)
class Exllamav3HF(PreTrainedModel, GenerationMixin):

View file

@@ -1,7 +1,6 @@
import importlib
import importlib.util
import sys
import traceback
from functools import partial
from inspect import signature
from pathlib import Path
@@ -75,8 +74,7 @@ def load_extensions():
raise
except Exception:
logger.error(f'Failed to load the extension "{name}".')
traceback.print_exc()
logger.exception(f'Failed to load the extension "{name}".')
# This iterator returns the extensions in the order specified in the command-line

View file

@@ -1,5 +1,4 @@
import time
import traceback
import numpy as np
@@ -23,7 +22,7 @@ def get_next_logits(*args, **kwargs):
try:
result = _get_next_logits(*args, **kwargs)
except Exception:
traceback.print_exc()
logger.exception("Failed to get next logits")
result = None
if needs_lock:

View file

@@ -4,7 +4,6 @@ import html
import pprint
import random
import time
import traceback
import numpy as np
@@ -477,7 +476,7 @@ def generate_reply_HF(question, original_question, state, stopping_strings=None,
yield cumulative_reply
except Exception:
traceback.print_exc()
logger.exception("Failed to generate reply (HF)")
finally:
t1 = time.time()
original_tokens = len(original_input_ids[0])
@@ -510,7 +509,7 @@ def generate_reply_custom(question, original_question, state, stopping_strings=N
yield reply
except Exception:
traceback.print_exc()
logger.exception("Failed to generate reply (custom)")
finally:
t1 = time.time()

View file

@@ -546,10 +546,8 @@ def do_train(lora_name: str, always_override: bool, all_linear: bool, q_proj_en:
yield f"Failed to load {selected_model}."
return
except Exception:
exc = traceback.format_exc()
logger.error('Failed to reload the model.')
print(exc)
yield exc.replace('\n', '\n\n')
logger.exception('Failed to reload the model.')
yield traceback.format_exc().replace('\n', '\n\n')
return
# == Start prepping the model itself ==

View file

@@ -1,8 +1,7 @@
import traceback
import gradio as gr
from modules import chat, presets, shared, ui, utils
from modules.logging_colors import logger
from modules.utils import gradio, sanitize_filename
@@ -103,7 +102,7 @@ def handle_save_preset_confirm_click(filename, contents):
output = gr.update(choices=available_presets, value=filename)
except Exception:
output = gr.update()
traceback.print_exc()
logger.exception("Failed to save preset")
return [
output,
@@ -119,7 +118,7 @@ def handle_save_confirm_click(root_state, filename, contents):
filename = sanitize_filename(filename)
utils.save_file(root_state + filename, contents)
except Exception:
traceback.print_exc()
logger.exception("Failed to save file")
return None, gr.update(visible=False)
@@ -132,7 +131,7 @@ def handle_delete_confirm_click(root_state, filename):
filename = sanitize_filename(filename)
utils.delete_file(root_state + filename)
except Exception:
traceback.print_exc()
logger.exception("Failed to delete file")
return None, gr.update(visible=False)
@@ -144,7 +143,7 @@ def handle_save_character_confirm_click(name2, greeting, context, character_pict
output = gr.update(choices=available_characters, value=filename)
except Exception:
output = gr.update()
traceback.print_exc()
logger.exception("Failed to save character")
return [
output,
@@ -159,7 +158,7 @@ def handle_delete_character_confirm_click(character):
output = chat.update_character_menu_after_deletion(index)
except Exception:
output = gr.update()
traceback.print_exc()
logger.exception("Failed to delete character")
return [
output,
@@ -214,7 +213,7 @@ def handle_save_user_confirm_click(name1, user_bio, your_picture, filename):
output = gr.update(choices=available_users, value=filename)
except Exception:
output = gr.update()
traceback.print_exc()
logger.exception("Failed to save user")
return [
output,
@@ -229,7 +228,7 @@ def handle_delete_user_confirm_click(user):
output = chat.update_user_menu_after_deletion(index)
except Exception:
output = gr.update()
traceback.print_exc()
logger.exception("Failed to delete user")
return [
output,

View file

@@ -916,9 +916,8 @@ def generate(state, save_images=True):
yield all_images, progress_bar_html()
clear_torch_cache()
except Exception as e:
logger.error(f"Image generation failed: {e}")
traceback.print_exc()
except Exception:
logger.exception("Image generation failed")
yield [], progress_bar_html()
clear_torch_cache()

View file

@@ -222,10 +222,8 @@ def load_model_wrapper(selected_model, loader, autoload=False):
else:
yield f"Failed to load `{selected_model}`."
except Exception:
exc = traceback.format_exc()
logger.error('Failed to load the model.')
print(exc)
yield exc.replace('\n', '\n\n')
logger.exception('Failed to load the model.')
yield traceback.format_exc().replace('\n', '\n\n')
def load_lora_wrapper(selected_loras):