Mirror of https://github.com/oobabooga/text-generation-webui.git
Synced 2025-12-06 07:12:10 +01:00
Commit a156ebbf76 (Lint)
Parent: c871d9cdbd

@@ -245,7 +245,7 @@ class ModelDownloader:
         # Make a HEAD request without following redirects to get metadata first
         r_head = session.head(url, timeout=20, allow_redirects=True)
-        r_head.raise_for_status() # Will raise an error for 4xx or 5xx status codes
+        r_head.raise_for_status()  # Will raise an error for 4xx or 5xx status codes
 
         # Check for the new 'x-linked-size' header from Hugging Face
         if 'x-linked-size' in r_head.headers:
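
For reference, the hunk above probes remote file metadata before downloading. The following standalone sketch (not part of the commit; the repository URL and file name are placeholder assumptions) exercises the same pattern against a Hugging Face 'resolve' URL:

import requests

# Placeholder URL; any LFS-backed file on the Hub behaves the same way.
url = "https://huggingface.co/gpt2/resolve/main/model.safetensors"

session = requests.Session()

# HEAD request to fetch metadata without downloading the file body
r_head = session.head(url, timeout=20, allow_redirects=True)
r_head.raise_for_status()  # Will raise an error for 4xx or 5xx status codes

# Hugging Face reports the size of the underlying LFS object in the
# 'x-linked-size' header; fall back to Content-Length when it is absent.
if 'x-linked-size' in r_head.headers:
    total_size = int(r_head.headers['x-linked-size'])
else:
    total_size = int(r_head.headers.get('content-length', 0))

print(f"{url}: {total_size} bytes")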
@@ -4,8 +4,6 @@ from pathlib import Path
 from typing import Any, Dict, Optional, Union
 
 import torch
-from exllamav3 import Cache, Config, Model
-from exllamav3.cache import CacheLayer_fp16, CacheLayer_quant
 from torch.nn import CrossEntropyLoss
 from transformers import (
     GenerationConfig,
@@ -15,6 +13,8 @@ from transformers import (
 )
 from transformers.modeling_outputs import CausalLMOutputWithPast
 
+from exllamav3 import Cache, Config, Model
+from exllamav3.cache import CacheLayer_fp16, CacheLayer_quant
 from modules import shared
 from modules.logging_colors import logger
 
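
Taken together, the two hunks above only regroup imports: the exllamav3 imports move out of the third-party block at the top of the file and down next to the local modules imports. Reconstructed for clarity (an assumption, since the diff elides the middle of the transformers import list), the import section reads after this commit:

from pathlib import Path
from typing import Any, Dict, Optional, Union

import torch
from torch.nn import CrossEntropyLoss
from transformers import (
    GenerationConfig,
    # ... remaining transformers imports elided by the diff ...
)
from transformers.modeling_outputs import CausalLMOutputWithPast

from exllamav3 import Cache, Config, Model
from exllamav3.cache import CacheLayer_fp16, CacheLayer_quant
from modules import shared
from modules.logging_colors import logger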