Mirror of https://github.com/oobabooga/text-generation-webui.git (synced 2025-12-06 07:12:10 +01:00)
Lint
Commit 999471256c (parent b10d525bf7)
@@ -3,6 +3,7 @@ import traceback
 from pathlib import Path
 
 import torch
+
 from exllamav2 import (
     ExLlamaV2,
     ExLlamaV2Cache,
@@ -15,7 +16,6 @@ from exllamav2 import (
     ExLlamaV2Tokenizer
 )
 from exllamav2.generator import ExLlamaV2Sampler, ExLlamaV2StreamingGenerator
-
 from modules import shared
 from modules.logging_colors import logger
 from modules.text_generation import get_max_prompt_length
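Read together, the two hunks above appear to adjust only blank lines in the ExLlamaV2 loader: the hunk counts suggest a blank line is added after import torch, while the blank line that separated the exllamav2 imports from the modules imports is dropped. The sketch below reconstructs the resulting import block from the visible context lines only; the file path is not shown on this page, the blank-line placement is inferred from the hunk line counts, and the names elided between the two hunks are replaced by a placeholder comment.

import traceback
from pathlib import Path

import torch

from exllamav2 import (
    ExLlamaV2,
    ExLlamaV2Cache,
    # ... further exllamav2 names, elided between the two hunks ...
    ExLlamaV2Tokenizer
)
from exllamav2.generator import ExLlamaV2Sampler, ExLlamaV2StreamingGenerator
from modules import shared
from modules.logging_colors import logger
from modules.text_generation import get_max_prompt_length

The remaining hunks restart their line numbers at 2 and touch a second module, the ExLlamaV3 loader.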
@@ -2,12 +2,9 @@ import traceback
 from pathlib import Path
 from typing import Any, List, Tuple
 
-import torch
 from exllamav3 import Cache, Config, Generator, Model, Tokenizer
 from exllamav3.cache import CacheLayer_fp16, CacheLayer_quant
 from exllamav3.generator import Job
-
-from modules import shared
 from exllamav3.generator.sampler import (
     CustomSampler,
     SS_Argmax,
@@ -19,13 +16,13 @@ from exllamav3.generator.sampler import (
     SS_TopK,
     SS_TopP
 )
+from modules import shared
 from modules.image_utils import (
     convert_image_attachments_to_pil,
     convert_openai_messages_to_images
 )
 from modules.logging_colors import logger
 from modules.text_generation import get_max_prompt_length
-from modules.torch_utils import clear_torch_cache
 
 try:
     import flash_attn
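Applied together, the two hunks above drop import torch and from modules.torch_utils import clear_torch_cache from the ExLlamaV3 loader and move from modules import shared below the exllamav3.generator.sampler import. The sketch below reconstructs the resulting import section from the visible diff lines only; the file path is not shown on this page, and the sampler names elided between the two hunks are replaced by a placeholder comment. The try: import flash_attn guard that follows is unchanged and is omitted here because its except clause lies outside the diff.

import traceback
from pathlib import Path
from typing import Any, List, Tuple

from exllamav3 import Cache, Config, Generator, Model, Tokenizer
from exllamav3.cache import CacheLayer_fp16, CacheLayer_quant
from exllamav3.generator import Job
from exllamav3.generator.sampler import (
    CustomSampler,
    SS_Argmax,
    # ... further SS_* sampler names, elided between the two hunks ...
    SS_TopK,
    SS_TopP
)
from modules import shared
from modules.image_utils import (
    convert_image_attachments_to_pil,
    convert_openai_messages_to_images
)
from modules.logging_colors import logger
from modules.text_generation import get_max_prompt_length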