mirror of
https://github.com/oobabooga/text-generation-webui.git
synced 2026-04-20 22:13:43 +00:00
Fix ExLlamaV2 loaders using unnecessary "bits" metadata
This commit is contained in:
parent
624faa1438
commit
db5f6cd1d8
1 changed file with 2 additions and 1 deletion
|
|
@ -89,7 +89,8 @@ def get_model_metadata(model):
|
|||
if metadata['rope_scaling']['type'] == 'linear':
|
||||
model_settings['compress_pos_emb'] = metadata['rope_scaling']['factor']
|
||||
|
||||
if 'quantization_config' in metadata:
|
||||
# Read GPTQ metadata for old GPTQ loaders
|
||||
if 'quantization_config' in metadata and metadata['quantization_config'].get('quant_method', '') != 'exl2':
|
||||
if 'bits' in metadata['quantization_config']:
|
||||
model_settings['wbits'] = metadata['quantization_config']['bits']
|
||||
if 'group_size' in metadata['quantization_config']:
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue