Spaces:
Running
on
CPU Upgrade
Running
on
CPU Upgrade
Correct parameters calculations
Browse files
backend/app/utils/model_validation.py
CHANGED
@@ -105,13 +105,12 @@ class ModelValidator:
|
|
105 |
if adapter_meta and base_meta:
|
106 |
adapter_size = sum(adapter_meta.parameter_count.values())
|
107 |
base_size = sum(base_meta.parameter_count.values())
|
108 |
-
model_size =
|
109 |
else:
|
110 |
# For regular models, just get the model size
|
111 |
meta = await self.get_safetensors_metadata(model_info.id, revision=revision)
|
112 |
if meta:
|
113 |
-
|
114 |
-
model_size = total_params / (2 * 1e9) # Convert to billions, assuming float16
|
115 |
|
116 |
if model_size is None:
|
117 |
# If model size could not be determined, return an error
|
@@ -119,6 +118,7 @@ class ModelValidator:
|
|
119 |
|
120 |
# Adjust size for GPTQ models
|
121 |
size_factor = 8 if (precision == "GPTQ" or "gptq" in model_info.id.lower()) else 1
|
|
|
122 |
model_size = round(size_factor * model_size, 3)
|
123 |
|
124 |
logger.info(LogFormatter.success(f"Model size: {model_size}B parameters"))
|
|
|
105 |
if adapter_meta and base_meta:
|
106 |
adapter_size = sum(adapter_meta.parameter_count.values())
|
107 |
base_size = sum(base_meta.parameter_count.values())
|
108 |
+
model_size = adapter_size + base_size
|
109 |
else:
|
110 |
# For regular models, just get the model size
|
111 |
meta = await self.get_safetensors_metadata(model_info.id, revision=revision)
|
112 |
if meta:
|
113 |
+
model_size = sum(meta.parameter_count.values()) # total params
|
|
|
114 |
|
115 |
if model_size is None:
|
116 |
# If model size could not be determined, return an error
|
|
|
118 |
|
119 |
# Adjust size for GPTQ models
|
120 |
size_factor = 8 if (precision == "GPTQ" or "gptq" in model_info.id.lower()) else 1
|
121 |
+
model_size = model_size / 1e9 # Convert parameter count to billions
|
122 |
model_size = round(size_factor * model_size, 3)
|
123 |
|
124 |
logger.info(LogFormatter.success(f"Model size: {model_size}B parameters"))
|