add fp16 variant
text_encoder/config.json CHANGED
@@ -19,7 +19,7 @@
   "num_hidden_layers": 12,
   "pad_token_id": 1,
   "projection_dim": 768,
-  "torch_dtype": "
+  "torch_dtype": "float16",
   "transformers_version": "4.35.1",
   "vocab_size": 49408
 }
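The only change to this config is the recorded `torch_dtype`, which now matches the half-precision weight files added alongside it. As a minimal sketch (not part of this commit), the fp16 variant of this text encoder can be loaded through transformers' `variant` argument; the repo id below is a placeholder, not from the commit:

```python
# Sketch only: load the fp16 text-encoder variant added in this commit.
# "org/model" is a hypothetical repo id.
import torch
from transformers import CLIPTextModel

text_encoder = CLIPTextModel.from_pretrained(
    "org/model",
    subfolder="text_encoder",
    variant="fp16",             # resolves text_encoder/model.fp16.safetensors
    torch_dtype=torch.float16,  # matches "torch_dtype" in config.json
)
```

Passing `torch_dtype=torch.float16` keeps the weights in half precision instead of upcasting them to transformers' default float32 on load.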
text_encoder/model.fp16.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:660c6f5b1abae9dc498ac2d21e1347d2abdb0cf6c0c0c8576cd796491d9a6cdd
+size 246144152
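The added file is a Git LFS pointer rather than the weights themselves: its three lines record the LFS spec version, the SHA-256 of the actual object, and its size in bytes (about 246 MB, consistent with roughly 123 M parameters at two bytes each in fp16). Below is a small sketch, not part of the repo, of checking a downloaded file against such a pointer; the function name and local path are illustrative:

```python
# Sketch only: verify a downloaded weight file against the oid/size recorded
# in its Git LFS pointer. Streams the file in 1 MiB chunks so multi-GB
# checkpoints do not need to fit in memory.
import hashlib

def matches_lfs_pointer(path, expected_oid, expected_size):
    digest = hashlib.sha256()
    size = 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
            size += len(chunk)
    return size == expected_size and digest.hexdigest() == expected_oid

# Values taken from the pointer above; the local path is illustrative.
ok = matches_lfs_pointer(
    "text_encoder/model.fp16.safetensors",
    "660c6f5b1abae9dc498ac2d21e1347d2abdb0cf6c0c0c8576cd796491d9a6cdd",
    246144152,
)
```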
text_encoder_2/config.json CHANGED
@@ -19,7 +19,7 @@
   "num_hidden_layers": 32,
   "pad_token_id": 1,
   "projection_dim": 1280,
-  "torch_dtype": "
+  "torch_dtype": "float16",
   "transformers_version": "4.35.1",
   "vocab_size": 49408
 }
text_encoder_2/model.fp16.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ec310df2af79c318e24d20511b601a591ca8cd4f1fce1d8dff822a356bcdb1f4
+size 1389382176
unet/diffusion_pytorch_model.fp16.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e906040f7a2f484cf6c4fcef93bfa3cead8c245a87d02c825a1554bad9c7204a
+size 5135313712
vae/diffusion_pytorch_model.fp16.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bcb60880a46b63dea58e9bc591abe15f8350bde47b405f9c38f4be70c6161e68
+size 167335342
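With all four fp16 files in place (both text encoders, the UNet, and the VAE), the whole pipeline can be loaded in half precision in one call. The sketch below assumes an SDXL-style repo, which the two-text-encoder layout and 1280 projection dim suggest but the commit does not state; the repo id is again a placeholder:

```python
# Sketch only: load the full pipeline using the fp16 variant added here.
# "org/model" is a hypothetical repo id; the SDXL pipeline class is an
# assumption based on the component layout in this commit.
import torch
from diffusers import StableDiffusionXLPipeline

pipe = StableDiffusionXLPipeline.from_pretrained(
    "org/model",
    variant="fp16",            # picks up every *.fp16.safetensors added here
    torch_dtype=torch.float16,
).to("cuda")

image = pipe("a photo of an astronaut riding a horse on the moon").images[0]
```

Compared with the float32 files, the fp16 variant roughly halves both download size and GPU memory, at a small cost in numerical precision.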