From 2d6805ce57cede78acb6515112439c5092c7b257 Mon Sep 17 00:00:00 2001
From: comfyanonymous <121283862+comfyanonymous@users.noreply.github.com>
Date: Tue, 22 Apr 2025 03:17:38 -0700
Subject: [PATCH] Add option for using fp8_e8m0fnu for model weights. (#7733)

Seems to break every model I have tried but worth testing?
---
 comfy/cli_args.py         | 1 +
 comfy/model_management.py | 2 ++
 2 files changed, 3 insertions(+)

diff --git a/comfy/cli_args.py b/comfy/cli_args.py
index 81f29f09..1b971be3 100644
--- a/comfy/cli_args.py
+++ b/comfy/cli_args.py
@@ -66,6 +66,7 @@ fpunet_group.add_argument("--bf16-unet", action="store_true", help="Run the diff
 fpunet_group.add_argument("--fp16-unet", action="store_true", help="Run the diffusion model in fp16")
 fpunet_group.add_argument("--fp8_e4m3fn-unet", action="store_true", help="Store unet weights in fp8_e4m3fn.")
 fpunet_group.add_argument("--fp8_e5m2-unet", action="store_true", help="Store unet weights in fp8_e5m2.")
+fpunet_group.add_argument("--fp8_e8m0fnu-unet", action="store_true", help="Store unet weights in fp8_e8m0fnu.")
 
 fpvae_group = parser.add_mutually_exclusive_group()
 fpvae_group.add_argument("--fp16-vae", action="store_true", help="Run the VAE in fp16, might cause black images.")
diff --git a/comfy/model_management.py b/comfy/model_management.py
index 19e6c8df..43e40224 100644
--- a/comfy/model_management.py
+++ b/comfy/model_management.py
@@ -725,6 +725,8 @@ def unet_dtype(device=None, model_params=0, supported_dtypes=[torch.float16, tor
         return torch.float8_e4m3fn
     if args.fp8_e5m2_unet:
         return torch.float8_e5m2
+    if args.fp8_e8m0fnu_unet:
+        return torch.float8_e8m0fnu
 
     fp8_dtype = None
     if weight_dtype in FLOAT8_TYPES:
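
A note on why this dtype is expected to be lossy: fp8_e8m0fnu has 8 exponent bits and no mantissa or sign bits, so it can only represent non-negative powers of two. The OCP microscaling (MX) spec uses it as a shared-scale dtype rather than a standalone storage format, which is consistent with the commit message's observation that it breaks models when used for raw weights. Below is a minimal sketch of the effect, assuming a PyTorch build (2.7 or later) where torch.float8_e8m0fnu and casts to and from it are available; it is illustrative only and not part of the patch:

    import torch

    # Typical weight values: small magnitudes, not powers of two.
    w = torch.tensor([0.0312, 0.75, 1.3, 2.9], dtype=torch.float32)

    # Round-trip through e8m0 (8 exponent bits, 0 mantissa bits, unsigned).
    q = w.to(torch.float8_e8m0fnu).to(torch.float32)

    # Every value collapses to a power of two; exact rounding may vary by build.
    print(q)

With the patch applied, the format can be selected at startup via the new flag, e.g. "python main.py --fp8_e8m0fnu-unet".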