From d4c7e9f5a90def88a853ebdbcd2f5cd0ccda070a Mon Sep 17 00:00:00 2001
From: r266-tech
Date: Mon, 23 Mar 2026 02:03:20 +0800
Subject: [PATCH] Add warning when using CUDA random generator (#13298)

Warn users that CUDA generators produce different random numbers than
CPU generators with the same seed, since PyTorch uses different RNG
algorithms on each device. This helps users understand why their images
differ when switching generator devices.

Fixes #13298
---
 src/diffusers/utils/torch_utils.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/src/diffusers/utils/torch_utils.py b/src/diffusers/utils/torch_utils.py
index 7f4cb3e12766..a4c37daa4c63 100644
--- a/src/diffusers/utils/torch_utils.py
+++ b/src/diffusers/utils/torch_utils.py
@@ -176,6 +176,12 @@ def randn_tensor(
                 )
         elif gen_device_type != device.type and gen_device_type == "cuda":
             raise ValueError(f"Cannot generate a {device} tensor from a generator of type {gen_device_type}.")
+        elif gen_device_type == "cuda" and device.type == "cuda":
+            logger.warning(
+                "Using a CUDA random generator may produce different results than a CPU generator with the same seed."
+                " This is expected because PyTorch uses different random number generation algorithms on CPU and CUDA."
+                " If you need reproducible results across devices, use a CPU generator (e.g., torch.Generator('cpu'))."
+            )

     # make sure generator list of length 1 is treated like a non-list
     if isinstance(generator, list) and len(generator) == 1: