1 parent ffce2d1 commit f25ea18
src/diffusers/hooks/group_offloading.py
@@ -107,8 +107,9 @@ def onload_(self):
     def offload_(self):
         r"""Offloads the group of modules to the offload_device."""
-        if self.stream is not None and not self.record_stream:
-            torch.cuda.current_stream().synchronize()
+        if self.stream is not None:
+            if not self.record_stream:
+                torch.cuda.current_stream().synchronize()
         for group_module in self.modules:
             for param in group_module.parameters():
                 param.data = self.cpu_param_dict[param]
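
For context, the guarded synchronization is there because, when parameters are moved on a side CUDA stream, in-flight work on the current stream should finish before the GPU tensors behind `param.data` are released for reuse, unless `Tensor.record_stream` is relied on to extend their lifetime across streams. Below is a minimal, self-contained sketch of that offload pattern, not the diffusers implementation: the class name `OffloadGroupSketch`, its constructor arguments, and the inline pinned-CPU dict are hypothetical illustration.

# Minimal sketch of the offload pattern (hypothetical names, not the diffusers API).
import torch
import torch.nn as nn


class OffloadGroupSketch:
    def __init__(self, modules, use_stream=False, record_stream=False):
        self.modules = list(modules)
        # Optional side stream for asynchronous parameter transfers.
        self.stream = torch.cuda.Stream() if (use_stream and torch.cuda.is_available()) else None
        self.record_stream = record_stream
        # Pre-allocated pinned CPU copies so host<->device copies can be non-blocking.
        self.cpu_param_dict = {
            p: p.detach().cpu().pin_memory() for m in self.modules for p in m.parameters()
        }

    def offload_(self):
        """Point every parameter back at its pinned CPU copy."""
        if self.stream is not None:
            if not self.record_stream:
                # Without record_stream, wait for the current stream's pending work
                # before the GPU memory behind param.data can be freed and reused.
                torch.cuda.current_stream().synchronize()
        for group_module in self.modules:
            for param in group_module.parameters():
                param.data = self.cpu_param_dict[param]


# Usage (requires a CUDA device):
# group = OffloadGroupSketch([nn.Linear(8, 8).cuda()], use_stream=True)
# group.offload_()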