Skip to content

Commit

Permalink
Merge pull request #527 from Kosinkadink/develop
Browse files Browse the repository at this point in the history
Fix backwards compatibility of fixed KF scheduling code
  • Loading branch information
Kosinkadink authored Jan 5, 2025
2 parents 63b70f1 + 4267c5a commit 7ec4693
Show file tree
Hide file tree
Showing 6 changed files with 10 additions and 9 deletions.
4 changes: 2 additions & 2 deletions animatediff/context.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
from comfy.model_patcher import ModelPatcher

from .context_extras import ContextExtrasGroup
from .utils_model import BIGMAX
from .utils_model import BIGMAX_TENSOR
from .utils_motion import get_sorted_list_via_attr


Expand Down Expand Up @@ -158,7 +158,7 @@ def prepare_current_context(self, t: Tensor, transformer_options: dict[str, Tens
if curr_t == self._previous_t:
return
prev_index = self._current_index
max_sigma = torch.max(transformer_options.get("sigmas", BIGMAX))
max_sigma = torch.max(transformer_options.get("sample_sigmas", BIGMAX_TENSOR))
# if met guaranteed steps, look for next context in case need to switch
if self._current_used_steps >= self._current_context.get_effective_guarantee_steps(max_sigma):
# if has next index, loop through and see if need to switch
Expand Down
4 changes: 2 additions & 2 deletions animatediff/context_extras.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

from comfy.model_base import BaseModel

from .utils_model import BIGMAX
from .utils_model import BIGMAX_TENSOR
from .utils_motion import (prepare_mask_batch, extend_to_batch_size, get_combined_multival, resize_multival,
get_sorted_list_via_attr)

Expand Down Expand Up @@ -345,7 +345,7 @@ def prepare_current_keyframe(self, t: Tensor, transformer_options: dict[str, Ten
if curr_t == self._previous_t:
return
prev_index = self._current_index
max_sigma = torch.max(transformer_options.get("sigmas", BIGMAX))
max_sigma = torch.max(transformer_options.get("sample_sigmas", BIGMAX_TENSOR))
# if met guaranteed steps, look for next keyframe in case need to switch
if self._current_used_steps >= self._current_keyframe.get_effective_guarantee_steps(max_sigma):
            # if has next index, loop through and see if need to switch
Expand Down
4 changes: 2 additions & 2 deletions animatediff/model_injection.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@
get_combined_multival, get_combined_input, get_combined_input_effect_multival,
ade_broadcast_image_to, extend_to_batch_size, prepare_mask_batch)
from .motion_lora import MotionLoraInfo, MotionLoraList
from .utils_model import get_motion_lora_path, get_motion_model_path, get_sd_model_type, vae_encode_raw_batched, BIGMAX
from .utils_model import get_motion_lora_path, get_motion_model_path, get_sd_model_type, vae_encode_raw_batched, BIGMAX_TENSOR
from .sample_settings import SampleSettings, SeedNoiseGeneration
from .dinklink import DinkLinkConst, get_dinklink, get_acn_outer_sample_wrapper

Expand Down Expand Up @@ -333,7 +333,7 @@ def prepare_current_keyframe(self, patcher: MotionModelPatcher, x: Tensor, t: Te
if curr_t == self.previous_t:
return
prev_index = self.current_index
max_sigma = torch.max(transformer_options.get("sigmas", BIGMAX))
max_sigma = torch.max(transformer_options.get("sample_sigmas", BIGMAX_TENSOR))
# if met guaranteed steps, look for next keyframe in case need to switch
if self.current_keyframe is None or self.current_used_steps >= self.current_keyframe.get_effective_guarantee_steps(max_sigma):
# if has next index, loop through and see if need to switch
Expand Down
4 changes: 2 additions & 2 deletions animatediff/sample_settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@

from . import freeinit
from .context import ContextOptions, ContextOptionsGroup
from .utils_model import SigmaSchedule, BIGMAX
from .utils_model import SigmaSchedule, BIGMAX_TENSOR
from .utils_motion import extend_to_batch_size, get_sorted_list_via_attr, prepare_mask_batch
from .logger import logger

Expand Down Expand Up @@ -672,7 +672,7 @@ def prepare_current_keyframe(self, t: Tensor, transformer_options: dict[str, Ten
if curr_t == self._previous_t:
return
prev_index = self._current_index
max_sigma = torch.max(transformer_options.get("sigmas", BIGMAX))
max_sigma = torch.max(transformer_options.get("sample_sigmas", BIGMAX_TENSOR))
# if met guaranteed steps, look for next keyframe in case need to switch
if self._current_used_steps >= self._current_keyframe.get_effective_guarantee_steps(max_sigma):
            # if has next index, loop through and see if need to switch
Expand Down
1 change: 1 addition & 0 deletions animatediff/utils_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@

BIGMIN = -(2**53-1)
BIGMAX = (2**53-1)
BIGMAX_TENSOR = torch.tensor(BIGMAX)

MAX_RESOLUTION = 16384 # mirrors ComfyUI's nodes.py MAX_RESOLUTION

Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
[project]
name = "comfyui-animatediff-evolved"
description = "Improved AnimateDiff integration for ComfyUI."
version = "1.4.0"
version = "1.4.1"
license = { file = "LICENSE" }
dependencies = []

Expand Down

0 comments on commit 7ec4693

Please sign in to comment.