From 1fcfddb28cc681a5397dc0a94b394f4f000b65c5 Mon Sep 17 00:00:00 2001
From: Pam
Date: Mon, 29 Jul 2024 02:23:45 +0500
Subject: [PATCH] Fix ADV_CLIP_emb support

---
 __init__.py               |  7 ++-
 clip_negpip.py            | 81 ---------------------------------
 compat/__init__.py        |  0
 compat/advanced_encode.py | 94 +++++++++++++++++++++++++++++++++++++++
 pyproject.toml            |  2 +-
 5 files changed, 101 insertions(+), 83 deletions(-)
 create mode 100644 compat/__init__.py
 create mode 100644 compat/advanced_encode.py

diff --git a/__init__.py b/__init__.py
index ca417c4..46085bf 100644
--- a/__init__.py
+++ b/__init__.py
@@ -2,12 +2,15 @@
 from .random_gen import RandomPromptGenerator
 from .cascade_utils import StableCascade_AutoCompLatent
 from .clip_misc import CLIPTextEncodeBREAK, CLIPMicroConditioning, CLIPTokenCounter
-from .clip_negpip import CLIPNegPip, hijack_adv_encode
+from .clip_negpip import CLIPNegPip
 from .attention_couple_ppm import AttentionCouplePPM
 from .guidance_limiter import GuidanceLimiter
 from .samplers import CFGPPSamplerSelect, inject_samplers
 from .schedulers import hijack_schedulers
+from .compat.advanced_encode import hijack_adv_encode
+
+
 
 WEB_DIRECTORY = "./js"
 
 NODE_CLASS_MAPPINGS = {
@@ -43,6 +46,8 @@
     "CFGPPSamplerSelect": "CFG++SamplerSelect",
 }
 
+
 inject_samplers()
 hijack_schedulers()
+hijack_adv_encode()
 
diff --git a/clip_negpip.py b/clip_negpip.py
index 8fca714..79cc00e 100644
--- a/clip_negpip.py
+++ b/clip_negpip.py
@@ -3,7 +3,6 @@
 # https://github.com/hako-mikan/sd-webui-negpip
 from functools import partial
 import torch
-from math import copysign
 
 from comfy import model_management
 from comfy.model_patcher import ModelPatcher
@@ -11,8 +10,6 @@
 from comfy.sd1_clip import SD1ClipModel, gen_empty_tokens, ClipTokenWeightEncoder
 from comfy.sdxl_clip import SDXLClipModel, SDXLRefinerClipModel
 
-INITIALIZED = False
-
 
 def has_negpip(model_options: dict):
     try:
@@ -76,65 +73,6 @@ def _encode_token_weights_negpip(_self: ClipTokenWeightEncoder, token_weight_pai
     return torch.cat(output, dim=-2).to(model_management.intermediate_device()), first_pooled
 
 
-def _advanced_encode_from_tokens_negpip_wrapper(advanced_encode_from_tokens, from_zero):
-
-    def advanced_encode_from_tokens_negpip(
-        tokenized,
-        token_normalization,
-        weight_interpretation,
-        encode_func,
-        m_token=266,
-        length=77,
-        w_max=1.0,
-        return_pooled=False,
-        apply_to_pooled=False,
-    ):
-        tokenized_abs = [[(t, abs(w), p) for t, w, p in x] for x in tokenized]
-        weights_sign = [[copysign(1, w) for _, w, _ in x] for x in tokenized]
-
-        def _encoded_with_negpip(encode_func, m_token=266, length=77):
-            tokens = [[(m_token, 1.0) for _ in range(length)]]
-            emb, _ = encode_func(tokens)
-            if emb.shape[1] == length:
-                return False
-            elif emb.shape[1] == length * 2:
-                return True
-            raise ValueError("Unknown tensor shape - perhaps you've applied NegPip node more than once")
-
-        encoded_with_negpip = _encoded_with_negpip(encode_func, m_token, length)
-
-        def _encode_func(tokens):
-            emb, pooled = encode_func(tokens)
-            if encoded_with_negpip:
-                return emb[:, 0::2, :], pooled
-            return emb, pooled
-
-        def _apply_negpip(weights_sign, emb):
-            emb_negpip = torch.empty_like(emb).repeat(1, 2, 1)
-            emb_negpip[:, 0::2, :] = emb
-            emb_negpip[:, 1::2, :] = from_zero(weights_sign, emb)
-            return emb_negpip
-
-        weighted_emb, pooled = advanced_encode_from_tokens(
-            tokenized_abs,
-            token_normalization,
-            weight_interpretation,
-            _encode_func,
-            m_token,
-            length,
-            w_max,
-            return_pooled,
-            apply_to_pooled,
-        )
-
-        if encoded_with_negpip:
-            weighted_emb = _apply_negpip(weights_sign, weighted_emb)
-
-        return weighted_emb, pooled
-
-    return advanced_encode_from_tokens_negpip
-
-
 class CLIPNegPip:
     @classmethod
     def INPUT_TYPES(s):
@@ -169,22 +107,3 @@ def patch(self, model: ModelPatcher, clip: CLIP):
             m.set_model_attn2_patch(_negpip_attn)
 
         return (m, c)
-
-
-def hijack_adv_encode():
-    global INITIALIZED
-    if not INITIALIZED:
-        try:
-            import custom_nodes.ComfyUI_ADV_CLIP_emb.adv_encode as adv_encode
-
-            advanced_encode_from_tokens_negpip = _advanced_encode_from_tokens_negpip_wrapper(
-                adv_encode.advanced_encode_from_tokens, adv_encode.from_zero
-            )
-
-            adv_encode.advanced_encode_from_tokens = advanced_encode_from_tokens_negpip
-
-        except ImportError:
-            pass
-
-        finally:
-            INITIALIZED = True
diff --git a/compat/__init__.py b/compat/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/compat/advanced_encode.py b/compat/advanced_encode.py
new file mode 100644
index 0000000..d27217e
--- /dev/null
+++ b/compat/advanced_encode.py
@@ -0,0 +1,94 @@
+import torch
+from math import copysign
+
+INITIALIZED = False
+
+
+def _advanced_encode_from_tokens_negpip_wrapper(advanced_encode_from_tokens, from_zero):
+
+    def advanced_encode_from_tokens_negpip(
+        tokenized,
+        token_normalization,
+        weight_interpretation,
+        encode_func,
+        m_token=266,
+        length=77,
+        w_max=1.0,
+        return_pooled=False,
+        apply_to_pooled=False,
+    ):
+        tokenized_abs = [[(t, abs(w), p) for t, w, p in x] for x in tokenized]
+        weights_sign = [[copysign(1, w) for _, w, _ in x] for x in tokenized]
+
+        def _encoded_with_negpip(encode_func, m_token=266, length=77):
+            tokens = [[(m_token, 1.0) for _ in range(length)]]
+            emb, _ = encode_func(tokens)
+            if emb.shape[1] == length:
+                return False
+            elif emb.shape[1] == length * 2:
+                return True
+            raise ValueError("Unknown tensor shape - perhaps you've applied NegPip node more than once")
+
+        encoded_with_negpip = _encoded_with_negpip(encode_func, m_token, length)
+
+        def _encode_func(tokens):
+            emb, pooled = encode_func(tokens)
+            if encoded_with_negpip:
+                return emb[:, 0::2, :], pooled
+            return emb, pooled
+
+        def _apply_negpip(weights_sign, emb):
+            emb_negpip = torch.empty_like(emb).repeat(1, 2, 1)
+            emb_negpip[:, 0::2, :] = emb
+            emb_negpip[:, 1::2, :] = from_zero(weights_sign, emb)
+            return emb_negpip
+
+        weighted_emb, pooled = advanced_encode_from_tokens(
+            tokenized_abs,
+            token_normalization,
+            weight_interpretation,
+            _encode_func,
+            m_token,
+            length,
+            w_max,
+            return_pooled,
+            apply_to_pooled,
+        )
+
+        if encoded_with_negpip:
+            weighted_emb = _apply_negpip(weights_sign, weighted_emb)
+
+        return weighted_emb, pooled
+
+    return advanced_encode_from_tokens_negpip
+
+
+def hijack_adv_encode():
+    global INITIALIZED
+    if not INITIALIZED:
+        import sys
+        import pathlib
+
+        custom_nodes = pathlib.Path(__file__).parent.parent.parent
+        assert custom_nodes.name == "custom_nodes"
+
+        sys.path.insert(0, str(custom_nodes))
+
+        try:
+
+            import custom_nodes.ComfyUI_ADV_CLIP_emb.adv_encode as adv_encode
+            import ComfyUI_ADV_CLIP_emb.adv_encode as adv_encode_inner
+
+            advanced_encode_from_tokens_negpip = _advanced_encode_from_tokens_negpip_wrapper(
+                adv_encode.advanced_encode_from_tokens, adv_encode.from_zero
+            )
+
+            adv_encode.advanced_encode_from_tokens = advanced_encode_from_tokens_negpip
+            adv_encode_inner.advanced_encode_from_tokens = advanced_encode_from_tokens_negpip
+
+        except ImportError:
+            pass
+
+        finally:
+            sys.path.pop(0)
+            INITIALIZED = True
diff --git a/pyproject.toml b/pyproject.toml
index 5f91d9b..1ee440e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,7 +1,7 @@
 [project]
 name = "comfyui-ppm"
 description = "Fixed AttentionCouple/NegPip(negative weights in prompts), more CFG++ samplers, etc."
-version = "1.0.4"
+version = "1.0.5"
 license = "AGPL-3.0"
 
 [project.urls]
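
Note on the mechanics, as a minimal illustrative sketch outside the patch itself: _apply_negpip doubles the token axis, keeping the unsigned embedding at even positions and a sign-scaled copy at odd positions, and that doubled length (77 -> 154 for CLIP) is exactly what _encoded_with_negpip probes for. The from_zero below is a simplified stand-in for ADV_CLIP_emb's helper of the same name, not its real implementation:

    import torch

    def from_zero(weights, emb):
        # Simplified stand-in (assumption, not ADV_CLIP_emb's actual code):
        # scale each token embedding by its signed weight.
        w = torch.tensor(weights, dtype=emb.dtype, device=emb.device).unsqueeze(-1)
        return emb * w

    def apply_negpip(weights_sign, emb):
        # Same interleaving as _apply_negpip in the patch: double the token
        # axis, even slots keep the embedding, odd slots carry the sign.
        emb_negpip = torch.empty_like(emb).repeat(1, 2, 1)
        emb_negpip[:, 0::2, :] = emb
        emb_negpip[:, 1::2, :] = from_zero(weights_sign, emb)
        return emb_negpip

    emb = torch.randn(1, 77, 768)      # CLIP-sized embedding: [batch, tokens, dim]
    signs = [[1.0] * 76 + [-1.0]]      # last token weighted negatively
    out = apply_negpip(signs, emb)
    assert out.shape == (1, 154, 768)  # the doubled length _encoded_with_negpip detects

Patching both custom_nodes.ComfyUI_ADV_CLIP_emb.adv_encode and ComfyUI_ADV_CLIP_emb.adv_encode is needed because Python caches modules by import name in sys.modules: the two import paths produce two distinct module objects, each holding its own advanced_encode_from_tokens reference.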