Fix ADV_CLIP_emb support
pamparamm committed Jul 28, 2024
1 parent b013d43 commit 1fcfddb
Showing 5 changed files with 101 additions and 83 deletions.
7 changes: 6 additions & 1 deletion __init__.py
@@ -2,12 +2,15 @@
from .random_gen import RandomPromptGenerator
from .cascade_utils import StableCascade_AutoCompLatent
from .clip_misc import CLIPTextEncodeBREAK, CLIPMicroConditioning, CLIPTokenCounter
from .clip_negpip import CLIPNegPip, hijack_adv_encode
from .clip_negpip import CLIPNegPip
from .attention_couple_ppm import AttentionCouplePPM
from .guidance_limiter import GuidanceLimiter
from .samplers import CFGPPSamplerSelect, inject_samplers
from .schedulers import hijack_schedulers

from .compat.advanced_encode import hijack_adv_encode


WEB_DIRECTORY = "./js"

NODE_CLASS_MAPPINGS = {
@@ -43,6 +46,8 @@
"CFGPPSamplerSelect": "CFG++SamplerSelect",
}


inject_samplers()
hijack_schedulers()

hijack_adv_encode()
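
For context, the hook calls at the bottom of __init__.py follow a run-once pattern: the package __init__ executes when ComfyUI imports the custom node, and a module-level guard keeps repeated imports from patching twice. A minimal sketch (names hypothetical, not the repo's code):

_INSTALLED = False  # analogous to INITIALIZED in compat/advanced_encode.py

def install_hooks():
    global _INSTALLED
    if _INSTALLED:
        return
    # ... monkey-patch third-party modules here ...
    _INSTALLED = True

install_hooks()  # runs at import time, like hijack_adv_encode() above
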
81 changes: 0 additions & 81 deletions clip_negpip.py
@@ -3,16 +3,13 @@
# https://github.com/hako-mikan/sd-webui-negpip
from functools import partial
import torch
from math import copysign

from comfy import model_management
from comfy.model_patcher import ModelPatcher
from comfy.sd import CLIP
from comfy.sd1_clip import SD1ClipModel, gen_empty_tokens, ClipTokenWeightEncoder
from comfy.sdxl_clip import SDXLClipModel, SDXLRefinerClipModel

INITIALIZED = False


def has_negpip(model_options: dict):
try:
@@ -76,65 +73,6 @@ def _encode_token_weights_negpip(_self: ClipTokenWeightEncoder, token_weight_pai
return torch.cat(output, dim=-2).to(model_management.intermediate_device()), first_pooled


def _advanced_encode_from_tokens_negpip_wrapper(advanced_encode_from_tokens, from_zero):

def advanced_encode_from_tokens_negpip(
tokenized,
token_normalization,
weight_interpretation,
encode_func,
m_token=266,
length=77,
w_max=1.0,
return_pooled=False,
apply_to_pooled=False,
):
tokenized_abs = [[(t, abs(w), p) for t, w, p in x] for x in tokenized]
weights_sign = [[copysign(1, w) for _, w, _ in x] for x in tokenized]

def _encoded_with_negpip(encode_func, m_token=266, length=77):
tokens = [[(m_token, 1.0) for _ in range(length)]]
emb, _ = encode_func(tokens)
if emb.shape[1] == length:
return False
elif emb.shape[1] == length * 2:
return True
raise ValueError("Unknown tensor shape - perhaps you've applied NegPip node more than once")

encoded_with_negpip = _encoded_with_negpip(encode_func, m_token, length)

def _encode_func(tokens):
emb, pooled = encode_func(tokens)
if encoded_with_negpip:
return emb[:, 0::2, :], pooled
return emb, pooled

def _apply_negpip(weights_sign, emb):
emb_negpip = torch.empty_like(emb).repeat(1, 2, 1)
emb_negpip[:, 0::2, :] = emb
emb_negpip[:, 1::2, :] = from_zero(weights_sign, emb)
return emb_negpip

weighted_emb, pooled = advanced_encode_from_tokens(
tokenized_abs,
token_normalization,
weight_interpretation,
_encode_func,
m_token,
length,
w_max,
return_pooled,
apply_to_pooled,
)

if encoded_with_negpip:
weighted_emb = _apply_negpip(weights_sign, weighted_emb)

return weighted_emb, pooled

return advanced_encode_from_tokens_negpip


class CLIPNegPip:
@classmethod
def INPUT_TYPES(s):
@@ -169,22 +107,3 @@ def patch(self, model: ModelPatcher, clip: CLIP):
m.set_model_attn2_patch(_negpip_attn)

return (m, c)


def hijack_adv_encode():
global INITIALIZED
if not INITIALIZED:
try:
import custom_nodes.ComfyUI_ADV_CLIP_emb.adv_encode as adv_encode

advanced_encode_from_tokens_negpip = _advanced_encode_from_tokens_negpip_wrapper(
adv_encode.advanced_encode_from_tokens, adv_encode.from_zero
)

adv_encode.advanced_encode_from_tokens = advanced_encode_from_tokens_negpip

except ImportError:
pass

finally:
INITIALIZED = True
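
For reference, the wrapper deleted here (and re-added under compat/ below) doubles the token axis of the embedding: even positions keep the embedding computed from |w|, odd positions carry the sign via from_zero. A toy sketch with assumed shapes, using -emb as a stand-in for from_zero(weights_sign, emb):

import torch

batch, length, dim = 1, 4, 8
emb = torch.randn(batch, length, dim)  # weighted embeddings, weights taken as |w|
signed = -emb                          # stand-in for from_zero(weights_sign, emb)

emb_negpip = torch.empty_like(emb).repeat(1, 2, 1)  # (batch, 2 * length, dim)
emb_negpip[:, 0::2, :] = emb     # even slots: original embedding
emb_negpip[:, 1::2, :] = signed  # odd slots: sign-carrying copy

assert emb_negpip.shape == (batch, 2 * length, dim)

This doubled layout is also what the shape probe in _encoded_with_negpip detects: a 77-token dummy prompt encodes to 154 positions once the NegPip patch is active.
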
Empty file added compat/__init__.py
94 changes: 94 additions & 0 deletions compat/advanced_encode.py
@@ -0,0 +1,94 @@
import torch
from math import copysign

INITIALIZED = False


def _advanced_encode_from_tokens_negpip_wrapper(advanced_encode_from_tokens, from_zero):

def advanced_encode_from_tokens_negpip(
tokenized,
token_normalization,
weight_interpretation,
encode_func,
m_token=266,
length=77,
w_max=1.0,
return_pooled=False,
apply_to_pooled=False,
):
tokenized_abs = [[(t, abs(w), p) for t, w, p in x] for x in tokenized]
weights_sign = [[copysign(1, w) for _, w, _ in x] for x in tokenized]

def _encoded_with_negpip(encode_func, m_token=266, length=77):
tokens = [[(m_token, 1.0) for _ in range(length)]]
emb, _ = encode_func(tokens)
if emb.shape[1] == length:
return False
elif emb.shape[1] == length * 2:
return True
raise ValueError("Unknown tensor shape - perhaps you've applied NegPip node more than once")

encoded_with_negpip = _encoded_with_negpip(encode_func, m_token, length)

def _encode_func(tokens):
emb, pooled = encode_func(tokens)
if encoded_with_negpip:
return emb[:, 0::2, :], pooled
return emb, pooled

def _apply_negpip(weights_sign, emb):
emb_negpip = torch.empty_like(emb).repeat(1, 2, 1)
emb_negpip[:, 0::2, :] = emb
emb_negpip[:, 1::2, :] = from_zero(weights_sign, emb)
return emb_negpip

weighted_emb, pooled = advanced_encode_from_tokens(
tokenized_abs,
token_normalization,
weight_interpretation,
_encode_func,
m_token,
length,
w_max,
return_pooled,
apply_to_pooled,
)

if encoded_with_negpip:
weighted_emb = _apply_negpip(weights_sign, weighted_emb)

return weighted_emb, pooled

return advanced_encode_from_tokens_negpip


def hijack_adv_encode():
global INITIALIZED
if not INITIALIZED:
import sys
import pathlib

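# compat/advanced_encode.py sits at custom_nodes/<this pack>/compat/, so
# three parents up should be ComfyUI's custom_nodes directory.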
custom_nodes = pathlib.Path(__file__).parent.parent.parent
assert custom_nodes.name == "custom_nodes"

sys.path.insert(0, str(custom_nodes))

try:

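# The same adv_encode.py can be imported both with and without the
# custom_nodes. prefix; each qualified name gets its own module object,
# so the patched function has to be installed under both names below.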
import custom_nodes.ComfyUI_ADV_CLIP_emb.adv_encode as adv_encode
import ComfyUI_ADV_CLIP_emb.adv_encode as adv_encode_inner

advanced_encode_from_tokens_negpip = _advanced_encode_from_tokens_negpip_wrapper(
adv_encode.advanced_encode_from_tokens, adv_encode.from_zero
)

adv_encode.advanced_encode_from_tokens = advanced_encode_from_tokens_negpip
adv_encode_inner.advanced_encode_from_tokens = advanced_encode_from_tokens_negpip

except ImportError:
pass

finally:
sys.path.pop(0)
INITIALIZED = True
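
A plausible reading of the double patch above: once the custom_nodes directory is on sys.path, the same adv_encode.py is importable both as custom_nodes.ComfyUI_ADV_CLIP_emb.adv_encode and as ComfyUI_ADV_CLIP_emb.adv_encode, and each qualified name gets its own module object with its own globals. Patching one does not affect the other, so the hijack replaces the function under both. A standalone toy demonstration (no ComfyUI required):

import sys
import types

# Two module objects registered under different qualified names.
outer = types.ModuleType("custom_nodes.ComfyUI_ADV_CLIP_emb.adv_encode")
inner = types.ModuleType("ComfyUI_ADV_CLIP_emb.adv_encode")
outer.advanced_encode_from_tokens = inner.advanced_encode_from_tokens = lambda: "original"
sys.modules[outer.__name__] = outer
sys.modules[inner.__name__] = inner

# Patching only one name leaves callers of the other name on the old function:
outer.advanced_encode_from_tokens = lambda: "patched"
print(inner.advanced_encode_from_tokens())  # still prints "original"
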
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,7 +1,7 @@
[project]
name = "comfyui-ppm"
description = "Fixed AttentionCouple/NegPip(negative weights in prompts), more CFG++ samplers, etc."
version = "1.0.4"
version = "1.0.5"
license = "AGPL-3.0"

[project.urls]