Commit 5f85a74b authored by MrCheeze, committed by AUTOMATIC1111

fix bug where, when using prompt composition, hijack_comments generated before the final AND would be dropped
parent 32e428ff
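For context, a minimal sketch of the bug under the diff below, not the webui code itself (the `Hijack` class, the `encode_subprompt_*` helpers, the warning strings, and the example prompt are illustrative stand-ins): with prompt composition, the CLIP hijack's text processing runs once per AND-separated subprompt, and each run replaced `self.hijack.comments`, so only the final subprompt's comments survived. The fix clears the list once per `process_images` call and appends on each run instead.

    # Illustrative sketch only; names are hypothetical stand-ins for the webui internals.

    class Hijack:
        def __init__(self):
            self.comments = []

        def clear_comments(self):
            self.comments = []


    hijack = Hijack()

    def encode_subprompt_buggy(subprompt):
        # Old behaviour: every encode pass overwrote the list,
        # so warnings from earlier subprompts vanished.
        hijack.comments = [f"warning for {subprompt!r}"]

    def encode_subprompt_fixed(subprompt):
        # New behaviour: append, so comments accumulate across subprompts.
        hijack.comments += [f"warning for {subprompt!r}"]

    prompt = "a forest AND a castle"   # prompt composition: two subprompts

    for sub in prompt.split(" AND "):
        encode_subprompt_buggy(sub)
    print(hijack.comments)             # only the last subprompt's warning

    hijack.clear_comments()            # done once per image-generation call
    for sub in prompt.split(" AND "):
        encode_subprompt_fixed(sub)
    print(hijack.comments)             # warnings from both subprompts

Both halves of the change are needed: appending alone would carry comments over from one generation to the next, so the new `clear_comments()` call resets the list at the start of `process_images`.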
@@ -313,6 +313,7 @@ def process_images(p: StableDiffusionProcessing) -> Processed:
     os.makedirs(p.outpath_grids, exist_ok=True)
 
     modules.sd_hijack.model_hijack.apply_circular(p.tiling)
+    modules.sd_hijack.model_hijack.clear_comments()
 
     comments = {}
@@ -88,6 +88,9 @@ class StableDiffusionModelHijack:
         for layer in [layer for layer in self.layers if type(layer) == torch.nn.Conv2d]:
             layer.padding_mode = 'circular' if enable else 'zeros'
 
+    def clear_comments(self):
+        self.comments = []
+
     def tokenize(self, text):
         _, remade_batch_tokens, _, _, _, token_count = self.clip.process_text([text])
         return remade_batch_tokens[0], token_count, get_target_prompt_token_count(token_count)
@@ -260,7 +263,7 @@ class FrozenCLIPEmbedderWithCustomWords(torch.nn.Module):
         batch_multipliers, remade_batch_tokens, used_custom_terms, hijack_comments, hijack_fixes, token_count = self.process_text(text)
 
         self.hijack.fixes = hijack_fixes
-        self.hijack.comments = hijack_comments
+        self.hijack.comments += hijack_comments
 
         if len(used_custom_terms) > 0:
             self.hijack.comments.append("Used embeddings: " + ", ".join([f'{word} [{checksum}]' for word, checksum in used_custom_terms]))