Commit 615b2fc9 authored by guaneec, committed by AUTOMATIC1111

Fix token max length

parent b8eae5de
@@ -300,7 +300,7 @@ class FrozenCLIPEmbedderWithCustomWords(torch.nn.Module):
         remade_batch_tokens = []
         id_start = self.wrapped.tokenizer.bos_token_id
         id_end = self.wrapped.tokenizer.eos_token_id
-        maxlen = self.wrapped.max_length - 2
+        maxlen = self.wrapped.max_length
         used_custom_terms = []
         cache = {}
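For context, a minimal, hypothetical sketch (not the repository's code) of the kind of fixed-length token wrapping this maxlen feeds into. It assumes self.wrapped.max_length corresponds to the CLIP tokenizer's model_max_length of 77, which already counts the BOS and EOS positions; if the later wrapping step also reserves those two slots, subtracting 2 up front would shrink the usable prompt budget a second time (73 instead of 75 tokens), which is presumably what this commit fixes. The function wrap_to_max_length below is invented for illustration.

from transformers import CLIPTokenizer

tokenizer = CLIPTokenizer.from_pretrained("openai/clip-vit-large-patch14")

def wrap_to_max_length(prompt_token_ids, maxlen=tokenizer.model_max_length):
    """Hypothetical illustration: build a fixed-length CLIP token sequence.

    maxlen (77 for CLIP) includes the BOS and EOS positions, so the prompt
    itself may occupy at most maxlen - 2 slots.
    """
    id_start = tokenizer.bos_token_id
    id_end = tokenizer.eos_token_id

    # Keep at most maxlen - 2 prompt tokens, leaving room for BOS/EOS.
    body = prompt_token_ids[: maxlen - 2]
    # Pad with EOS so the sequence is always exactly maxlen tokens long.
    body = body + [id_end] * (maxlen - 2 - len(body))
    return [id_start] + body + [id_end]

ids = tokenizer("a photo of a cat", add_special_tokens=False)["input_ids"]
assert len(wrap_to_max_length(ids)) == 77

Under these assumptions the wrapped sequence is always exactly 77 ids: one BOS, up to 75 prompt tokens, and EOS padding, so the full max_length is needed here rather than max_length - 2.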