diff --git a/comfy/sd1_clip.py b/comfy/sd1_clip.py
index bb240526f3b..6f574900fe3 100644
--- a/comfy/sd1_clip.py
+++ b/comfy/sd1_clip.py
@@ -405,7 +405,7 @@ class SDTokenizer:
     def __init__(self, tokenizer_path=None, max_length=77, pad_with_end=True, embedding_directory=None, embedding_size=768, embedding_key='clip_l', tokenizer_class=CLIPTokenizer, has_start_token=True, pad_to_max_length=True, min_length=None, pad_token=None, tokenizer_data={}):
         if tokenizer_path is None:
             tokenizer_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "sd1_tokenizer")
-        self.tokenizer = tokenizer_class.from_pretrained(tokenizer_path)
+        self.tokenizer = tokenizer_class.from_pretrained(tokenizer_path, clean_up_tokenization_spaces=True)  # Fix Transformers FutureWarning by explicitly setting clean_up_tokenization_spaces to True
         self.max_length = max_length
         self.min_length = min_length
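
For context, a minimal standalone sketch (not part of the diff) of the warning this patch silences: recent Transformers releases emit a FutureWarning when clean_up_tokenization_spaces is left unset, because its default is scheduled to change from True to False. Passing it explicitly pins the current behavior and suppresses the warning. The checkpoint name below is illustrative only; the patched code actually loads ComfyUI's bundled sd1_tokenizer directory.

    import warnings
    from transformers import CLIPTokenizer

    # Illustrative checkpoint, not the path used by ComfyUI.
    tokenizer_path = "openai/clip-vit-base-patch32"

    # Leaving clean_up_tokenization_spaces unset may emit the FutureWarning.
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        CLIPTokenizer.from_pretrained(tokenizer_path)
        for w in caught:
            if issubclass(w.category, FutureWarning):
                print("FutureWarning:", w.message)

    # Passing the argument explicitly pins today's default (True) and avoids the warning.
    tokenizer = CLIPTokenizer.from_pretrained(tokenizer_path, clean_up_tokenization_spaces=True)

Note that this pins the pre-deprecation behavior rather than adopting the upcoming default; tokenization output is unchanged for existing workflows.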