Skip to content

Commit

Permalink
fixing linting errors
Browse files Browse the repository at this point in the history
  • Loading branch information
D-Sivakumar Sriumapathy authored and D-Sivakumar Sriumapathy committed Dec 3, 2024
1 parent 19bab3d commit aa03a25
Show file tree
Hide file tree
Showing 4 changed files with 4 additions and 8 deletions.
1 change: 0 additions & 1 deletion bertopic/representation/_cohere.py
Original file line number Diff line number Diff line change
Expand Up @@ -128,7 +128,6 @@ def __init__(

self.prompts_ = []


def extract_topics(
self,
topic_model,
Expand Down
1 change: 0 additions & 1 deletion bertopic/representation/_langchain.py
Original file line number Diff line number Diff line change
Expand Up @@ -150,7 +150,6 @@ def __init__(
self.tokenizer = tokenizer
validate_truncate_document_parameters(self.tokenizer, self.doc_length)


def extract_topics(
self,
topic_model,
Expand Down
3 changes: 1 addition & 2 deletions bertopic/representation/_openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
from bertopic.representation._utils import (
retry_with_exponential_backoff,
truncate_document,
validate_truncate_document_parameters
validate_truncate_document_parameters,
)


Expand Down Expand Up @@ -183,7 +183,6 @@ def __init__(
if not self.generator_kwargs.get("stop") and not chat:
self.generator_kwargs["stop"] = "\n"


def extract_topics(
self,
topic_model,
Expand Down
7 changes: 3 additions & 4 deletions bertopic/representation/_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,18 +58,17 @@ def decode(self, doc_chunks):
return truncated_document
return document


def validate_truncate_document_parameters(tokenizer, doc_length) -> None:
    """Validate the parameters used by `truncate_document`.

    The two parameters must be provided together: a `tokenizer` without a
    `doc_length` (or vice versa) cannot truncate anything, so either
    combination is rejected early with a clear error message.

    Arguments:
        tokenizer: One of {'char', 'whitespace', 'vectorizer'} or a callable
                   with `encode`/`decode` methods, or None.
        doc_length: Maximum document length (int) to truncate to, or None.

    Raises:
        ValueError: If exactly one of `tokenizer` / `doc_length` is provided.

    Returns:
        None. (Note: the previous annotation `Union[None, ValueError]` was
        misleading — the error is raised, never returned.)
    """
    if tokenizer is None and doc_length is not None:
        # doc_length alone is useless: we need a tokenizer to know how to count.
        raise ValueError(
            "Please select from one of the valid options for the `tokenizer` parameter: \n"
            "{'char', 'whitespace', 'vectorizer'} \n"
            "If `tokenizer` is of type callable ensure it has methods to encode and decode a document \n"
        )
    elif tokenizer is not None and doc_length is None:
        # tokenizer alone is useless: without a length limit nothing is truncated.
        raise ValueError("If `tokenizer` is provided, `doc_length` of type int must be provided as well.")


def retry_with_exponential_backoff(
Expand Down

0 comments on commit aa03a25

Please sign in to comment.