
chore: remove print
jamescalam committed Nov 29, 2024
1 parent 981b039 commit ff2c3ca
Showing 3 changed files with 0 additions and 16 deletions.
2 changes: 0 additions & 2 deletions semantic_router/encoders/tfidf.py
@@ -53,13 +53,11 @@ def _fit_validate(self, routes: List[Route]):
             raise TypeError("`routes` parameter must be a list of Route objects.")
 
     def _build_word_index(self, docs: List[str]) -> Dict:
-        print(docs)
         words = set()
         for doc in docs:
             for word in doc.split():
                 words.add(word)
         word_index = {word: i for i, word in enumerate(words)}
-        print(word_index)
         return word_index
 
     def _compute_tf(self, docs: List[str]) -> np.ndarray:
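
For context, the removed prints were inspecting the vocabulary mapping built by `_build_word_index`. A minimal standalone sketch of the same logic shown in the hunk above, with hypothetical example docs:

    docs = ["hello world", "hello again"]
    words = set()
    for doc in docs:
        for word in doc.split():
            words.add(word)
    word_index = {word: i for i, word in enumerate(words)}
    # e.g. {"world": 0, "hello": 1, "again": 2}; ordering depends on set iteration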
12 changes: 0 additions & 12 deletions semantic_router/routers/hybrid.py
@@ -37,13 +37,10 @@ def __init__(
         auto_sync: Optional[str] = None,
         alpha: float = 0.3,
     ):
-        print("...2.1")
         if index is None:
             logger.warning("No index provided. Using default HybridLocalIndex.")
             index = HybridLocalIndex()
-        print("...2.2")
         encoder = self._get_encoder(encoder=encoder)
-        print("...2.3")
         super().__init__(
             encoder=encoder,
             llm=llm,
@@ -53,22 +50,17 @@ def __init__(
             aggregation=aggregation,
             auto_sync=auto_sync,
         )
-        print("...0")
         # initialize sparse encoder
         self.sparse_encoder = self._get_sparse_encoder(sparse_encoder=sparse_encoder)
-        print("...5")
         # set alpha
         self.alpha = alpha
-        print("...6")
         # fit sparse encoder if needed
         if (
             isinstance(self.sparse_encoder, TfidfEncoder)
             and hasattr(self.sparse_encoder, "fit")
             and self.routes
         ):
-            print("...3")
             self.sparse_encoder.fit(self.routes)
-            print("...4")
         # run initialize index now if auto sync is active
         if self.auto_sync:
             self._init_index_state()
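
If trace output is ever needed in this constructor again, the module-level logger already used for the warning above is the usual replacement for ad hoc prints. The package's own logger import path is not shown in this diff, so the sketch below falls back to the standard library under an assumed logger name:

    import logging

    logging.basicConfig(level=logging.DEBUG)
    logger = logging.getLogger("semantic_router")  # assumed logger name
    logger.debug("HybridRouter init: alpha=%s, auto_sync=%s", 0.3, "local")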
@@ -94,7 +86,6 @@ def add(self, routes: List[Route] | Route):
         # TODO: to merge, self._encode should probably output a special
         # TODO Embedding type that can be either dense or hybrid
         dense_emb, sparse_emb = self._encode(all_utterances)
-        print(f"{sparse_emb=}")
         self.index.add(
             embeddings=dense_emb.tolist(),
             routes=route_names,
@@ -180,8 +171,6 @@ def _encode(self, text: list[str]) -> tuple[np.ndarray, list[SparseEmbedding]]:
         xq_s = self.sparse_encoder(text)
         # xq_s = np.squeeze(xq_s)
         # convex scaling
-        print(f"{self.sparse_encoder.__class__.__name__=}")
-        print(f"_encode: {xq_d.shape=}, {xq_s=}")
         xq_d, xq_s = self._convex_scaling(dense=xq_d, sparse=xq_s)
         return xq_d, xq_s
 
@@ -202,7 +191,6 @@ async def _async_encode(
         # create dense query vector
         xq_d = np.array(dense_vec)
 
         # convex scaling
-        print(f"_async_encode: {xq_d.shape=}, {xq_s=}")
         xq_d, xq_s = self._convex_scaling(dense=xq_d, sparse=xq_s)
         return xq_d, xq_s

Codecov (codecov/patch) flagged line 192 and lines 194-195 of semantic_router/routers/hybrid.py as not covered by tests.
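
Several of the removed prints were tracing the convex scaling step that blends the dense and sparse query embeddings using alpha. The library's actual `_convex_scaling` method is not shown in this diff, so the following is only a rough standalone sketch of the usual convex-combination idea, with hypothetical names:

    import numpy as np

    def convex_scale(dense: np.ndarray, sparse: list[np.ndarray], alpha: float = 0.3):
        # weight dense values by alpha and sparse values by (1 - alpha)
        scaled_dense = alpha * dense
        scaled_sparse = [(1 - alpha) * vec for vec in sparse]
        return scaled_dense, scaled_sparse

    dense_q = np.random.rand(1, 1536)        # hypothetical dense query embedding
    sparse_q = [np.array([0.2, 0.0, 0.7])]   # hypothetical sparse values
    xq_d, xq_s = convex_scale(dense_q, sparse_q, alpha=0.3)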
2 changes: 0 additions & 2 deletions tests/unit/test_hybrid_layer.py
@@ -146,14 +146,12 @@ def test_add_multiple_routes(self, openai_encoder, routes):
         assert len(route_layer.routes) == 2, "route_layer.routes is not 2"
 
     def test_query_and_classification(self, openai_encoder, routes):
-        print("...1")
         route_layer = HybridRouter(
             encoder=openai_encoder,
             sparse_encoder=sparse_encoder,
             routes=routes,
             auto_sync="local",
         )
-        print("...2")
         route_layer.set_threshold(0.0)
         query_result = route_layer(UTTERANCES[0])
         assert query_result.name in ["Route 1", "Route 2"]

0 comments on commit ff2c3ca
