Skip to content

Commit

Permalink
Make client batched POST forward correct index
Browse files Browse the repository at this point in the history
Previously it forwarded the last indexes of the prediction to all
batches, resulting in the loss of most predictions.
  • Loading branch information
Erik Parmann authored and epa095 committed Apr 25, 2019
1 parent 424c869 commit a6adb0c
Show file tree
Hide file tree
Showing 2 changed files with 29 additions and 1 deletion.
6 changes: 5 additions & 1 deletion gordo_components/client/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -302,12 +302,16 @@ async def _process_post_prediction_task(
# Get the output values
values = np.array(resp["output"])

# Chunks can have None as end-point
chunk_stop = chunk.stop if chunk.stop else len(X)
# Chunks can also be larger than the actual data
chunk_stop = min(chunk_stop, len(X))
predictions = pd.DataFrame(
data=values,
columns=[f"input_{sensor}" for sensor in X.columns]
+ [f"output_{sensor}" for sensor in X.columns],
# match any offsetting from windowed models
index=X.index[-len(values) :],
index=X.index[chunk_stop - len(values) : chunk_stop],
)

# Forward predictions to any other consumer if registered.
Expand Down
24 changes: 24 additions & 0 deletions tests/gordo_components/client/test_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,6 +111,29 @@ def test_client_predictions_with_or_without_data_provider(
"""
Run the prediction client with or without a data provider
"""
use_client_predictions(trained_model_directory, use_data_provider, batch_size=1000)


@pytest.mark.dockertest
@pytest.mark.parametrize("batch_size", (10, 100))
@pytest.mark.parametrize("trained_model_directory", (SENSORS,), indirect=True)
def test_client_predictions_different_batch_sizes(
    trained_model_directory: pytest.fixture, batch_size: int
):
    """
    Verify the prediction client behaves correctly across several batch-sizes,
    always running with a data provider.
    """
    # Delegate to the shared helper, varying only the batch size.
    use_client_predictions(
        trained_model_directory, use_data_provider=True, batch_size=batch_size
    )


def use_client_predictions(
trained_model_directory: pytest.fixture, use_data_provider: bool, batch_size: int
):
"""
Run the prediction client with or without a data provider
"""

with watchman(
host="localhost",
Expand Down Expand Up @@ -165,6 +188,7 @@ def test_client_predictions_with_or_without_data_provider(
prediction_forwarder=ForwardPredictionsIntoInflux(
destination_influx_uri=uri
),
batch_size=batch_size,
)

# Should have discovered machine-1 & machine-2
Expand Down

0 comments on commit a6adb0c

Please sign in to comment.